From 5866d0d16c79ca5c62507bdcb7d87669426e86d6 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 24 May 2016 15:57:44 +0200 Subject: Allow refinements of new types Previously a refinement could only apply to a type bound in the parent. This restriction needs to be dropped for the new encoding of hk type parameters. --- src/dotty/tools/dotc/core/TypeApplications.scala | 2 +- src/dotty/tools/dotc/core/Types.scala | 52 ++++++++++++++++------ src/dotty/tools/dotc/core/tasty/TastyFormat.scala | 3 +- src/dotty/tools/dotc/core/tasty/TreePickler.scala | 6 ++- .../tools/dotc/core/tasty/TreeUnpickler.scala | 5 ++- 5 files changed, 50 insertions(+), 18 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 3ed1798ed..d73181fcb 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -50,7 +50,7 @@ object TypeApplications { * * [v1 X1: B1, ..., vn Xn: Bn] -> T * ==> - * Lambda$_v1...vn { type $hk_i: B_i, type $Apply = [X_i := this.$Arg_i] T } + * ([X_i := this.$hk_i] T) { type v_i $hk_i: (new)B_i } */ object TypeLambda { def apply(variances: List[Int], diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index f514a329e..632ab823a 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2849,8 +2849,12 @@ object Types { unique(new CachedClassInfo(prefix, cls, classParents, decls, selfInfo)) } - /** Type bounds >: lo <: hi */ - abstract case class TypeBounds(lo: Type, hi: Type) extends CachedProxyType with TypeType { + /** Type bounds >: lo <: hi + * @param bindingKind: If != NoBinding, it indicates that this is + * an introduction of a higher-kinded type parameter. + * In that case it also defines the variance of the parameter. + */ + abstract case class TypeBounds(lo: Type, hi: Type)(val bindingKind: BindingKind) extends CachedProxyType with TypeType { assert(lo.isInstanceOf[TermType]) assert(hi.isInstanceOf[TermType]) @@ -2860,9 +2864,9 @@ object Types { override def underlying(implicit ctx: Context): Type = hi /** The non-alias type bounds type with given bounds */ - def derivedTypeBounds(lo: Type, hi: Type)(implicit ctx: Context) = - if ((lo eq this.lo) && (hi eq this.hi) && (variance == 0)) this - else TypeBounds(lo, hi) + def derivedTypeBounds(lo: Type, hi: Type, bk: BindingKind = this.bindingKind)(implicit ctx: Context) = + if ((lo eq this.lo) && (hi eq this.hi) && (bk == this.bindingKind) && (variance == 0)) this + else TypeBounds(lo, hi, bk) /** If this is an alias, a derived alias with the new variance, * Otherwise the type itself. 
@@ -2884,12 +2888,12 @@ object Types { def & (that: TypeBounds)(implicit ctx: Context): TypeBounds = if ((this.lo frozen_<:< that.lo) && (that.hi frozen_<:< this.hi)) that else if ((that.lo frozen_<:< this.lo) && (this.hi frozen_<:< that.hi)) this - else TypeBounds(this.lo | that.lo, this.hi & that.hi) + else TypeBounds(this.lo | that.lo, this.hi & that.hi, this.bindingKind join that.bindingKind) def | (that: TypeBounds)(implicit ctx: Context): TypeBounds = if ((this.lo frozen_<:< that.lo) && (that.hi frozen_<:< this.hi)) this else if ((that.lo frozen_<:< this.lo) && (this.hi frozen_<:< that.hi)) that - else TypeBounds(this.lo & that.lo, this.hi | that.hi) + else TypeBounds(this.lo & that.lo, this.hi | that.hi, this.bindingKind join that.bindingKind) override def & (that: Type)(implicit ctx: Context) = that match { case that: TypeBounds => this & that @@ -2909,6 +2913,7 @@ object Types { /** If this type and that type have the same variance, this variance, otherwise 0 */ final def commonVariance(that: TypeBounds): Int = (this.variance + that.variance) / 2 + override def computeHash = doHash(variance, lo, hi) override def equals(that: Any): Boolean = that match { case that: TypeBounds => (this.lo eq that.lo) && (this.hi eq that.hi) && this.variance == that.variance @@ -2920,11 +2925,9 @@ object Types { if (lo eq hi) s"TypeAlias($lo, $variance)" else s"TypeBounds($lo, $hi)" } - class RealTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi) { - override def computeHash = doHash(variance, lo, hi) - } + class RealTypeBounds(lo: Type, hi: Type, bk: BindingKind) extends TypeBounds(lo, hi)(bk) - abstract class TypeAlias(val alias: Type, override val variance: Int) extends TypeBounds(alias, alias) { + abstract class TypeAlias(val alias: Type, override val variance: Int) extends TypeBounds(alias, alias)(NoBinding) { /** pre: this is a type alias */ def derivedTypeAlias(alias: Type, variance: Int = this.variance)(implicit ctx: Context) = if ((alias eq this.alias) && (variance == this.variance)) this @@ -2952,12 +2955,11 @@ object Types { class CachedTypeAlias(alias: Type, variance: Int, hc: Int) extends TypeAlias(alias, variance) { myHash = hc - override def computeHash = doHash(variance, lo, hi) } object TypeBounds { - def apply(lo: Type, hi: Type)(implicit ctx: Context): TypeBounds = - unique(new RealTypeBounds(lo, hi)) + def apply(lo: Type, hi: Type, bk: BindingKind = NoBinding)(implicit ctx: Context): TypeBounds = + unique(new RealTypeBounds(lo, hi, bk)) def empty(implicit ctx: Context) = apply(defn.NothingType, defn.AnyType) def upper(hi: Type)(implicit ctx: Context) = apply(defn.NothingType, hi) def lower(lo: Type)(implicit ctx: Context) = apply(lo, defn.AnyType) @@ -2969,6 +2971,28 @@ object Types { def unapply(tp: TypeAlias): Option[Type] = Some(tp.alias) } + /** A value class defining the interpretation of a TypeBounds + * as either a regular type bounds or a binding (i.e. introduction) of a + * higher-kinded type parameter. 
+ */ + class BindingKind(val n: Byte) extends AnyVal { + def join(that: BindingKind) = + if (this == that) this + else if (this == NoBinding) that + else if (that == NoBinding) this + else NonvariantBinding + } + + val NoBinding = new BindingKind(0) // Regular type bounds + val ContravariantBinding = new BindingKind(1) // Bounds for contravariant hk type param + val NonvariantBinding = new BindingKind(2) // Bounds for nonvariant hk type param + val CovariantBinding = new BindingKind(3) // Bounds for covariant hk type param + + object BindingKind { + def fromVariance(v: Int): BindingKind = new BindingKind((v + NonvariantBinding.n).toByte) + def toVariance(bk: BindingKind): Int = bk.n - NonvariantBinding.n + } + // ----- Annotated and Import types ----------------------------------------------- /** An annotated type tpe @ annot */ diff --git a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index 221170622..a42958e75 100644 --- a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -129,7 +129,7 @@ Standard-Section: "ASTs" TopLevelStat* SUPERtype Length this_Type underlying_Type REFINEDtype Length underlying_Type refinement_NameRef info_Type APPLIEDtype Length tycon_Type arg_Type* - TYPEBOUNDS Length low_Type high_Type + TYPEBOUNDS Length low_Type high_Type bindingKind_Nat? TYPEALIAS Length alias_Type (COVARIANT | CONTRAVARIANT)? ANNOTATED Length underlying_Type fullAnnotation_Term ANDtype Length left_Type right_Type @@ -494,6 +494,7 @@ object TastyFormat { SELFDEF | REFINEDtype => 1 case RENAMED | PARAMtype => 2 case POLYtype | METHODtype => -1 + case TYPEBOUNDS => -2 case _ => 0 } } diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 37b9341eb..4cfd7727c 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -233,7 +233,11 @@ class TreePickler(pickler: TastyPickler) { } case tpe: TypeBounds => writeByte(TYPEBOUNDS) - withLength { pickleType(tpe.lo, richTypes); pickleType(tpe.hi, richTypes) } + withLength { + pickleType(tpe.lo, richTypes) + pickleType(tpe.hi, richTypes) + if (tpe.bindingKind != NoBinding) writeNat(tpe.bindingKind.n) + } case tpe: AnnotatedType => writeByte(ANNOTATED) withLength { pickleType(tpe.tpe, richTypes); pickleTree(tpe.annot.tree) } diff --git a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 91ac4ea3e..2b8e5f019 100644 --- a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -266,7 +266,10 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) { case APPLIEDtype => readType().appliedTo(until(end)(readType())) case TYPEBOUNDS => - TypeBounds(readType(), readType()) + val lo = readType() + val hi = readType() + val bk = ifBefore(end)(new BindingKind(readNat().toByte), NoBinding) + TypeBounds(lo, hi, bk) case TYPEALIAS => val alias = readType() val variance = -- cgit v1.2.3 From d30f441ae986c144e739223be97b906b3bbd43dc Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:01:12 +0200 Subject: Allow general recursion in refined types. Treat parent like refinedInfo. Introduce isBinding convenience method in TypeBounds.
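To illustrate what the added generality permits, here is a minimal sketch (not code from this patch) using the RefinedType.recursive constructor introduced below: the parent of a refinement may now refer back to the refinement itself, which is exactly what the encoding of the identity type lambda [X] -> X needs, since its parent is the self-reference this.$hk_0. The sketch assumes it sits inside dotty.tools.dotc.core.Types with an implicit Context in scope; the value name idLambda is illustrative only.

    // Sketch: encode [X] -> X as  (this.$hk_0) { type $hk_0 >: Nothing <: Any }
    // where `this` denotes the refinement being constructed.
    val idLambda: RefinedType = RefinedType.recursive(
      parentFn = rt => RefinedThis(rt).select(tpnme.hkArg(0)),    // parent refers back to the binder: this.$hk_0
      names    = List(tpnme.hkArg(0)),                            // the bound hk parameter $hk_0
      infoFns  = List(_ => TypeBounds.empty.withBindingKind(NonvariantBinding)))  // its binding bounds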
--- src/dotty/tools/dotc/core/TypeApplications.scala | 22 +++++------ src/dotty/tools/dotc/core/TypeComparer.scala | 5 +-- src/dotty/tools/dotc/core/TypeErasure.scala | 2 +- src/dotty/tools/dotc/core/TypeOps.scala | 4 +- src/dotty/tools/dotc/core/Types.scala | 43 ++++++++++++++++------ src/dotty/tools/dotc/core/tasty/TreePickler.scala | 2 +- .../dotc/core/unpickleScala2/Scala2Unpickler.scala | 4 +- src/dotty/tools/dotc/printing/PlainPrinter.scala | 2 +- src/dotty/tools/dotc/printing/RefinedPrinter.scala | 2 +- src/dotty/tools/dotc/typer/Applications.scala | 2 +- src/dotty/tools/dotc/typer/Checking.scala | 4 +- src/dotty/tools/dotc/typer/TypeAssigner.scala | 4 +- src/dotty/tools/dotc/typer/Variances.scala | 4 +- 13 files changed, 59 insertions(+), 41 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index d73181fcb..8ab5fbf02 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -70,19 +70,19 @@ object TypeApplications { } def unapply(tp: Type)(implicit ctx: Context): Option[(List[Int], List[TypeBounds], Type)] = tp match { - case app @ RefinedType(parent, tpnme.hkApply) => + case app @ RefinedType(parent, tpnme.hkApply, refinedInfo) => val cls = parent.typeSymbol val variances = cls.typeParams.map(_.variance) def collectBounds(t: Type, acc: List[TypeBounds]): List[TypeBounds] = t match { - case t @ RefinedType(p, rname) => + case t @ RefinedType(p, rname, rinfo) => assert(rname.isHkArgName) - collectBounds(p, t.refinedInfo.bounds :: acc) + collectBounds(p, rinfo.bounds :: acc) case TypeRef(_, lname) => assert(lname.isLambdaTraitName) acc } val argBounds = collectBounds(parent, Nil) - Some((variances, argBounds, app.refinedInfo.argInfo)) + Some((variances, argBounds, refinedInfo.argInfo)) case _ => None } @@ -153,9 +153,9 @@ object TypeApplications { def stripArgs(tp: Type, n: Int): Type = if (n == 0) tp else tp match { - case tp @ RefinedType(parent, pname) if pname == tparams(n - 1).name => + case tp @ RefinedType(parent, pname, rinfo) if pname == tparams(n - 1).name => val res = stripArgs(parent, n - 1) - if (res.exists) argBuf += tp.refinedInfo.argInfo + if (res.exists) argBuf += rinfo.argInfo res case _ => NoType @@ -335,7 +335,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** The Lambda trait underlying a type lambda */ def LambdaTrait(implicit ctx: Context): Symbol = self.stripTypeVar match { - case RefinedType(parent, tpnme.hkApply) => + case RefinedType(_, tpnme.hkApply, _) => val sym = self.classSymbol if (sym.isLambdaTrait) sym else NoSymbol case TypeBounds(lo, hi) => hi.LambdaTrait @@ -345,7 +345,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** Is receiver type higher-kinded (i.e. of kind != "*")? 
*/ def isHK(implicit ctx: Context): Boolean = self.dealias match { case self: TypeRef => self.info.isHK - case RefinedType(_, name) => name == tpnme.hkApply + case RefinedType(_, tpnme.hkApply, _) => true case TypeBounds(_, hi) => hi.isHK case _ => false } @@ -580,7 +580,7 @@ class TypeApplications(val self: Type) extends AnyVal { } assert(args.nonEmpty) matchParams(self, typParams, args) match { - case refined @ RefinedType(_, pname) if pname.isHkArgName => + case refined @ RefinedType(_, pname, _) if pname.isHkArgName => TypeRef(refined, tpnme.hkApply) case refined => refined @@ -671,7 +671,7 @@ class TypeApplications(val self: Type) extends AnyVal { case TypeBounds(_, hi) => hi.baseTypeWithArgs(base) case _ => default } - case tp @ RefinedType(parent, name) if !tp.member(name).symbol.is(ExpandedTypeParam) => + case tp @ RefinedType(parent, name, _) if !tp.member(name).symbol.is(ExpandedTypeParam) => tp.wrapIfMember(parent.baseTypeWithArgs(base)) case tp: TermRef => tp.underlying.baseTypeWithArgs(base) @@ -731,7 +731,7 @@ class TypeApplications(val self: Type) extends AnyVal { */ final def withoutArgs(typeArgs: List[Type]): Type = typeArgs match { case _ :: typeArgs1 => - val RefinedType(tycon, _) = self + val RefinedType(tycon, _, _) = self tycon.withoutArgs(typeArgs1) case nil => self diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 2523c6b9a..d1dc4069d 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -465,7 +465,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case _ => def isNullable(tp: Type): Boolean = tp.dealias match { case tp: TypeRef => tp.symbol.isNullableClass - case RefinedType(parent, _) => isNullable(parent) + case tp: RefinedType => isNullable(tp.parent) case AndType(tp1, tp2) => isNullable(tp1) && isNullable(tp2) case OrType(tp1, tp2) => isNullable(tp1) || isNullable(tp2) case _ => false @@ -738,9 +738,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * @return The parent type of `tp2` after skipping the matching refinements. */ private def skipMatching(tp1: Type, tp2: RefinedType): Type = tp1 match { - case tp1 @ RefinedType(parent1, name1) + case tp1 @ RefinedType(parent1, name1, rinfo1: TypeAlias) if name1 == tp2.refinedName && - tp1.refinedInfo.isInstanceOf[TypeAlias] && !tp2.refinementRefersToThis && !tp1.refinementRefersToThis => tp2.parent match { diff --git a/src/dotty/tools/dotc/core/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala index 39d02e069..a5aabe9c4 100644 --- a/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/src/dotty/tools/dotc/core/TypeErasure.scala @@ -430,7 +430,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean // constructor method should not be semi-erased. else if (isConstructor && isDerivedValueClass(sym)) eraseNormalClassRef(tp) else this(tp) - case RefinedType(parent, _) if !(parent isRef defn.ArrayClass) => + case RefinedType(parent, _, _) if !(parent isRef defn.ArrayClass) => eraseResult(parent) case _ => this(tp) diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index 1288c0b23..72b0c87c4 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -364,8 +364,8 @@ trait TypeOps { this: Context => // TODO: Make standalone object. 
def normalizeToRef(tp: Type): TypeRef = tp.dealias match { case tp: TypeRef => tp - case tp @ RefinedType(tp1, name: TypeName) => - tp.refinedInfo match { + case tp @ RefinedType(tp1, name: TypeName, rinfo) => + rinfo match { case TypeAlias(TypeRef(pre, name1)) if name1 == name && (pre =:= cls.thisType) => // Don't record refinements of the form X = this.X (These can arise using named parameters). typr.println(s"dropping refinement $tp") diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 632ab823a..b26fd6373 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1035,7 +1035,7 @@ object Types { /** the self type of the underlying classtype */ def givenSelfType(implicit ctx: Context): Type = this match { - case tp @ RefinedType(parent, name) => tp.wrapIfMember(parent.givenSelfType) + case tp: RefinedType => tp.wrapIfMember(tp.parent.givenSelfType) case tp: ThisType => tp.tref.givenSelfType case tp: TypeProxy => tp.underlying.givenSelfType case _ => NoType @@ -2029,10 +2029,11 @@ object Types { * @param infoFn: A function that produces the info of the refinement declaration, * given the refined type itself. */ - abstract case class RefinedType(parent: Type, refinedName: Name) + abstract case class RefinedType(private var myParent: Type, refinedName: Name, private var myRefinedInfo: Type) extends CachedProxyType with BindingType with ValueType { - val refinedInfo: Type + final def parent = myParent + final def refinedInfo = myRefinedInfo private var refinementRefersToThisCache: Boolean = _ private var refinementRefersToThisKnown: Boolean = false @@ -2053,7 +2054,7 @@ object Types { def checkInst(implicit ctx: Context): this.type = { if (refinedName == tpnme.hkApply) parent.stripTypeVar match { - case RefinedType(_, name) if name.isHkArgName => // ok + case RefinedType(_, name, _) if name.isHkArgName => // ok case _ => badInst } this @@ -2076,16 +2077,18 @@ object Types { case _ => false } - override def computeHash = doHash(refinedName, refinedInfo, parent) + override def computeHash = { + assert(parent.exists) + doHash(refinedName, refinedInfo, parent) + } + override def toString = s"RefinedType($parent, $refinedName, $refinedInfo | $hashCode)" } - class CachedRefinedType(parent: Type, refinedName: Name, infoFn: RefinedType => Type) extends RefinedType(parent, refinedName) { - val refinedInfo = infoFn(this) - } + class CachedRefinedType(refinedName: Name) extends RefinedType(NoType, refinedName, NoType) - class PreHashedRefinedType(parent: Type, refinedName: Name, override val refinedInfo: Type, hc: Int) - extends RefinedType(parent, refinedName) { + class PreHashedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type, hc: Int) + extends RefinedType(parent, refinedName, refinedInfo) { myHash = hc override def computeHash = unsupported("computeHash") } @@ -2095,9 +2098,22 @@ object Types { if (names.isEmpty) parent else make(RefinedType(parent, names.head, infoFns.head), names.tail, infoFns.tail) + def recursive(parentFn: RefinedType => Type, names: List[Name], infoFns: List[RefinedType => Type])(implicit ctx: Context): RefinedType = { + val refinements: List[RefinedType] = names.map(new CachedRefinedType(_)) + val last = refinements.last + (refinements, infoFns).zipped.foreach((rt, infoFn) => rt.myRefinedInfo = infoFn(last)) + (parentFn(last) /: refinements) { (parent, rt) => + rt.myParent = parent + ctx.base.uniqueRefinedTypes.enterIfNew(rt).checkInst + }.asInstanceOf[RefinedType] + } + def 
apply(parent: Type, name: Name, infoFn: RefinedType => Type)(implicit ctx: Context): RefinedType = { assert(!ctx.erasedTypes || ctx.mode.is(Mode.Printing)) - ctx.base.uniqueRefinedTypes.enterIfNew(new CachedRefinedType(parent, name, infoFn)).checkInst + val res: RefinedType = new CachedRefinedType(name) + res.myParent = parent + res.myRefinedInfo = infoFn(res) + ctx.base.uniqueRefinedTypes.enterIfNew(res).checkInst } def apply(parent: Type, name: Name, info: Type)(implicit ctx: Context): RefinedType = { @@ -2668,8 +2684,8 @@ object Types { } def isOrType(tp: Type): Boolean = tp.stripTypeVar.dealias match { case tp: OrType => true + case tp: RefinedType => isOrType(tp.parent) case AndType(tp1, tp2) => isOrType(tp1) | isOrType(tp2) - case RefinedType(parent, _) => isOrType(parent) case WildcardType(bounds: TypeBounds) => isOrType(bounds.hi) case _ => false } @@ -2860,6 +2876,7 @@ object Types { assert(hi.isInstanceOf[TermType]) def variance: Int = 0 + def isBinding = bindingKind != NoBinding override def underlying(implicit ctx: Context): Type = hi @@ -2876,6 +2893,8 @@ object Types { case _ => this } + def withBindingKind(bk: BindingKind)(implicit ctx: Context) = derivedTypeBounds(lo, hi, bk) + def contains(tp: Type)(implicit ctx: Context): Boolean = tp match { case tp: TypeBounds => lo <:< tp.lo && tp.hi <:< hi case tp: ClassInfo => diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 4cfd7727c..0cc08f2d9 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -236,7 +236,7 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleType(tpe.lo, richTypes) pickleType(tpe.hi, richTypes) - if (tpe.bindingKind != NoBinding) writeNat(tpe.bindingKind.n) + if (tpe.isBinding) writeNat(tpe.bindingKind.n) } case tpe: AnnotatedType => writeByte(ANNOTATED) diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 71a919ca3..687e9279b 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -620,9 +620,9 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas def removeSingleton(tp: Type): Type = if (tp isRef defn.SingletonClass) defn.AnyType else tp def elim(tp: Type): Type = tp match { - case tp @ RefinedType(parent, name) => + case tp @ RefinedType(parent, name, rinfo) => val parent1 = elim(tp.parent) - tp.refinedInfo match { + rinfo match { case TypeAlias(info: TypeRef) if isBound(info) => RefinedType(parent1, name, info.symbol.info) case info: TypeRef if isBound(info) => diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index 1e2ba0b4d..bac180efe 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -110,7 +110,7 @@ class PlainPrinter(_ctx: Context) extends Printer { */ private def refinementChain(tp: Type): List[Type] = tp :: (tp match { - case RefinedType(parent, _) => refinementChain(parent.stripTypeVar) + case tp: RefinedType => refinementChain(tp.parent.stripTypeVar) case _ => Nil }) diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 614a274b4..e0fd47900 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ 
b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -123,7 +123,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def contains(tp1: Type, tp2: Type): Boolean = tp1.eq(tp2) || { tp1.stripTypeVar match { - case RefinedType(parent, _) => contains(parent, tp2) + case tp1: RefinedType => contains(tp1.parent, tp2) case _ => false } } diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index a9184c7e5..14071e27c 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -741,7 +741,7 @@ trait Applications extends Compatibility { self: Typer => def isSubTypeOfParent(subtp: Type, tp: Type)(implicit ctx: Context): Boolean = if (subtp <:< tp) true else tp match { - case RefinedType(parent, _) => isSubTypeOfParent(subtp, parent) + case tp: RefinedType => isSubTypeOfParent(subtp, tp.parent) case _ => false } diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index 22d2407bc..37753fe65 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -172,8 +172,8 @@ object Checking { case tp: TermRef => this(tp.info) mapOver(tp) - case tp @ RefinedType(parent, name) => - tp.derivedRefinedType(this(parent), name, this(tp.refinedInfo, nestedCycleOK, nestedCycleOK)) + case tp @ RefinedType(parent, name, rinfo) => + tp.derivedRefinedType(this(parent), name, this(rinfo, nestedCycleOK, nestedCycleOK)) case tp @ TypeRef(pre, name) => try { // A prefix is interesting if it might contain (transitively) a reference diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala index 995fa43ca..f439c4c99 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -98,9 +98,9 @@ trait TypeAssigner { val base = apply(tycon) val args = tp.baseArgInfos(base.typeSymbol) if (base.typeParams.length == args.length) base.appliedTo(args) else base - case tp @ RefinedType(parent, name) if variance > 0 => val parent1 = apply(tp.parent) - val refinedInfo1 = apply(tp.refinedInfo) + case tp @ RefinedType(parent, name, rinfo) if variance > 0 => + val parent1 = apply(tp.parent) + val refinedInfo1 = apply(rinfo) if (toAvoid(refinedInfo1)) { typr.println(s"dropping refinement from $tp") parent1 diff --git a/src/dotty/tools/dotc/typer/Variances.scala b/src/dotty/tools/dotc/typer/Variances.scala index 55e6b5232..bc9730140 100644 --- a/src/dotty/tools/dotc/typer/Variances.scala +++ b/src/dotty/tools/dotc/typer/Variances.scala @@ -75,8 +75,8 @@ object Variances { case tp @ TypeBounds(lo, hi) => if (lo eq hi) compose(varianceInType(hi)(tparam), tp.variance) else flip(varianceInType(lo)(tparam)) & varianceInType(hi)(tparam) - case tp @ RefinedType(parent, _) => - varianceInType(parent)(tparam) & varianceInType(tp.refinedInfo)(tparam) + case tp @ RefinedType(parent, _, rinfo) => + varianceInType(parent)(tparam) & varianceInType(rinfo)(tparam) case tp @ MethodType(_, paramTypes) => flip(varianceInTypes(paramTypes)(tparam)) & varianceInType(tp.resultType)(tparam) case ExprType(restpe) => -- cgit v1.2.3 From 5d0318681f8c368f04796da5dd11ee1c9fcbdbd0 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 26 May 2016 08:51:14 +0200 Subject: Better printing of skolems They now print similarly to scalac: "?x" where `x` is a unique number. Todo: An offline explanation of what they are, similar to javac. I.e. ... ?3 ...
where ?3: T --- src/dotty/tools/dotc/core/Types.scala | 7 +++++++ src/dotty/tools/dotc/printing/PlainPrinter.scala | 3 +-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index b26fd6373..8eae84a51 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2596,6 +2596,13 @@ object Types { if (info eq this.info) this else SkolemType(info) override def hashCode: Int = identityHash override def equals(that: Any) = this eq that.asInstanceOf[AnyRef] + + private var myRepr: String = null + def repr(implicit ctx: Context) = { + if (myRepr == null) myRepr = ctx.freshName("?") + myRepr + } + override def toString = s"Skolem($info)" } diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index bac180efe..7053a0ea3 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -235,8 +235,7 @@ class PlainPrinter(_ctx: Context) extends Printer { case tp: RefinedThis => s"${nameString(tp.binder.typeSymbol)}{...}.this" case tp: SkolemType => - if (homogenizedView) toText(tp.info) - else "" + if (homogenizedView) toText(tp.info) else tp.repr } } -- cgit v1.2.3 From cdb4a1cb986f25eddf411dfc45aeb20dd994f7d5 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:02:34 +0200 Subject: New type lambda scheme for hk types --- docs/SyntaxSummary.txt | 2 +- src/dotty/tools/dotc/ast/Trees.scala | 16 ++++++++++ src/dotty/tools/dotc/ast/untpd.scala | 1 + src/dotty/tools/dotc/config/Config.scala | 2 ++ src/dotty/tools/dotc/core/TypeApplications.scala | 39 ++++++++++++++++++++++-- src/dotty/tools/dotc/parsing/Parsers.scala | 11 ++++++- src/dotty/tools/dotc/typer/TypeAssigner.scala | 10 ++++++ src/dotty/tools/dotc/typer/Typer.scala | 9 ++++++ 8 files changed, 86 insertions(+), 4 deletions(-) diff --git a/docs/SyntaxSummary.txt b/docs/SyntaxSummary.txt index d4f7ceade..45937fb54 100644 --- a/docs/SyntaxSummary.txt +++ b/docs/SyntaxSummary.txt @@ -96,6 +96,7 @@ grammar. ClassQualifier ::= `[' id `]' Type ::= FunArgTypes `=>' Type Function(ts, t) + | HkTypeParamClause `->' Type TypeLambda(ps, t) | InfixType FunArgTypes ::= InfixType | `(' [ FunArgType {`,' FunArgType } ] `)' @@ -125,7 +126,6 @@ grammar. 
TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type} ContextBounds(typeBounds, tps) Expr ::= FunParams `=>' Expr Function(args, expr), Function(ValDef([implicit], id, TypeTree(), EmptyTree), expr) - | Expr1 FunParams ::= Bindings | [`implicit'] id | `_' diff --git a/src/dotty/tools/dotc/ast/Trees.scala b/src/dotty/tools/dotc/ast/Trees.scala index 7463449c5..20ae02994 100644 --- a/src/dotty/tools/dotc/ast/Trees.scala +++ b/src/dotty/tools/dotc/ast/Trees.scala @@ -594,6 +594,12 @@ object Trees { def forwardTo = tpt } + /** [typeparams] -> tpt */ + case class TypeLambdaTree[-T >: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T]) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = TypeLambdaTree[T] + } + /** => T */ case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T]) extends TypTree[T] { @@ -851,6 +857,7 @@ object Trees { type OrTypeTree = Trees.OrTypeTree[T] type RefinedTypeTree = Trees.RefinedTypeTree[T] type AppliedTypeTree = Trees.AppliedTypeTree[T] + type TypeLambdaTree = Trees.TypeLambdaTree[T] type ByNameTypeTree = Trees.ByNameTypeTree[T] type TypeBoundsTree = Trees.TypeBoundsTree[T] type Bind = Trees.Bind[T] @@ -1028,6 +1035,10 @@ object Trees { case tree: AppliedTypeTree if (tpt eq tree.tpt) && (args eq tree.args) => tree case _ => finalize(tree, untpd.AppliedTypeTree(tpt, args)) } + def TypeLambdaTree(tree: Tree)(tparams: List[TypeDef], body: Tree): TypeLambdaTree = tree match { + case tree: TypeLambdaTree if (tparams eq tree.tparams) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.TypeLambdaTree(tparams, body)) + } def ByNameTypeTree(tree: Tree)(result: Tree): ByNameTypeTree = tree match { case tree: ByNameTypeTree if result eq tree.result => tree case _ => finalize(tree, untpd.ByNameTypeTree(result)) @@ -1160,6 +1171,8 @@ object Trees { cpy.RefinedTypeTree(tree)(transform(tpt), transformSub(refinements)) case AppliedTypeTree(tpt, args) => cpy.AppliedTypeTree(tree)(transform(tpt), transform(args)) + case TypeLambdaTree(tparams, body) => + cpy.TypeLambdaTree(tree)(transformSub(tparams), transform(body)) case ByNameTypeTree(result) => cpy.ByNameTypeTree(tree)(transform(result)) case TypeBoundsTree(lo, hi) => @@ -1264,6 +1277,9 @@ object Trees { this(this(x, tpt), refinements) case AppliedTypeTree(tpt, args) => this(this(x, tpt), args) + case TypeLambdaTree(tparams, body) => + implicit val ctx: Context = localCtx + this(this(x, tparams), body) case ByNameTypeTree(result) => this(x, result) case TypeBoundsTree(lo, hi) => diff --git a/src/dotty/tools/dotc/ast/untpd.scala b/src/dotty/tools/dotc/ast/untpd.scala index c7a7036c3..b3f8747dc 100644 --- a/src/dotty/tools/dotc/ast/untpd.scala +++ b/src/dotty/tools/dotc/ast/untpd.scala @@ -137,6 +137,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def OrTypeTree(left: Tree, right: Tree): OrTypeTree = new OrTypeTree(left, right) def RefinedTypeTree(tpt: Tree, refinements: List[Tree]): RefinedTypeTree = new RefinedTypeTree(tpt, refinements) def AppliedTypeTree(tpt: Tree, args: List[Tree]): AppliedTypeTree = new AppliedTypeTree(tpt, args) + def TypeLambdaTree(tparams: List[TypeDef], body: Tree): TypeLambdaTree = new TypeLambdaTree(tparams, body) def ByNameTypeTree(result: Tree): ByNameTypeTree = new ByNameTypeTree(result) def TypeBoundsTree(lo: Tree, hi: Tree): TypeBoundsTree = new TypeBoundsTree(lo, hi) def Bind(name: Name, body: Tree): Bind = new Bind(name, body) diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index 
3cc3091b5..be8a367d7 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -8,6 +8,8 @@ object Config { final val cacheMemberNames = true final val cacheImplicitScopes = true + final val newHK = false + final val checkCacheMembersNamed = false /** When updating a constraint bound, check that the constrained parameter diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 8ab5fbf02..2411e0bb2 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -15,8 +15,35 @@ import StdNames.tpnme import util.Positions.Position import config.Printers._ import collection.mutable + import dotty.tools.dotc.config.Config import java.util.NoSuchElementException +object TypeApplicationsNewHK { + import TypeApplications._ + + object TypeLambda { + def apply(argBindingFns: List[RefinedType => TypeBounds], + bodyFn: RefinedType => Type)(implicit ctx: Context): Type = { + val argNames = argBindingFns.indices.toList.map(tpnme.hkArg) + RefinedType.recursive(bodyFn, argNames, argBindingFns) + } + + def unapply(tp: Type)(implicit ctx: Context): Option[(List[TypeBounds], Type)] = { + def decompose(t: Type, acc: List[TypeBounds]): (List[TypeBounds], Type) = t match { + case t @ RefinedType(p, rname, rinfo: TypeBounds) + if rname.isHkArgName && rinfo.isBinding => + decompose(p, rinfo.bounds :: acc) + case _ => + (acc, t) + } + decompose(tp, Nil) match { + case (Nil, _) => None + case x => Some(x) + } + } + } +} + object TypeApplications { /** Assert type is not a TypeBounds instance and return it unchanged */ @@ -51,6 +78,14 @@ object TypeApplications { * [v1 X1: B1, ..., vn Xn: Bn] -> T * ==> * ([X_i := this.$hk_i] T) { type v_i $hk_i: (new)B_i } + * + * [X] -> List[X] + * + * List { type List$A = this.$hk_0 } { type $hk_0 } + * + * [X] -> X + * + * mu(this) this.$hk_0 & { type $hk_0 } */ object TypeLambda { def apply(variances: List[Int], @@ -388,9 +423,9 @@ class TypeApplications(val self: Type) extends AnyVal { /** Replace references to type parameters with references to hk arguments `this.$hk_i` * Care is needed not to cause cyclic reference errors, hence `SafeSubstMap`. */ - private[TypeApplications] def internalizeFrom[T <: Type](tparams: List[Symbol])(implicit ctx: Context): RefinedType => T = + def internalizeFrom[T <: Type](tparams: List[Symbol])(implicit ctx: Context): RefinedType => T = (rt: RefinedType) => - new ctx.SafeSubstMap(tparams , argRefs(rt, tparams.length)) + new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)) .apply(self).asInstanceOf[T] /** Lambda abstract `self` with given type parameters. Examples: diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala index ded17c67c..0cc392bad 100644 --- a/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/src/dotty/tools/dotc/parsing/Parsers.scala @@ -223,7 +223,9 @@ object Parsers { } // DEBUG private def expectedMsg(token: Int): String = - showToken(token) + " expected but " + showToken(in.token) + " found." + expectedMessage(showToken(token)) + private def expectedMessage(what: String): String = + s"$what expected but ${showToken(in.token)} found" /** Consume one token of the specified type, or * signal an error if it is not there. 
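For orientation, a sketch of the new surface syntax that the parser change in the next hunk accepts (hypothetical user code, not part of this patch; how far such aliases elaborate still depends on the newHK scheme being wired up). The right-hand sides are type lambdas written with the new HkTypeParamClause `->' Type production; the comments quote the internal encoding documented on TypeLambda in TypeApplications.scala above.

    // Hypothetical source using the new [X] -> T syntax:
    type Id     = [X] -> X        // encodes to  mu(this) this.$hk_0 & { type $hk_0 }
    type Lifted = [X] -> List[X]  // encodes to  List { type List$A = this.$hk_0 } { type $hk_0 }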
@@ -648,6 +650,7 @@ object Parsers { /* ------------- TYPES ------------------------------------------------------ */ /** Type ::= FunArgTypes `=>' Type + * | HkTypeParamClause `->' Type * | InfixType * FunArgTypes ::= InfixType * | `(' [ FunArgType {`,' FunArgType } ] `)' @@ -677,6 +680,12 @@ object Parsers { } } } + else if (in.token == LBRACKET) { + val tparams = typeParamClause(ParamOwner.TypeParam) + if (isIdent && in.name.toString == "->") + atPos(in.skipToken())(TypeLambdaTree(tparams, typ())) + else { syntaxErrorOrIncomplete(expectedMessage("`->'")); typ() } + } else infixType() in.token match { diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala index f439c4c99..b686e6eed 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -425,6 +425,16 @@ trait TypeAssigner { tree.withType(ownType) } + def assignType(tree: untpd.TypeLambdaTree, tparamDefs: List[TypeDef], body: Tree)(implicit ctx: Context) = { + val tparams = tparamDefs.map(_.symbol) + val argBindingFns = tparams.map(tparam => + tparam.info.bounds + .withBindingKind(BindingKind.fromVariance(tparam.variance)) + .internalizeFrom(tparams)) + val bodyFn = body.tpe.internalizeFrom(tparams) + tree.withType(TypeApplicationsNewHK.TypeLambda(argBindingFns, bodyFn)) + } + def assignType(tree: untpd.ByNameTypeTree, result: Tree)(implicit ctx: Context) = tree.withType(ExprType(result.tpe)) diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 268020ec5..d5a32dbc0 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -958,6 +958,14 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit } } + def typedTypeLambdaTree(tree: untpd.TypeLambdaTree)(implicit ctx: Context): Tree = track("typedTypeLambdaTree") { + val TypeLambdaTree(tparams, body) = tree + index(tparams) + val tparams1 = tparams.mapconserve(typed(_).asInstanceOf[TypeDef]) + val body1 = typedType(tree.body) + assignType(cpy.TypeLambdaTree(tree)(tparams1, body1), tparams1, body1) + } + def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(implicit ctx: Context): ByNameTypeTree = track("typedByNameTypeTree") { val result1 = typed(tree.result) assignType(cpy.ByNameTypeTree(tree)(result1), result1) @@ -1272,6 +1280,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit case tree: untpd.OrTypeTree => typedOrTypeTree(tree) case tree: untpd.RefinedTypeTree => typedRefinedTypeTree(tree) case tree: untpd.AppliedTypeTree => typedAppliedTypeTree(tree) + case tree: untpd.TypeLambdaTree => typedTypeLambdaTree(tree)(localContext(tree, NoSymbol).setNewScope) case tree: untpd.ByNameTypeTree => typedByNameTypeTree(tree) case tree: untpd.TypeBoundsTree => typedTypeBoundsTree(tree) case tree: untpd.Alternative => typedAlternative(tree, pt) -- cgit v1.2.3 From 850dc6f2fb3b6228f2586ce0790621e80f664afe Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:04:03 +0200 Subject: Introduce recursive types Map self-references in refinements to recursive types. This commit does this for refinement types appearing in source. We still have to do it for unpickled refinements. Test apply-equiv got moved to pending because it simulates the old higher-kinded type encoding in source, which relies on the old representation in terms of self-referential refinement types. 
The plan is not to adapt this encoding to the new representation, but to replace it with a different encoding that makes critical use of the added power of recursive types. Use recursive types also when unpickling from Scala 2.x. Add mapInfo method to Denotations. --- src/dotty/tools/dotc/core/Denotations.scala | 10 +- src/dotty/tools/dotc/core/Substituters.scala | 22 ++++ src/dotty/tools/dotc/core/SymDenotations.scala | 3 +- src/dotty/tools/dotc/core/TypeApplications.scala | 40 +++++-- src/dotty/tools/dotc/core/TypeComparer.scala | 31 ++++- src/dotty/tools/dotc/core/TypeOps.scala | 23 +++- src/dotty/tools/dotc/core/Types.scala | 129 +++++++++++++++++++-- src/dotty/tools/dotc/core/tasty/TastyFormat.scala | 8 +- src/dotty/tools/dotc/core/tasty/TreePickler.scala | 8 ++ .../tools/dotc/core/tasty/TreeUnpickler.scala | 7 +- .../dotc/core/unpickleScala2/Scala2Unpickler.scala | 13 +-- src/dotty/tools/dotc/printing/PlainPrinter.scala | 14 +++ src/dotty/tools/dotc/typer/Checking.scala | 8 +- src/dotty/tools/dotc/typer/TypeAssigner.scala | 4 +- src/dotty/tools/dotc/typer/Typer.scala | 7 +- src/dotty/tools/dotc/typer/Variances.scala | 2 + tests/pending/pos/apply-equiv.scala | 14 +++ tests/pos/apply-equiv.scala | 14 --- tests/pos/lookuprefined.scala | 6 +- 19 files changed, 310 insertions(+), 53 deletions(-) create mode 100644 tests/pending/pos/apply-equiv.scala delete mode 100644 tests/pos/apply-equiv.scala diff --git a/src/dotty/tools/dotc/core/Denotations.scala b/src/dotty/tools/dotc/core/Denotations.scala index 5ce8cbcd8..494df7547 100644 --- a/src/dotty/tools/dotc/core/Denotations.scala +++ b/src/dotty/tools/dotc/core/Denotations.scala @@ -146,6 +146,9 @@ object Denotations { /** Is this denotation different from NoDenotation or an ErrorDenotation? */ def exists: Boolean = true + /** A denotation with the info of this denotation transformed using `f` */ + def mapInfo(f: Type => Type)(implicit ctx: Context): Denotation + /** If this denotation does not exist, fallback to alternative */ final def orElse(that: => Denotation) = if (this.exists) this else that @@ -242,7 +245,7 @@ object Denotations { } else if (exists && !qualifies(symbol)) NoDenotation else asSingleDenotation - } + } /** Form a denotation by conjoining with denotation `that`. 
* @@ -456,6 +459,8 @@ object Denotations { else if (!d2.exists) d1 else derivedMultiDenotation(d1, d2) } + def mapInfo(f: Type => Type)(implicit ctx: Context): Denotation = + derivedMultiDenotation(denot1.mapInfo(f), denot2.mapInfo(f)) def derivedMultiDenotation(d1: Denotation, d2: Denotation) = if ((d1 eq denot1) && (d2 eq denot2)) this else MultiDenotation(d1, d2) override def toString = alternatives.mkString(" ") @@ -488,6 +493,9 @@ object Denotations { if ((symbol eq this.symbol) && (info eq this.info)) this else newLikeThis(symbol, info) + def mapInfo(f: Type => Type)(implicit ctx: Context): SingleDenotation = + derivedSingleDenotation(symbol, f(info)) + def orElse(that: => SingleDenotation) = if (this.exists) this else that def altsWith(p: Symbol => Boolean): List[SingleDenotation] = diff --git a/src/dotty/tools/dotc/core/Substituters.scala b/src/dotty/tools/dotc/core/Substituters.scala index 0083ac626..4598aaa20 100644 --- a/src/dotty/tools/dotc/core/Substituters.scala +++ b/src/dotty/tools/dotc/core/Substituters.scala @@ -197,6 +197,24 @@ trait Substituters { this: Context => .mapOver(tp) } + final def substRecThis(tp: Type, from: Type, to: Type, theMap: SubstRecThisMap): Type = + tp match { + case tp @ RecThis(binder) => + if (binder eq from) to else tp + case tp: NamedType => + if (tp.currentSymbol.isStatic) tp + else tp.derivedSelect(substRecThis(tp.prefix, from, to, theMap)) + case _: ThisType | _: BoundType | NoPrefix => + tp + case tp: RefinedType => + tp.derivedRefinedType(substRecThis(tp.parent, from, to, theMap), tp.refinedName, substRecThis(tp.refinedInfo, from, to, theMap)) + case tp: TypeAlias => + tp.derivedTypeAlias(substRecThis(tp.alias, from, to, theMap)) + case _ => + (if (theMap != null) theMap else new SubstRecThisMap(from, to)) + .mapOver(tp) + } + final def substParam(tp: Type, from: ParamType, to: Type, theMap: SubstParamMap): Type = tp match { case tp: BoundType => @@ -270,6 +288,10 @@ trait Substituters { this: Context => def apply(tp: Type): Type = substRefinedThis(tp, from, to, this) } + final class SubstRecThisMap(from: Type, to: Type) extends DeepTypeMap { + def apply(tp: Type): Type = substRecThis(tp, from, to, this) + } + final class SubstParamMap(from: ParamType, to: Type) extends DeepTypeMap { def apply(tp: Type) = substParam(tp, from, to, this) } diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index 5c4e120a8..7715885c4 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1121,10 +1121,11 @@ object SymDenotations { def debugString = toString + "#" + symbol.id // !!! 
DEBUG - def hasSkolems(tp: Type): Boolean = tp match { + def hasSkolems(tp: Type): Boolean = tp match { case tp: SkolemType => true case tp: NamedType => hasSkolems(tp.prefix) case tp: RefinedType => hasSkolems(tp.parent) || hasSkolems(tp.refinedInfo) + case tp: RecType => hasSkolems(tp.parent) case tp: PolyType => tp.paramBounds.exists(hasSkolems) || hasSkolems(tp.resType) case tp: MethodType => tp.paramTypes.exists(hasSkolems) || hasSkolems(tp.resType) case tp: ExprType => hasSkolems(tp.resType) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 2411e0bb2..bd115fefb 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -22,10 +22,16 @@ object TypeApplicationsNewHK { import TypeApplications._ object TypeLambda { - def apply(argBindingFns: List[RefinedType => TypeBounds], - bodyFn: RefinedType => Type)(implicit ctx: Context): Type = { + def apply(argBindingFns: List[RecType => TypeBounds], + bodyFn: RecType => Type)(implicit ctx: Context): Type = { val argNames = argBindingFns.indices.toList.map(tpnme.hkArg) - RefinedType.recursive(bodyFn, argNames, argBindingFns) + var idx = 0 + RecType.closeOver(rt => + (bodyFn(rt) /: argBindingFns) { (parent, argBindingFn) => + val res = RefinedType(parent, tpnme.hkArg(idx), argBindingFn(rt)) + idx += 1 + res + }) } def unapply(tp: Type)(implicit ctx: Context): Option[(List[TypeBounds], Type)] = { @@ -33,6 +39,8 @@ object TypeApplicationsNewHK { case t @ RefinedType(p, rname, rinfo: TypeBounds) if rname.isHkArgName && rinfo.isBinding => decompose(p, rinfo.bounds :: acc) + case t: RecType => + decompose(t.parent, acc) case _ => (acc, t) } @@ -78,13 +86,13 @@ object TypeApplications { * [v1 X1: B1, ..., vn Xn: Bn] -> T * ==> * ([X_i := this.$hk_i] T) { type v_i $hk_i: (new)B_i } - * + * * [X] -> List[X] - * + * * List { type List$A = this.$hk_0 } { type $hk_0 } - * + * * [X] -> X - * + * * mu(this) this.$hk_0 & { type $hk_0 } */ object TypeLambda { @@ -212,6 +220,10 @@ object TypeApplications { def argRefs(rt: RefinedType, n: Int)(implicit ctx: Context) = List.range(0, n).map(i => RefinedThis(rt).select(tpnme.hkArg(i))) + /** The references `.this.$hk0, ..., .this.$hk`. */ + def argRefs(rt: RecType, n: Int)(implicit ctx: Context) = + List.range(0, n).map(i => RecThis(rt).select(tpnme.hkArg(i))) + /** Merge `tp1` and `tp2` under a common lambda, combining them with `op`. * @param tparams1 The type parameters of `tp1` * @param tparams2 The type parameters of `tp2` @@ -400,7 +412,7 @@ class TypeApplications(val self: Type) extends AnyVal { * but without forcing anything. */ def classNotLambda(implicit ctx: Context): Boolean = self.stripTypeVar match { - case self: RefinedType => + case self: RefinedOrRecType => self.parent.classNotLambda case self: TypeRef => self.denot.exists && { @@ -428,6 +440,14 @@ class TypeApplications(val self: Type) extends AnyVal { new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)) .apply(self).asInstanceOf[T] + /** Replace references to type parameters with references to hk arguments `this.$hk_i` + * Care is needed not to cause cyclic reference errors, hence `SafeSubstMap`. + */ + def recursify[T <: Type](tparams: List[Symbol])(implicit ctx: Context): RecType => T = + (rt: RecType) => + new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)) + .apply(self).asInstanceOf[T] + /** Lambda abstract `self` with given type parameters. 
Examples: * * type T[X] = U becomes type T = [X] -> U @@ -546,6 +566,8 @@ class TypeApplications(val self: Type) extends AnyVal { arg.prefix.select(boundLambda) case arg: RefinedType => arg.derivedRefinedType(adaptArg(arg.parent), arg.refinedName, arg.refinedInfo) + case arg: RecType => + arg.derivedRecType(adaptArg(arg.parent)) case arg @ TypeAlias(alias) => arg.derivedTypeAlias(adaptArg(alias)) case arg @ TypeBounds(lo, hi) => @@ -814,6 +836,8 @@ class TypeApplications(val self: Type) extends AnyVal { } case tp: RefinedType => recur(tp.refinedInfo) || recur(tp.parent) + case tp: RecType => + recur(tp.parent) case tp: TypeBounds => recur(tp.lo) || recur(tp.hi) case tp: AnnotatedType => diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index d1dc4069d..9909c9e8a 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -378,6 +378,9 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) } compareRefined + case tp2: RecType => + val tp1stable = ensureStableSingleton(tp1) + isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.substRecThis(tp2, tp1stable)) case OrType(tp21, tp22) => // Rewrite T1 <: (T211 & T212) | T22 to T1 <: (T211 | T22) and T1 <: (T212 | T22) // and analogously for T1 <: T21 | (T221 & T222) @@ -465,7 +468,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case _ => def isNullable(tp: Type): Boolean = tp.dealias match { case tp: TypeRef => tp.symbol.isNullableClass - case tp: RefinedType => isNullable(tp.parent) + case tp: RefinedOrRecType => isNullable(tp.parent) case AndType(tp1, tp2) => isNullable(tp1) && isNullable(tp2) case OrType(tp1, tp2) => isNullable(tp1) || isNullable(tp2) case _ => false @@ -494,6 +497,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { isNewSubType(tp1.parent, tp2) || compareHkLambda(tp1, tp2, inOrder = true) || compareAliasedRefined(tp1, tp2, inOrder = true) + case tp1: RecType => + isNewSubType(tp1.parent, tp2) case AndType(tp11, tp12) => // Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 | T12) <: T2 // and analogously for T11 & (T121 | T122) & T12 <: T2 @@ -642,6 +647,25 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } } + /** Replace any top-level recursive type `{ z => T }` in `tp` with + * `[z := anchor]T`. + */ + private def fixRecs(anchor: SingletonType, tp: Type): Type = { + def fix(tp: Type): Type = tp.stripTypeVar match { + case tp: RecType => fix(tp.parent).substRecThis(tp, anchor) + case tp @ RefinedType(parent, rname, rinfo) => tp.derivedRefinedType(fix(parent), rname, rinfo) + case tp: PolyParam => fixOrElse(bounds(tp).hi, tp) + case tp: TypeProxy => fixOrElse(tp.underlying, tp) + case tp: AndOrType => tp.derivedAndOrType(fix(tp.tp1), fix(tp.tp2)) + case tp => tp + } + def fixOrElse(tp: Type, fallback: Type) = { + val tp1 = fix(tp) + if (tp1 ne tp) tp1 else fallback + } + fix(tp) + } + /** The symbol referred to in the refinement of `rt` */ private def refinedSymbol(rt: RefinedType) = rt.parent.member(rt.refinedName).symbol @@ -772,7 +796,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { /** A type has been covered previously in subtype checking if it * is some combination of TypeRefs that point to classes, where the - * combiners are RefinedTypes, AndTypes or AnnotatedTypes. 
+ * combiners are RefinedTypes, RecTypes, AndTypes or AnnotatedTypes. * One exception: Refinements referring to basetype args are never considered * to be already covered. This is necessary because such refined types might * still need to be compared with a compareAliasRefined. @@ -781,6 +805,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp: TypeRef => tp.symbol.isClass && tp.symbol != NothingClass && tp.symbol != NullClass case tp: ProtoType => false case tp: RefinedType => isCovered(tp.parent) && !refinedSymbol(tp).is(BaseTypeArg) + case tp: RecType => isCovered(tp.parent) case tp: AnnotatedType => isCovered(tp.underlying) case AndType(tp1, tp2) => isCovered(tp1) && isCovered(tp2) case _ => false @@ -1118,6 +1143,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case _ => NoType } + case tp1: RecType => + tp1.rebind(distributeAnd(tp1.parent, tp2)) case tp1: TypeBounds => tp2 match { case tp2: TypeBounds => tp1 & tp2 diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index 72b0c87c4..54846087f 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -232,11 +232,16 @@ trait TypeOps { this: Context => // TODO: Make standalone object. } case _ => } + tp1 match { + case tp1: RecType => + tp1.rebind(approximateOr(tp1.parent, tp2)) case tp1: TypeProxy if !isClassRef(tp1) => approximateUnion(next(tp1) | tp2) case _ => tp2 match { + case tp2: RecType => + tp2.rebind(approximateOr(tp1, tp2.parent)) case tp2: TypeProxy if !isClassRef(tp2) => approximateUnion(tp1 | next(tp2)) case _ => @@ -252,16 +257,32 @@ trait TypeOps { this: Context => // TODO: Make standalone object. if (ctx.featureEnabled(defn.LanguageModuleClass, nme.keepUnions)) tp else tp match { case tp: OrType => - approximateOr(tp.tp1, tp.tp2) + approximateOr(tp.tp1, tp.tp2) // Maybe refactor using liftToRec? case tp @ AndType(tp1, tp2) => tp derived_& (approximateUnion(tp1), approximateUnion(tp2)) case tp: RefinedType => tp.derivedRefinedType(approximateUnion(tp.parent), tp.refinedName, tp.refinedInfo) + case tp: RecType => + tp.rebind(approximateUnion(tp.parent)) case _ => tp } } + /** Not currently needed: + * + def liftToRec(f: (Type, Type) => Type)(tp1: Type, tp2: Type)(implicit ctx: Context) = { + def f2(tp1: Type, tp2: Type): Type = tp2 match { + case tp2: RecType => tp2.rebind(f(tp1, tp2.parent)) + case _ => f(tp1, tp2) + } + tp1 match { + case tp1: RecType => tp1.rebind(f2(tp1.parent, tp2)) + case _ => f2(tp1, tp2) + } + } + */ + private def enterArgBinding(formal: Symbol, info: Type, cls: ClassSymbol, decls: Scope) = { val lazyInfo = new LazyType { // needed so we do not force `formal`. 
def complete(denot: SymDenotation)(implicit ctx: Context): Unit = { diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 8eae84a51..5252a9149 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -54,7 +54,8 @@ object Types { * | | +----RefinedThis * | | +--- SkolemType * | +- PolyParam - * | +- RefinedType + * | +- RefinedOrRecType -+-- RefinedType + * | | -+-- RecType * | +- TypeBounds * | +- ExprType * | +- AnnotatedType @@ -97,7 +98,7 @@ object Types { final def isStable(implicit ctx: Context): Boolean = stripTypeVar match { case tp: TermRef => tp.termSymbol.isStable && tp.prefix.isStable case _: SingletonType | NoPrefix => true - case tp: RefinedType => tp.parent.isStable + case tp: RefinedOrRecType => tp.parent.isStable case _ => false } @@ -113,7 +114,7 @@ object Types { case TypeAlias(tp) => tp.isRef(sym) case _ => this1.symbol eq sym } - case this1: RefinedType => + case this1: RefinedOrRecType => this1.parent.isRef(sym) case _ => false @@ -447,6 +448,8 @@ object Types { }) case tp: PolyParam => goParam(tp) + case tp: RecType => + goRec(tp) case tp: TypeProxy => go(tp.underlying) case tp: ClassInfo => @@ -462,6 +465,8 @@ object Types { case _ => NoDenotation } + def goRec(tp: RecType) = + go(tp.parent).mapInfo(_.substRecThis(tp, pre)) def goRefined(tp: RefinedType) = { val pdenot = go(tp.parent) val rinfo = @@ -864,6 +869,7 @@ object Types { def isParamName = tp.classSymbol.typeParams.exists(_.name == tp.refinedName) if (refinementOK || isParamName) tp.underlying.underlyingClassRef(refinementOK) else NoType + case tp: RecType if refinementOK => tp.parent case _ => NoType } @@ -958,6 +964,13 @@ object Types { } case _ => loop(pre.parent) } + case pre: RecType => + val candidate = loop(pre.parent) + if (candidate.exists && !pre.isReferredToBy(candidate)) { + //println(s"lookupRefined ${this.toString} . $name, pre: $pre ---> $candidate / ${candidate.toString}") + candidate + } + else NoType case RefinedThis(binder) => binder.lookupRefined(name) case SkolemType(tp) => @@ -1152,6 +1165,10 @@ object Types { final def substRefinedThis(binder: Type, tp: Type)(implicit ctx: Context): Type = ctx.substRefinedThis(this, binder, tp, null) + /** Substitute all occurrences of `RecThis(binder)` by `tp` */ + final def substRecThis(binder: RecType, tp: Type)(implicit ctx: Context): Type = + ctx.substRecThis(this, binder, tp, null) + /** Substitute a bound type by some other type */ final def substParam(from: ParamType, to: Type)(implicit ctx: Context): Type = ctx.substParam(this, from, to, null) @@ -2022,7 +2039,11 @@ object Types { override def hashCode = ref.hashCode + 37 } - // --- Refined Type --------------------------------------------------------- + // --- Refined Type and RecType ------------------------------------------------ + + abstract class RefinedOrRecType extends CachedProxyType with ValueType { + def parent: Type + } /** A refined type parent { refinement } * @param refinedName The name of the refinement declaration @@ -2030,7 +2051,7 @@ object Types { * given the refined type itself. 
*/ abstract case class RefinedType(private var myParent: Type, refinedName: Name, private var myRefinedInfo: Type) - extends CachedProxyType with BindingType with ValueType { + extends RefinedOrRecType with BindingType { final def parent = myParent final def refinedInfo = myRefinedInfo @@ -2082,7 +2103,7 @@ object Types { doHash(refinedName, refinedInfo, parent) } - override def toString = s"RefinedType($parent, $refinedName, $refinedInfo | $hashCode)" + override def toString = s"RefinedType($parent, $refinedName, $refinedInfo)" } class CachedRefinedType(refinedName: Name) extends RefinedType(NoType, refinedName, NoType) @@ -2122,6 +2143,73 @@ object Types { } } + class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType { + + val parent = parentExp(this) + + override def underlying(implicit ctx: Context): Type = parent + + def derivedRecType(parent: Type)(implicit ctx: Context): RecType = + if (parent eq this.parent) this + else RecType(rt => parent.substRecThis(this, RecThis(rt))) + + def rebind(parent: Type)(implicit ctx: Context): Type = + if (parent eq this.parent) this + else RecType.closeOver(rt => parent.substRecThis(this, RecThis(rt))) + + override def equals(other: Any) = other match { + case other: RecType => other.parent == this.parent + case _ => false + } + + def isReferredToBy(tp: Type)(implicit ctx: Context): Boolean = { + val refacc = new TypeAccumulator[Boolean] { + override def apply(x: Boolean, tp: Type) = x || { + tp match { + case tp: TypeRef => apply(x, tp.prefix) + case tp: RecThis => RecType.this eq tp.binder + case _ => foldOver(x, tp) + } + } + } + refacc.apply(false, tp) + } + + override def computeHash = doHash(parent) + + override def toString = s"RecType($parent | $hashCode)" + } + + object RecType { + def checkInst(tp: Type)(implicit ctx: Context): tp.type = { + var binders: List[RecType] = Nil + tp.foreachPart { + case rt: RecType => binders = rt :: binders + case rt: RecThis => assert(binders contains rt.binder, tp) + case _ => + } + tp + } + def apply(parentExp: RecType => Type)(implicit ctx: Context): RecType = checkInst { + var rt = new RecType(parentExp) + rt.parent match { + case rt2: RecType => + rt = rt2.derivedRecType(rt2.parent.substRecThis(rt, RecThis(rt2))) + case _ => + } + unique(rt) + } + def closeOver(parentExp: RecType => Type)(implicit ctx: Context) = checkInst { + val rt = this(parentExp) + //val self = RecThis(rt) + def isSelfRef(t: Type) = t match { + case RecThis(binder) => binder eq rt + case _ => false + } + if (rt.existsPart(isSelfRef)) rt else rt.parent + } + } + // --- AndType/OrType --------------------------------------------------------------- trait AndOrType extends ValueType { // todo: check where we can simplify using AndOrType @@ -2587,6 +2675,22 @@ object Types { override def toString = s"RefinedThis(${binder.hashCode})" } + /** a self-reference to an enclosing recursive type. */ + case class RecThis(binder: RecType) extends BoundType with SingletonType { + type BT = RecType + override def underlying(implicit ctx: Context) = binder + def copyBoundType(bt: BT) = RecThis(bt) + + // need to customize hashCode and equals to prevent infinite recursion + // between RecTypes and RecRefs. 
+ override def computeHash = addDelta(binder.identityHash, 41) + override def equals(that: Any) = that match { + case that: RecThis => this.binder eq that.binder + case _ => false + } + override def toString = s"RecThis(${binder.hashCode})" + } + // ----- Skolem types ----------------------------------------------- /** A skolem type reference with underlying type `binder`. */ @@ -2691,7 +2795,7 @@ object Types { } def isOrType(tp: Type): Boolean = tp.stripTypeVar.dealias match { case tp: OrType => true - case tp: RefinedType => isOrType(tp.parent) + case tp: RefinedOrRecType => isOrType(tp.parent) case AndType(tp1, tp2) => isOrType(tp1) | isOrType(tp2) case WildcardType(bounds: TypeBounds) => isOrType(bounds.hi) case _ => false @@ -3160,6 +3264,8 @@ object Types { tp.derivedSelect(pre) protected def derivedRefinedType(tp: RefinedType, parent: Type, info: Type): Type = tp.derivedRefinedType(parent, tp.refinedName, info) + protected def derivedRecType(tp: RecType, parent: Type): Type = + tp.rebind(parent) protected def derivedTypeAlias(tp: TypeAlias, alias: Type): Type = tp.derivedTypeAlias(alias) protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type): Type = @@ -3234,6 +3340,9 @@ object Types { } mapOverPoly + case tp: RecType => + derivedRecType(tp, this(tp.parent)) + case tp @ SuperType(thistp, supertp) => derivedSuperType(tp, this(thistp), this(supertp)) @@ -3335,6 +3444,9 @@ object Types { override protected def derivedRefinedType(tp: RefinedType, parent: Type, info: Type) = if (parent.exists && info.exists) tp.derivedRefinedType(parent, tp.refinedName, info) else approx(hi = parent) + override protected def derivedRecType(tp: RecType, parent: Type) = + if (parent.exists) tp.rebind(parent) + else approx() override protected def derivedTypeAlias(tp: TypeAlias, alias: Type) = if (alias.exists) tp.derivedTypeAlias(alias) else approx(NoType, TypeBounds.empty) @@ -3433,6 +3545,9 @@ object Types { variance = -variance this(y, tp.resultType) + case tp: RecType => + this(x, tp.parent) + case SuperType(thistp, supertp) => this(this(x, thistp), supertp) diff --git a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index a42958e75..e9708961a 100644 --- a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -103,7 +103,7 @@ Standard-Section: "ASTs" TopLevelStat* TERMREFpkg fullyQualified_NameRef TERMREF possiblySigned_NameRef qual_Type THIS clsRef_Type - REFINEDthis refinedType_ASTRef + RECthis recType_ASTRef SHARED path_ASTRef Constant = UNITconst @@ -126,6 +126,7 @@ Standard-Section: "ASTs" TopLevelStat* TYPEREFsymbol sym_ASTRef qual_Type TYPEREFpkg fullyQualified_NameRef TYPEREF possiblySigned_NameRef qual_Type + RECtype parent_Type SUPERtype Length this_Type underlying_Type REFINEDtype Length underlying_Type refinement_NameRef info_Type APPLIEDtype Length tycon_Type arg_Type* @@ -259,6 +260,7 @@ object TastyFormat { final val TERMREFpkg = 67 final val TYPEREFpkg = 68 final val REFINEDthis = 69 + final val RECthis = REFINEDthis // !!! 
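  // RECthis currently shares REFINEDthis's tag value; when reading, TreeUnpickler
  // (changed later in this patch) distinguishes the two by checking whether the
  // referenced binder is a RefinedType or a RecType.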
final val BYTEconst = 70 final val SHORTconst = 71 final val CHARconst = 72 @@ -277,6 +279,7 @@ object TastyFormat { final val IMPLICITarg = 101 final val PRIVATEqualified = 102 final val PROTECTEDqualified = 103 + final val RECtype = 104 final val IDENT = 112 final val SELECT = 113 @@ -417,7 +420,7 @@ object TastyFormat { case TYPEREFdirect => "TYPEREFdirect" case TERMREFpkg => "TERMREFpkg" case TYPEREFpkg => "TYPEREFpkg" - case REFINEDthis => "REFINEDthis" + case RECthis => "RECthis" case BYTEconst => "BYTEconst" case SHORTconst => "SHORTconst" case CHARconst => "CHARconst" @@ -426,6 +429,7 @@ object TastyFormat { case FLOATconst => "FLOATconst" case DOUBLEconst => "DOUBLEconst" case STRINGconst => "STRINGconst" + case RECtype => "RECtype" case IDENT => "IDENT" case SELECT => "SELECT" diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 0cc08f2d9..9f703b5af 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -212,6 +212,11 @@ class TreePickler(pickler: TastyPickler) { val binderAddr = pickledTypes.get(tpe.binder) assert(binderAddr != null, tpe.binder) writeRef(binderAddr.asInstanceOf[Addr]) + case tpe: RecThis => + writeByte(RECthis) + val binderAddr = pickledTypes.get(tpe.binder) + assert(binderAddr != null, tpe.binder) + writeRef(binderAddr.asInstanceOf[Addr]) case tpe: SkolemType => pickleType(tpe.info) case tpe: RefinedType => @@ -221,6 +226,9 @@ class TreePickler(pickler: TastyPickler) { pickleType(tpe.parent) pickleType(tpe.refinedInfo, richTypes = true) } + case tpe: RecType => + writeByte(RECtype) + pickleType(tpe.parent) case tpe: TypeAlias => writeByte(TYPEALIAS) withLength { diff --git a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 2b8e5f019..1b4e7845a 100644 --- a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -335,8 +335,13 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) { } case THIS => ThisType.raw(readType().asInstanceOf[TypeRef]) + case RECtype => + RecType(rt => registeringType(rt, readType())) case REFINEDthis => - RefinedThis(readTypeRef().asInstanceOf[RefinedType]) + readTypeRef() match { + case t: RefinedType => RefinedThis(t) + case t: RecType => RecThis(t) + } case SHARED => val ref = readAddr() typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType()) diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 687e9279b..2663777af 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -722,13 +722,12 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val parent = parents.reduceLeft(AndType(_, _)) if (decls.isEmpty) parent else { - def addRefinement(tp: Type, sym: Symbol) = { - def subst(info: Type, rt: RefinedType) = - if (clazz.isClass) info.substThis(clazz.asClass, RefinedThis(rt)) - else info // turns out some symbols read into `clazz` are not classes, not sure why this is the case. 
- RefinedType(tp, sym.name, subst(sym.info, _)) - } - (parent /: decls.toList)(addRefinement).asInstanceOf[RefinedType] + def subst(info: Type, rt: RecType) = + if (clazz.isClass) info.substThis(clazz.asClass, RecThis(rt)) + else info // turns out some symbols read into `clazz` are not classes, not sure why this is the case. + def addRefinement(tp: Type, sym: Symbol) = RefinedType(tp, sym.name, sym.info) + val refined = (parent /: decls.toList)(addRefinement) + RecType.closeOver(rt => subst(refined, rt)) } case CLASSINFOtpe => val clazz = readSymbolRef() diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index 7053a0ea3..59f1608db 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -13,6 +13,8 @@ import scala.annotation.switch class PlainPrinter(_ctx: Context) extends Printer { protected[this] implicit def ctx: Context = _ctx.addMode(Mode.Printing) + private var openRecs: List[RecType] = Nil + protected def maxToTextRecursions = 100 protected final def controlled(op: => Text): Text = @@ -58,6 +60,8 @@ class PlainPrinter(_ctx: Context) extends Printer { } else tp + private def selfRecName(n: Int) = s"z$n" + /** Render elements alternating with `sep` string */ protected def toText(elems: Traversable[Showable], sep: String) = Text(elems map (_ toText this), sep) @@ -130,6 +134,12 @@ class PlainPrinter(_ctx: Context) extends Printer { val parent :: (refined: List[RefinedType @unchecked]) = refinementChain(tp).reverse toTextLocal(parent) ~ "{" ~ Text(refined map toTextRefinement, "; ").close ~ "}" + case tp: RecType => + try { + openRecs = tp :: openRecs + "{" ~ selfRecName(openRecs.length) ~ " => " ~ toTextGlobal(tp.parent) ~ "}" + } + finally openRecs = openRecs.tail case AndType(tp1, tp2) => changePrec(AndPrec) { toText(tp1) ~ " & " ~ toText(tp2) } case OrType(tp1, tp2) => @@ -232,6 +242,10 @@ class PlainPrinter(_ctx: Context) extends Printer { toText(value) case MethodParam(mt, idx) => nameString(mt.paramNames(idx)) + case tp: RecThis => + val idx = openRecs.reverse.indexOf(tp.binder) + if (idx >= 0) selfRecName(idx + 1) + else "{...}.this" // TODO move underlying type to an addendum, e.g. ... z3 ... where z3: ... case tp: RefinedThis => s"${nameString(tp.binder.typeSymbol)}{...}.this" case tp: SkolemType => diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index 37753fe65..6944197a1 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -174,6 +174,8 @@ object Checking { mapOver(tp) case tp @ RefinedType(parent, name, rinfo) => tp.derivedRefinedType(this(parent), name, this(rinfo, nestedCycleOK, nestedCycleOK)) + case tp: RecType => + tp.rebind(this(tp.parent)) case tp @ TypeRef(pre, name) => try { // A prefix is interesting if it might contain (transitively) a reference @@ -187,7 +189,7 @@ object Checking { case SuperType(thistp, _) => isInteresting(thistp) case AndType(tp1, tp2) => isInteresting(tp1) || isInteresting(tp2) case OrType(tp1, tp2) => isInteresting(tp1) && isInteresting(tp2) - case _: RefinedType => true + case _: RefinedOrRecType => true case _ => false } if (isInteresting(pre)) { @@ -433,12 +435,14 @@ trait Checking { } /** Check that any top-level type arguments in this type are feasible, i.e. that - * their lower bound conforms to their upper cound. If a type argument is + * their lower bound conforms to their upper bound. 
If a type argument is * infeasible, issue and error and continue with upper bound. */ def checkFeasible(tp: Type, pos: Position, where: => String = "")(implicit ctx: Context): Type = tp match { case tp: RefinedType => tp.derivedRefinedType(tp.parent, tp.refinedName, checkFeasible(tp.refinedInfo, pos, where)) + case tp: RecType => + tp.rebind(tp.parent) case tp @ TypeBounds(lo, hi) if !(lo <:< hi) => ctx.error(d"no type exists between low bound $lo and high bound $hi$where", pos) TypeAlias(hi) diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala index b686e6eed..47c3631b8 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -430,8 +430,8 @@ trait TypeAssigner { val argBindingFns = tparams.map(tparam => tparam.info.bounds .withBindingKind(BindingKind.fromVariance(tparam.variance)) - .internalizeFrom(tparams)) - val bodyFn = body.tpe.internalizeFrom(tparams) + .recursify(tparams)) + val bodyFn = body.tpe.recursify(tparams) tree.withType(TypeApplicationsNewHK.TypeLambda(argBindingFns, bodyFn)) } diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index d5a32dbc0..fb3418563 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -917,11 +917,12 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit if ((rsym.is(Method) || rsym.isType) && rsym.allOverriddenSymbols.isEmpty) ctx.error(i"refinement $rsym without matching type in parent $parent", refinement.pos) val rinfo = if (rsym is Accessor) rsym.info.resultType else rsym.info - RefinedType(parent, rsym.name, rt => rinfo.substThis(refineCls, RefinedThis(rt))) + RefinedType(parent, rsym.name, rinfo) // todo later: check that refinement is within bounds } - val res = cpy.RefinedTypeTree(tree)(tpt1, refinements1) withType - (tpt1.tpe /: refinements1)(addRefinement) + val refined = (tpt1.tpe /: refinements1)(addRefinement) + val res = cpy.RefinedTypeTree(tree)(tpt1, refinements1).withType( + RecType.closeOver(rt => refined.substThis(refineCls, RecThis(rt)))) typr.println(i"typed refinement: ${res.tpe}") res } diff --git a/src/dotty/tools/dotc/typer/Variances.scala b/src/dotty/tools/dotc/typer/Variances.scala index bc9730140..e88423f98 100644 --- a/src/dotty/tools/dotc/typer/Variances.scala +++ b/src/dotty/tools/dotc/typer/Variances.scala @@ -77,6 +77,8 @@ object Variances { else flip(varianceInType(lo)(tparam)) & varianceInType(hi)(tparam) case tp @ RefinedType(parent, _, rinfo) => varianceInType(parent)(tparam) & varianceInType(rinfo)(tparam) + case tp: RecType => + varianceInType(tp.parent)(tparam) case tp @ MethodType(_, paramTypes) => flip(varianceInTypes(paramTypes)(tparam)) & varianceInType(tp.resultType)(tparam) case ExprType(restpe) => diff --git a/tests/pending/pos/apply-equiv.scala b/tests/pending/pos/apply-equiv.scala new file mode 100644 index 000000000..f53b8b5ab --- /dev/null +++ b/tests/pending/pos/apply-equiv.scala @@ -0,0 +1,14 @@ +class Test { + + class Lambda { type Arg; type Apply } + + type T1 = (Lambda { type Arg = Int } { type Apply = List[Arg] }) # Apply + type T2 = List[Int] + + var x: T1 = _ + var y: T2 = _ + + x = y + y = x + +} diff --git a/tests/pos/apply-equiv.scala b/tests/pos/apply-equiv.scala deleted file mode 100644 index f53b8b5ab..000000000 --- a/tests/pos/apply-equiv.scala +++ /dev/null @@ -1,14 +0,0 @@ -class Test { - - class Lambda { type Arg; type Apply } - - type T1 = (Lambda { type 
Arg = Int } { type Apply = List[Arg] }) # Apply - type T2 = List[Int] - - var x: T1 = _ - var y: T2 = _ - - x = y - y = x - -} diff --git a/tests/pos/lookuprefined.scala b/tests/pos/lookuprefined.scala index f7e7f7337..9dd2b4abb 100644 --- a/tests/pos/lookuprefined.scala +++ b/tests/pos/lookuprefined.scala @@ -2,7 +2,9 @@ class C { type T; type U } trait Test { - val x: (C { type U = T } { type T = String }) # U - val y: String = x + val x1: (C { type U = T; type T = String }) # U + val x2: (C { type U = T } {type T = String }) # U + val y1: String = x1 + val y2: String = x2 } -- cgit v1.2.3 From af43d325b778973ad9e144b5c27c455febb98890 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:05:20 +0200 Subject: Abstract type parameters out from type symbols In the new hk scheme, a type parameter can be represented by a refinement without a corresponding symbol. Therefore, we need to disentangle the info inherent in a type parameter from the contents of a type symbol. We achieve this by creating a common super trait "MemerInfo" of Symbol and RefinedType. --- src/dotty/tools/dotc/core/MemberBinding.scala | 34 +++++++++++++++++ src/dotty/tools/dotc/core/Symbols.scala | 9 ++++- src/dotty/tools/dotc/core/TypeApplications.scala | 32 +++++++++------- src/dotty/tools/dotc/core/TypeComparer.scala | 43 +++++++++++++--------- src/dotty/tools/dotc/core/Types.scala | 19 ++++++++-- .../dotc/core/classfile/ClassfileParser.scala | 2 +- src/dotty/tools/dotc/typer/Implicits.scala | 2 +- src/dotty/tools/dotc/typer/Namer.scala | 2 +- src/dotty/tools/dotc/typer/TypeAssigner.scala | 4 +- src/dotty/tools/dotc/typer/Typer.scala | 6 +-- 10 files changed, 110 insertions(+), 43 deletions(-) create mode 100644 src/dotty/tools/dotc/core/MemberBinding.scala diff --git a/src/dotty/tools/dotc/core/MemberBinding.scala b/src/dotty/tools/dotc/core/MemberBinding.scala new file mode 100644 index 000000000..6f081c542 --- /dev/null +++ b/src/dotty/tools/dotc/core/MemberBinding.scala @@ -0,0 +1,34 @@ +package dotty.tools.dotc.core + +import Names.Name +import Contexts.Context +import Types.{Type, TypeBounds} + +/** A common super trait of Symbol and Refinement. + * Used to capture the attributes of type parameters + * which can be implemented as either symbols or refinements. + */ +trait MemberBinding { + + /** Does this binding represent a type parameter? + * Only in that case the rest of the binding's methods are significant. + */ + def isTypeParam(implicit ctx: Context): Boolean + + /** The name of the member */ + def memberName(implicit ctx: Context): Name + + /** The info of the member */ + def memberBounds(implicit ctx: Context): TypeBounds + + /** The info of the member as seen from a prefix type. + * This can be different from `memberInfo` if the binding + * is a type symbol of a class. 
+ */ + def memberBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds + + /** The variance of the type parameter + * @pre: isTypeParam = true + */ + def memberVariance(implicit ctx: Context): Int +} \ No newline at end of file diff --git a/src/dotty/tools/dotc/core/Symbols.scala b/src/dotty/tools/dotc/core/Symbols.scala index 1b605e24f..c7eb54812 100644 --- a/src/dotty/tools/dotc/core/Symbols.scala +++ b/src/dotty/tools/dotc/core/Symbols.scala @@ -367,7 +367,7 @@ object Symbols { * @param coord The coordinates of the symbol (a position or an index) * @param id A unique identifier of the symbol (unique per ContextBase) */ - class Symbol private[Symbols] (val coord: Coord, val id: Int) extends DotClass with printing.Showable { + class Symbol private[Symbols] (val coord: Coord, val id: Int) extends DotClass with MemberBinding with printing.Showable { type ThisName <: Name @@ -489,6 +489,13 @@ object Symbols { */ def pos: Position = if (coord.isPosition) coord.toPosition else NoPosition + // MemberBinding methods + def isTypeParam(implicit ctx: Context) = denot.is(TypeParam) + def memberName(implicit ctx: Context): Name = name + def memberBounds(implicit ctx: Context) = denot.info.bounds + def memberBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = pre.memberInfo(this).bounds + def memberVariance(implicit ctx: Context) = denot.variance + // -------- Printing -------------------------------------------------------- /** The prefix string to be used when displaying this symbol without denotation */ diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index bd115fefb..d9521b3c8 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -75,10 +75,10 @@ object TypeApplications { /** Does the variance of `sym1` conform to the variance of `sym2`? * This is the case if the variances are the same or `sym` is nonvariant. */ - def varianceConforms(sym1: TypeSymbol, sym2: TypeSymbol)(implicit ctx: Context) = - sym1.variance == sym2.variance || sym2.variance == 0 + def varianceConforms(sym1: MemberBinding, sym2: MemberBinding)(implicit ctx: Context) = + sym1.memberVariance == sym2.memberVariance || sym2.memberVariance == 0 - def variancesConform(syms1: List[TypeSymbol], syms2: List[TypeSymbol])(implicit ctx: Context) = + def variancesConform(syms1: List[MemberBinding], syms2: List[MemberBinding])(implicit ctx: Context) = syms1.corresponds(syms2)(varianceConforms) /** Extractor for @@ -143,7 +143,7 @@ object TypeApplications { object EtaExpansion { def apply(tycon: TypeRef)(implicit ctx: Context) = { assert(tycon.isEtaExpandable) - tycon.EtaExpand(tycon.typeParams) + tycon.EtaExpand(tycon.typeParamSymbols) } def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = { @@ -280,7 +280,7 @@ class TypeApplications(val self: Type) extends AnyVal { * with the bounds on its hk args. See `LambdaAbstract`, where these * types get introduced, and see `isBoundedLambda` below for the test. 
*/ - final def typeParams(implicit ctx: Context): List[TypeSymbol] = /*>|>*/ track("typeParams") /*<|<*/ { + final def typeParams(implicit ctx: Context): List[MemberBinding] = /*>|>*/ track("typeParams") /*<|<*/ { self match { case self: ClassInfo => self.cls.typeParams @@ -309,7 +309,7 @@ class TypeApplications(val self: Type) extends AnyVal { val sym = self.parent.classSymbol if (sym.isLambdaTrait) return sym.typeParams } - self.parent.typeParams.filterNot(_.name == self.refinedName) + self.parent.typeParams.filterNot(_.memberName == self.refinedName) case self: SingletonType => Nil case self: TypeProxy => @@ -319,6 +319,12 @@ class TypeApplications(val self: Type) extends AnyVal { } } + final def typeParamSymbols(implicit ctx: Context): List[TypeSymbol] = { + val tparams = typeParams + assert(tparams.isEmpty || tparams.head.isInstanceOf[Symbol]) + tparams.asInstanceOf[List[TypeSymbol]] + } + /** The named type parameters declared or inherited by this type. * These are all uninstantiated named type parameters of this type or one * of its base types. @@ -498,7 +504,7 @@ class TypeApplications(val self: Type) extends AnyVal { * v1 is compatible with v2, if v1 = v2 or v2 is non-variant. */ def EtaExpand(tparams: List[TypeSymbol])(implicit ctx: Context): Type = { - val tparamsToUse = if (variancesConform(typeParams, tparams)) tparams else typeParams + val tparamsToUse = if (variancesConform(typeParams, tparams)) tparams else typeParamSymbols self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparamsToUse) //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") } @@ -508,7 +514,7 @@ class TypeApplications(val self: Type) extends AnyVal { case self: RefinedType => self.derivedRefinedType(self.parent.EtaExpandCore, self.refinedName, self.refinedInfo) case _ => - self.EtaExpand(self.typeParams) + self.EtaExpand(self.typeParamSymbols) } /** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */ @@ -621,12 +627,12 @@ class TypeApplications(val self: Type) extends AnyVal { * @param args = `U1, ..., Un` * @param tparams are assumed to be the type parameters of `T`. */ - final def appliedTo(args: List[Type], typParams: List[TypeSymbol])(implicit ctx: Context): Type = { - def matchParams(t: Type, tparams: List[TypeSymbol], args: List[Type])(implicit ctx: Context): Type = args match { + final def appliedTo(args: List[Type], typParams: List[MemberBinding])(implicit ctx: Context): Type = { + def matchParams(t: Type, tparams: List[MemberBinding], args: List[Type])(implicit ctx: Context): Type = args match { case arg :: args1 => try { val tparam :: tparams1 = tparams - matchParams(RefinedType(t, tparam.name, arg.toBounds(tparam)), tparams1, args1) + matchParams(RefinedType(t, tparam.memberName, arg.toBounds(tparam)), tparams1, args1) } catch { case ex: MatchError => println(s"applied type mismatch: $self $args, typeParams = $typParams") // !!! DEBUG @@ -667,11 +673,11 @@ class TypeApplications(val self: Type) extends AnyVal { /** Turn this type, which is used as an argument for * type parameter `tparam`, into a TypeBounds RHS */ - final def toBounds(tparam: Symbol)(implicit ctx: Context): TypeBounds = self match { + final def toBounds(tparam: MemberBinding)(implicit ctx: Context): TypeBounds = self match { case self: TypeBounds => // this can happen for wildcard args self case _ => - val v = tparam.variance + val v = tparam.memberVariance /* Not neeeded. 
if (v > 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.upper(self) else if (v < 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.lower(self) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 9909c9e8a..58c6bea3a 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -534,7 +534,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * - the type parameters of `B` match one-by-one the variances of `tparams`, * - `B` satisfies predicate `p`. */ - private def testLifted(tp1: Type, tp2: Type, tparams: List[TypeSymbol], p: Type => Boolean): Boolean = { + private def testLifted(tp1: Type, tp2: Type, tparams: List[MemberBinding], p: Type => Boolean): Boolean = { val classBounds = tp2.member(tpnme.hkApply).info.classSymbols def recur(bcs: List[ClassSymbol]): Boolean = bcs match { case bc :: bcs1 => @@ -647,7 +647,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } } - /** Replace any top-level recursive type `{ z => T }` in `tp` with + /** Replace any top-level recursive type `{ z => T }` in `tp` with * `[z := anchor]T`. */ private def fixRecs(anchor: SingletonType, tp: Type): Type = { @@ -726,24 +726,31 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { val rebindNeeded = tp2.refinementRefersToThis val base = if (rebindNeeded) ensureStableSingleton(tp1) else tp1 val rinfo2 = if (rebindNeeded) tp2.refinedInfo.substRefinedThis(tp2, base) else tp2.refinedInfo + val mbr = base.member(name) + def qualifies(m: SingleDenotation) = isSubType(m.info, rinfo2) - def memberMatches(mbr: Denotation): Boolean = mbr match { // inlined hasAltWith for performance + + def memberMatches: Boolean = mbr match { // inlined hasAltWith for performance case mbr: SingleDenotation => qualifies(mbr) case _ => mbr hasAltWith qualifies } - /*>|>*/ ctx.traceIndented(i"hasMatchingMember($base . $name :? ${tp2.refinedInfo}) ${base.member(name).info.show} $rinfo2", subtyping) /*<|<*/ { - memberMatches(base member name) || - tp1.isInstanceOf[SingletonType] && - { // special case for situations like: - // class C { type T } - // val foo: C - // foo.type <: C { type T = foo.T } - rinfo2 match { - case rinfo2: TypeAlias => - !defn.isBottomType(base.widen) && (base select name) =:= rinfo2.alias - case _ => false - } - } + + // special case for situations like: + // class C { type T } + // val foo: C + // foo.type <: C { type T = foo.T } + def selfReferentialMatch = tp1.isInstanceOf[SingletonType] && { + rinfo2 match { + case rinfo2: TypeAlias => + !defn.isBottomType(base.widen) && (base select name) =:= rinfo2.alias + case _ => false + } + } + + def varianceMatches = true // TODO: fill in + + /*>|>*/ ctx.traceIndented(i"hasMatchingMember($base . $name :? ${tp2.refinedInfo}) ${mbr.info.show} $rinfo2", subtyping) /*<|<*/ { + (memberMatches || selfReferentialMatch) && varianceMatches } } @@ -1117,8 +1124,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * in where we allow which interpretation. 
*/ private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type) = { - val tparams1 = tp1.typeParams - val tparams2 = tp2.typeParams + val tparams1 = tp1.typeParamSymbols // TODO revise for new hk scheme + val tparams2 = tp2.typeParamSymbols def onlyNamed(tparams: List[TypeSymbol]) = tparams.forall(!_.is(ExpandedName)) if (tparams1.isEmpty || tparams2.isEmpty || onlyNamed(tparams1) && onlyNamed(tparams2)) op(tp1, tp2) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 5252a9149..cba13ef81 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -869,7 +869,7 @@ object Types { def isParamName = tp.classSymbol.typeParams.exists(_.name == tp.refinedName) if (refinementOK || isParamName) tp.underlying.underlyingClassRef(refinementOK) else NoType - case tp: RecType if refinementOK => tp.parent + case tp: RecType if refinementOK => tp.parent case _ => NoType } @@ -2051,7 +2051,7 @@ object Types { * given the refined type itself. */ abstract case class RefinedType(private var myParent: Type, refinedName: Name, private var myRefinedInfo: Type) - extends RefinedOrRecType with BindingType { + extends RefinedOrRecType with BindingType with MemberBinding { final def parent = myParent final def refinedInfo = myRefinedInfo @@ -2090,6 +2090,16 @@ object Types { if (parent.member(refinedName).exists) derivedRefinedType(parent, refinedName, refinedInfo) else parent + // MemberBinding methods + def isTypeParam(implicit ctx: Context) = refinedInfo match { + case tp: TypeBounds => tp.isBinding + case _ => false + } + def memberName(implicit ctx: Context) = refinedName + def memberBounds(implicit ctx: Context) = refinedInfo.bounds + def memberBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = memberBounds + def memberVariance(implicit ctx: Context) = BindingKind.toVariance(refinedInfo.bounds.bindingKind) + override def equals(that: Any) = that match { case that: RefinedType => this.parent == that.parent && @@ -3120,7 +3130,10 @@ object Types { object BindingKind { def fromVariance(v: Int): BindingKind = new BindingKind((v + NonvariantBinding.n).toByte) - def toVariance(bk: BindingKind): Int = bk.n + def toVariance(bk: BindingKind): Int = { + assert(bk.n != 0) + bk.n - NonvariantBinding.n + } } // ----- Annotated and Import types ----------------------------------------------- diff --git a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index f7a69aa53..2d7b037b1 100644 --- a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -272,7 +272,7 @@ class ClassfileParser( if (sig(index) == '<') { accept('<') var tp1: Type = tp - var formals = tp.typeParams + var formals = tp.typeParamSymbols while (sig(index) != '>') { sig(index) match { case variance @ ('+' | '-' | '*') => diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala index 7de40294d..e21a08fb8 100644 --- a/src/dotty/tools/dotc/typer/Implicits.scala +++ b/src/dotty/tools/dotc/typer/Implicits.scala @@ -325,7 +325,7 @@ trait ImplicitRunInfo { self: RunInfo => } def addParentScope(parent: TypeRef): Unit = { iscopeRefs(parent) foreach addRef - for (param <- parent.typeParams) + for (param <- parent.typeParamSymbols) comps ++= iscopeRefs(tp.member(param.name).info) } val companion = cls.companionModule diff --git a/src/dotty/tools/dotc/typer/Namer.scala 
b/src/dotty/tools/dotc/typer/Namer.scala index a8f3b8918..8437b651c 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -989,7 +989,7 @@ class Namer { typer: Typer => if (args.nonEmpty) { val tycon = tp.withoutArgs(args) val tycon1 = this(tycon) - val tparams = tycon.typeParams + val tparams = tycon.typeParamSymbols val args1 = if (args.length == tparams.length) etaExpandIfHK(tparams, args) else args if ((tycon1 eq tycon) && (args1 eq args)) tp else tycon1.appliedTo(args1) } else mapOver(tp) diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala index 47c3631b8..b7e2fd832 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -409,11 +409,11 @@ trait TypeAssigner { def refineNamed(tycon: Type, arg: Tree) = arg match { case ast.Trees.NamedArg(name, argtpt) => // Dotty deviation: importing ast.Trees._ and matching on NamedArg gives a cyclic ref error - val tparam = tparams.find(_.name == name) match { + val tparam = tparams.find(_.memberName == name) match { case Some(tparam) => tparam case none => ntparams.find(_.name == name).getOrElse(NoSymbol) } - if (tparam.exists) RefinedType(tycon, name, argtpt.tpe.toBounds(tparam)) + if (tparam.isTypeParam) RefinedType(tycon, name, argtpt.tpe.toBounds(tparam)) else errorType(i"$tycon does not have a parameter or abstract type member named $name", arg.pos) case _ => errorType(s"named and positional type arguments may not be mixed", arg.pos) diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index fb3418563..225516503 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -943,14 +943,14 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos) args = args.take(tparams.length) } - def typedArg(arg: untpd.Tree, tparam: Symbol) = { + def typedArg(arg: untpd.Tree, tparam: MemberBinding) = { val (desugaredArg, argPt) = if (ctx.mode is Mode.Pattern) - (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.info) + (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.memberBounds) else (arg, WildcardType) val arg1 = typed(desugaredArg, argPt) - adaptTypeArg(arg1, tparam.info) + adaptTypeArg(arg1, tparam.memberBounds) } args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]] } -- cgit v1.2.3 From aa7f66d3f7d299733158a08bad5ac0d746497d81 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:06:13 +0200 Subject: Disable checkInst in RecType It can give false negatives. Also, simplify RecType.closeOver --- src/dotty/tools/dotc/core/Types.scala | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index cba13ef81..42abc4251 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2191,16 +2191,20 @@ object Types { } object RecType { + /* Note: this might well fail for nested Recs. + * Failing scenario: Rebind a nest rec, creates a new rec + * but it still has RecThis references to the outer rec. 
def checkInst(tp: Type)(implicit ctx: Context): tp.type = { var binders: List[RecType] = Nil tp.foreachPart { case rt: RecType => binders = rt :: binders - case rt: RecThis => assert(binders contains rt.binder, tp) + case rt: RecThis => assert(binders contains rt.binder) case _ => } tp } - def apply(parentExp: RecType => Type)(implicit ctx: Context): RecType = checkInst { + */ + def apply(parentExp: RecType => Type)(implicit ctx: Context): RecType = { var rt = new RecType(parentExp) rt.parent match { case rt2: RecType => @@ -2209,14 +2213,9 @@ object Types { } unique(rt) } - def closeOver(parentExp: RecType => Type)(implicit ctx: Context) = checkInst { + def closeOver(parentExp: RecType => Type)(implicit ctx: Context) = { val rt = this(parentExp) - //val self = RecThis(rt) - def isSelfRef(t: Type) = t match { - case RecThis(binder) => binder eq rt - case _ => false - } - if (rt.existsPart(isSelfRef)) rt else rt.parent + if (rt.isReferredToBy(rt.parent)) rt else rt.parent } } -- cgit v1.2.3 From 08a0ea65b911726b327a9caf36e0e48acb5c5e93 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:07:28 +0200 Subject: Add printing of type lambda trees Also fix printing of variances in typedefs and params; they were suppressed before. --- src/dotty/tools/dotc/ast/tpd.scala | 2 +- src/dotty/tools/dotc/core/Flags.scala | 3 ++- src/dotty/tools/dotc/printing/RefinedPrinter.scala | 15 +++++++++++---- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/src/dotty/tools/dotc/ast/tpd.scala b/src/dotty/tools/dotc/ast/tpd.scala index eff054030..4593b9554 100644 --- a/src/dotty/tools/dotc/ast/tpd.scala +++ b/src/dotty/tools/dotc/ast/tpd.scala @@ -21,7 +21,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { private def ta(implicit ctx: Context) = ctx.typeAssigner def Modifiers(sym: Symbol)(implicit ctx: Context): Modifiers = Modifiers( - sym.flags & ModifierFlags, + sym.flags & (if (sym.isType) ModifierFlags | VarianceFlags else ModifierFlags), if (sym.privateWithin.exists) sym.privateWithin.asType.name else tpnme.EMPTY, sym.annotations map (_.tree)) diff --git a/src/dotty/tools/dotc/core/Flags.scala b/src/dotty/tools/dotc/core/Flags.scala index cd660aa46..bdd6bbdcf 100644 --- a/src/dotty/tools/dotc/core/Flags.scala +++ b/src/dotty/tools/dotc/core/Flags.scala @@ -436,7 +436,8 @@ object Flags { /** Flags representing modifiers that can appear in trees */ final val ModifierFlags = - SourceModifierFlags | Module | Param | Synthetic | Package | Local | commonFlags(Mutable) + SourceModifierFlags | Module | Param | Synthetic | Package | Local | + commonFlags(Mutable) // | Trait is subsumed by commonFlags(Lazy) from SourceModifierFlags assert(ModifierFlags.isTermFlags && ModifierFlags.isTypeFlags) diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index e0fd47900..1020468a9 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -123,12 +123,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def contains(tp1: Type, tp2: Type): Boolean = tp1.eq(tp2) || { tp1.stripTypeVar match { - case tp1: RefinedType => contains(tp1.parent, tp2) + case tp1: RefinedOrRecType => contains(tp1.parent, tp2) case _ => false } } def apply(t: Type): Type = t match { - case TypeRef(RefinedThis(rt), name) if name.isHkArgName && contains(tp, rt) => + case TypeRef(RecThis(rt), name) if name.isHkArgName && contains(tp, rt) => // Make up a name that prints 
as "Xi". Need to be careful we do not // accidentally unique-hash to something else. That's why we can't // use prefix = NoPrefix or a WithFixedSym instance. @@ -207,7 +207,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { import untpd.{modsDeco => _, _} - /** Print modifiers form symbols if tree has type, overriding the untpd behavior. */ + /** Print modifiers from symbols if tree has type, overriding the untpd behavior. */ implicit def modsDeco(mdef: untpd.MemberDef)(implicit ctx: Context): untpd.ModsDeco = tpd.modsDeco(mdef.asInstanceOf[tpd.MemberDef]).asInstanceOf[untpd.ModsDeco] @@ -264,6 +264,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { Text(mods.annotations.map(annotText), " ") ~~ flagsText ~~ (kw provided !suppressKw) } + def varianceText(mods: untpd.Modifiers) = + if (mods is Covariant) "+" + else if (mods is Contravariant) "-" + else "" + def argText(arg: Tree): Text = arg match { case arg: TypeBoundsTree => "_" ~ toTextGlobal(arg) case arg: TypeTree => @@ -398,6 +403,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toTextLocal(tpt) ~ " " ~ blockText(refines) case AppliedTypeTree(tpt, args) => toTextLocal(tpt) ~ "[" ~ Text(args map argText, ", ") ~ "]" + case TypeLambdaTree(tparams, body) => + tparamsText(tparams) ~ " -> " ~ toText(body) case ByNameTypeTree(tpt) => "=> " ~ toTextLocal(tpt) case TypeBoundsTree(lo, hi) => @@ -431,7 +438,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case tree @ TypeDef(name, rhs) => def typeDefText(rhsText: Text) = dclTextOr { - modText(tree.mods, "type") ~~ nameIdText(tree) ~ + modText(tree.mods, "type") ~~ (varianceText(tree.mods) ~ nameIdText(tree)) ~ withEnclosingDef(tree) { val rhsText1 = if (tree.hasType) toText(tree.symbol.info) else rhsText tparamsText(tree.tparams) ~ rhsText1 -- cgit v1.2.3 From 178e90e441481364f19163a9dad624a4d859fb1b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 27 May 2016 23:25:26 +0200 Subject: Remove special case in parser There was a special case that triggered a parse error in this course def lift[T <: Type](tp: T): (RecType => T) = arg match { case rt0: RecType => tp.subst(rt0, _).asInstanceOf[T] case _ => (x => tp) } The problem was that the rhs of the first case became a Function node, which caused a premature return from the case clause sequence. I could not determine anymore what the purpose of the removed case in the parser was; all tests compile without it. 
--- src/dotty/tools/dotc/parsing/Parsers.scala | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala index 0cc392bad..51b681c0e 100644 --- a/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/src/dotty/tools/dotc/parsing/Parsers.scala @@ -2138,17 +2138,10 @@ object Parsers { var exitOnError = false while (!isStatSeqEnd && in.token != CASE && !exitOnError) { setLastStatOffset() - if (in.token == IMPORT) { + if (in.token == IMPORT) stats ++= importClause() - } - else if (isExprIntro) { - val t = expr(Location.InBlock) - stats += t - t match { - case _: Function => return stats.toList - case _ => - } - } + else if (isExprIntro) + stats += expr(Location.InBlock) else if (isDefIntro(localModifierTokens)) if (in.token == IMPLICIT) { val start = in.skipToken() -- cgit v1.2.3 From 4bf43f82c88dbeb0578e289b37ce1a7580aa22f2 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:09:43 +0200 Subject: Turn on new hk scheme For the moment under newHK flag. - avoid crasher in derivedTypeParams (NamedTypes don't always have symbols) - Revise logic in type comparer for new HK scheme --- src/dotty/tools/dotc/ast/Desugar.scala | 2 +- src/dotty/tools/dotc/config/Config.scala | 2 +- src/dotty/tools/dotc/core/Definitions.scala | 10 +- src/dotty/tools/dotc/core/NameOps.scala | 10 +- src/dotty/tools/dotc/core/StdNames.scala | 10 +- src/dotty/tools/dotc/core/SymDenotations.scala | 4 +- src/dotty/tools/dotc/core/TypeApplications.scala | 341 ++++++++++++++------- src/dotty/tools/dotc/core/TypeComparer.scala | 144 +++++++-- src/dotty/tools/dotc/core/TypeOps.scala | 3 +- src/dotty/tools/dotc/core/Types.scala | 34 +- .../tools/dotc/core/tasty/TreeUnpickler.scala | 4 +- .../dotc/core/unpickleScala2/Scala2Unpickler.scala | 2 +- src/dotty/tools/dotc/printing/PlainPrinter.scala | 2 +- src/dotty/tools/dotc/printing/RefinedPrinter.scala | 3 +- src/dotty/tools/dotc/typer/TypeAssigner.scala | 2 +- 15 files changed, 404 insertions(+), 169 deletions(-) diff --git a/src/dotty/tools/dotc/ast/Desugar.scala b/src/dotty/tools/dotc/ast/Desugar.scala index f603f6817..a9705e209 100644 --- a/src/dotty/tools/dotc/ast/Desugar.scala +++ b/src/dotty/tools/dotc/ast/Desugar.scala @@ -66,7 +66,7 @@ object desugar { val relocate = new TypeMap { val originalOwner = sym.owner def apply(tp: Type) = tp match { - case tp: NamedType if tp.symbol.owner eq originalOwner => + case tp: NamedType if tp.symbol.exists && (tp.symbol.owner eq originalOwner) => val defctx = ctx.outersIterator.dropWhile(_.scope eq ctx.scope).next var local = defctx.denotNamed(tp.name).suchThat(_ is ParamOrAccessor).symbol if (local.exists) (defctx.owner.thisType select local).dealias diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index be8a367d7..63ecdc76d 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -8,7 +8,7 @@ object Config { final val cacheMemberNames = true final val cacheImplicitScopes = true - final val newHK = false + final val newHK = true final val checkCacheMembersNamed = false diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala index 5cb373cfd..1311bb583 100644 --- a/src/dotty/tools/dotc/core/Definitions.scala +++ b/src/dotty/tools/dotc/core/Definitions.scala @@ -668,7 +668,7 @@ class Definitions { private var myLambdaTraits: Set[Symbol] = Set() /** The set of HigherKindedXYZ traits 
encountered so far */ - def lambdaTraits: Set[Symbol] = myLambdaTraits + def lambdaTraitsOBS: Set[Symbol] = myLambdaTraits private var LambdaTraitForVariances = mutable.Map[List[Int], ClassSymbol]() @@ -689,7 +689,7 @@ class Definitions { * - for each positive or negative variance v_i there is a parent trait Pj which * is the same as LambdaXYZ except that it has `I` in i-th position. */ - def LambdaTrait(vcs: List[Int]): ClassSymbol = { + def LambdaTraitOBS(vcs: List[Int]): ClassSymbol = { assert(vcs.nonEmpty) def varianceFlags(v: Int) = v match { @@ -704,16 +704,16 @@ class Definitions { val paramDecls = newScope for (i <- 0 until vcs.length) newTypeParam(cls, tpnme.hkArg(i), varianceFlags(vcs(i)), paramDecls) - newTypeField(cls, tpnme.hkApply, Covariant, paramDecls) + newTypeField(cls, tpnme.hkApplyOBS, Covariant, paramDecls) val parentTraitRefs = for (i <- 0 until vcs.length if vcs(i) != 0) - yield LambdaTrait(vcs.updated(i, 0)).typeRef + yield LambdaTraitOBS(vcs.updated(i, 0)).typeRef denot.info = ClassInfo( ScalaPackageClass.thisType, cls, ObjectClass.typeRef :: parentTraitRefs.toList, paramDecls) } } - val traitName = tpnme.hkLambda(vcs) + val traitName = tpnme.hkLambdaOBS(vcs) def createTrait = { val cls = newClassSymbol( diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala index 17af899e9..1f221b5c8 100644 --- a/src/dotty/tools/dotc/core/NameOps.scala +++ b/src/dotty/tools/dotc/core/NameOps.scala @@ -116,12 +116,12 @@ object NameOps { def hkArgIndex: Int = name.drop(tpnme.hkArgPrefixLength).toString.toInt - def isLambdaTraitName(implicit ctx: Context): Boolean = - name.isTypeName && name.startsWith(tpnme.hkLambdaPrefix) + def isLambdaTraitNameOBS(implicit ctx: Context): Boolean = + name.isTypeName && name.startsWith(tpnme.hkLambdaPrefixOBS) - def lambdaTraitVariances(implicit ctx: Context): List[Int] = { - val vs = name.drop(tpnme.hkLambdaPrefix.length) - vs.toList.map(c => tpnme.varianceSuffixes.indexOf(c) - 1) + def lambdaTraitVariancesOBS(implicit ctx: Context): List[Int] = { + val vs = name.drop(tpnme.hkLambdaPrefixOBS.length) + vs.toList.map(c => tpnme.varianceSuffixesOBS.indexOf(c) - 1) } /** If the name ends with $nn where nn are diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala index 81f6da0e2..e82260201 100644 --- a/src/dotty/tools/dotc/core/StdNames.scala +++ b/src/dotty/tools/dotc/core/StdNames.scala @@ -529,9 +529,9 @@ object StdNames { val synthSwitch: N = "$synthSwitch" - val hkApply: N = "$Apply" + val hkApplyOBS: N = "$Apply" val hkArgPrefix: N = "$hk" - val hkLambdaPrefix: N = "Lambda$" + val hkLambdaPrefixOBS: N = "Lambda$" val hkArgPrefixHead: Char = hkArgPrefix.head val hkArgPrefixLength: Int = hkArgPrefix.length @@ -744,11 +744,11 @@ object StdNames { def syntheticTypeParamNames(num: Int): List[TypeName] = (0 until num).map(syntheticTypeParamName)(breakOut) - def hkLambda(vcs: List[Int]): TypeName = hkLambdaPrefix ++ vcs.map(varianceSuffix).mkString + def hkLambdaOBS(vcs: List[Int]): TypeName = hkLambdaPrefixOBS ++ vcs.map(varianceSuffixOBS).mkString def hkArg(n: Int): TypeName = hkArgPrefix ++ n.toString - def varianceSuffix(v: Int): Char = varianceSuffixes.charAt(v + 1) - val varianceSuffixes = "NIP" + def varianceSuffixOBS(v: Int): Char = varianceSuffixesOBS.charAt(v + 1) + val varianceSuffixesOBS = "NIP" final val Conforms = encode("<:<") } diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index 
7715885c4..8fefdf7a7 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -479,8 +479,8 @@ object SymDenotations { name.decode == tpnme.REFINE_CLASS /** is this symbol a trait representing a type lambda? */ - final def isLambdaTrait(implicit ctx: Context): Boolean = - isClass && name.startsWith(tpnme.hkLambdaPrefix) && owner == defn.ScalaPackageClass + final def isLambdaTraitOBS(implicit ctx: Context): Boolean = + isClass && name.startsWith(tpnme.hkLambdaPrefixOBS) && owner == defn.ScalaPackageClass /** Is this symbol a package object or its module class? */ def isPackageObject(implicit ctx: Context): Boolean = { diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index d9521b3c8..9ceae6e5f 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -15,43 +15,9 @@ import StdNames.tpnme import util.Positions.Position import config.Printers._ import collection.mutable - import dotty.tools.dotc.config.Config +import dotty.tools.dotc.config.Config import java.util.NoSuchElementException -object TypeApplicationsNewHK { - import TypeApplications._ - - object TypeLambda { - def apply(argBindingFns: List[RecType => TypeBounds], - bodyFn: RecType => Type)(implicit ctx: Context): Type = { - val argNames = argBindingFns.indices.toList.map(tpnme.hkArg) - var idx = 0 - RecType.closeOver(rt => - (bodyFn(rt) /: argBindingFns) { (parent, argBindingFn) => - val res = RefinedType(parent, tpnme.hkArg(idx), argBindingFn(rt)) - idx += 1 - res - }) - } - - def unapply(tp: Type)(implicit ctx: Context): Option[(List[TypeBounds], Type)] = { - def decompose(t: Type, acc: List[TypeBounds]): (List[TypeBounds], Type) = t match { - case t @ RefinedType(p, rname, rinfo: TypeBounds) - if rname.isHkArgName && rinfo.isBinding => - decompose(p, rinfo.bounds :: acc) - case t: RecType => - decompose(t.parent, acc) - case _ => - (acc, t) - } - decompose(tp, Nil) match { - case (Nil, _) => None - case x => Some(x) - } - } - } -} - object TypeApplications { /** Assert type is not a TypeBounds instance and return it unchanged */ @@ -81,22 +47,42 @@ object TypeApplications { def variancesConform(syms1: List[MemberBinding], syms2: List[MemberBinding])(implicit ctx: Context) = syms1.corresponds(syms2)(varianceConforms) + def fallbackTypeParams(n: Int)(implicit ctx: Context): List[MemberBinding] = { + def memberBindings(n: Int): Type = + if (n == 0) NoType + else + RefinedType( + memberBindings(n - 1), + tpnme.hkArg(n - 1), + TypeBounds.empty.withBindingKind(NonvariantBinding)) + def decompose(t: Type, acc: List[MemberBinding]): List[MemberBinding] = t match { + case t: RefinedType => decompose(t.parent, t :: acc) + case NoType => acc + } + decompose(memberBindings(n), Nil) + } + /** Extractor for * * [v1 X1: B1, ..., vn Xn: Bn] -> T * ==> * ([X_i := this.$hk_i] T) { type v_i $hk_i: (new)B_i } - * - * [X] -> List[X] - * - * List { type List$A = this.$hk_0 } { type $hk_0 } - * - * [X] -> X - * - * mu(this) this.$hk_0 & { type $hk_0 } */ object TypeLambda { - def apply(variances: List[Int], + def apply(argBindingFns: List[RecType => TypeBounds], + bodyFn: RecType => Type)(implicit ctx: Context): Type = { + assert(Config.newHK) + val argNames = argBindingFns.indices.toList.map(tpnme.hkArg) + var idx = 0 + RecType.closeOver(rt => + (bodyFn(rt) /: argBindingFns) { (parent, argBindingFn) => + val res = RefinedType(parent, tpnme.hkArg(idx), argBindingFn(rt)) 
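          // `res` refines `parent` with `type $hk_<idx>`, whose bounds come from
          // `argBindingFn(rt)` and may refer to the enclosing binder via `rt`;
          // RecType.closeOver later drops that binder again if nothing refers to it.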
+ idx += 1 + res + }) + } + + def applyOBS(variances: List[Int], argBoundsFns: List[RefinedType => TypeBounds], bodyFn: RefinedType => Type)(implicit ctx: Context): Type = { def argRefinements(parent: Type, i: Int, bs: List[RefinedType => TypeBounds]): Type = bs match { @@ -108,26 +94,42 @@ object TypeApplications { assert(variances.nonEmpty) assert(argBoundsFns.length == variances.length) RefinedType( - argRefinements(defn.LambdaTrait(variances).typeRef, 0, argBoundsFns), - tpnme.hkApply, bodyFn(_).bounds.withVariance(1)) + argRefinements(defn.LambdaTraitOBS(variances).typeRef, 0, argBoundsFns), + tpnme.hkApplyOBS, bodyFn(_).bounds.withVariance(1)) } - def unapply(tp: Type)(implicit ctx: Context): Option[(List[Int], List[TypeBounds], Type)] = tp match { - case app @ RefinedType(parent, tpnme.hkApply, refinedInfo) => - val cls = parent.typeSymbol - val variances = cls.typeParams.map(_.variance) - def collectBounds(t: Type, acc: List[TypeBounds]): List[TypeBounds] = t match { - case t @ RefinedType(p, rname, rinfo) => - assert(rname.isHkArgName) - collectBounds(p, rinfo.bounds :: acc) - case TypeRef(_, lname) => - assert(lname.isLambdaTraitName) - acc + def unapply(tp: Type)(implicit ctx: Context): Option[(/*List[Int], */List[TypeBounds], Type)] = + if (Config.newHK) { + def decompose(t: Type, acc: List[TypeBounds]): (List[TypeBounds], Type) = t match { + case t @ RefinedType(p, rname, rinfo: TypeBounds) if rname.isHkArgName && rinfo.isBinding => + decompose(p, rinfo.bounds :: acc) + case t: RecType => + decompose(t.parent, acc) + case _ => + (acc, t) } - val argBounds = collectBounds(parent, Nil) - Some((variances, argBounds, refinedInfo.argInfo)) - case _ => - None + decompose(tp, Nil) match { + case (Nil, _) => None + case x => Some(x) +// case (bindings, tp) => Some((Nil, bindings, tp)) + } + } + else tp match { + case app @ RefinedType(parent, tpnme.hkApplyOBS, refinedInfo) => + val cls = parent.typeSymbol + val variances = cls.typeParams.map(_.variance) + def collectBounds(t: Type, acc: List[TypeBounds]): List[TypeBounds] = t match { + case t @ RefinedType(p, rname, rinfo) => + assert(rname.isHkArgName) + collectBounds(p, rinfo.bounds :: acc) + case TypeRef(_, lname) => + assert(lname.isLambdaTraitNameOBS) + acc + } + val argBounds = collectBounds(parent, Nil) + Some((argBounds, refinedInfo.argInfo)) + case _ => + None } } @@ -156,7 +158,7 @@ object TypeApplications { false } tp match { - case TypeLambda(_, argBounds, AppliedType(fn: TypeRef, args)) + case TypeLambda(argBounds, AppliedType(fn: TypeRef, args)) if argsAreForwarders(args, tp.typeParams.length) => Some(fn) case _ => None } @@ -177,10 +179,32 @@ object TypeApplications { def apply(tp: Type, args: List[Type])(implicit ctx: Context): Type = tp.appliedTo(args) def unapply(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match { - case TypeRef(prefix, tpnme.hkApply) => unapp(prefix) + case tp: RefinedType if Config.newHK => + var refinements: List[RefinedType] = Nil + var tycon = tp.stripTypeVar + while (tycon.isInstanceOf[RefinedType]) { + val rt = tycon.asInstanceOf[RefinedType] + refinements = rt :: refinements + tycon = rt.parent.stripTypeVar + } + def collectArgs(tparams: List[MemberBinding], + refinements: List[RefinedType], + argBuf: mutable.ListBuffer[Type]): Option[(Type, List[Type])] = refinements match { + case Nil if tparams.isEmpty && argBuf.nonEmpty => + Some((tycon, argBuf.toList)) + case RefinedType(_, rname, rinfo) :: refinements1 + if tparams.nonEmpty && rname == tparams.head.memberName => + 
collectArgs(tparams.tail, refinements1, argBuf += rinfo.argInfo) + case _ => + None + } + collectArgs(tycon.typeParams, refinements, new mutable.ListBuffer[Type]) + case TypeRef(prefix, tpnme.hkApplyOBS) if !Config.newHK => + unapp(prefix) case _ => - unapp(tp) match { - case Some((tycon: TypeRef, _)) if tycon.symbol.isLambdaTrait => + if (Config.newHK) None + else unapp(tp) match { + case Some((tycon: TypeRef, _)) if tycon.symbol.isLambdaTraitOBS => // We are seeing part of a lambda abstraction, not an applied type None case x => x @@ -239,7 +263,7 @@ object TypeApplications { * - bounds `Bi` are the intersection of the corresponding type parameter bounds * of `tp1` and `tp2`. */ - def hkCombine(tp1: Type, tp2: Type, + def hkCombineOBS(tp1: Type, tp2: Type, tparams1: List[TypeSymbol], tparams2: List[TypeSymbol], op: (Type, Type) => Type) (implicit ctx: Context): Type = { val variances = (tparams1, tparams2).zipped.map { (tparam1, tparam2) => @@ -258,7 +282,25 @@ object TypeApplications { val app1: RefinedType => Type = rt => tp1.appliedTo(argRefs(rt, tparams1.length)) val app2: RefinedType => Type = rt => tp2.appliedTo(argRefs(rt, tparams2.length)) val body: RefinedType => Type = rt => op(app1(rt), app2(rt)) - TypeLambda(variances, bounds, body) + TypeLambda.applyOBS(variances, bounds, body) + } + + private class InstMap(fullType: Type)(implicit ctx: Context) extends TypeMap { + var localRecs: Set[RecType] = Set.empty + var keptRefs: Set[Name] = Set.empty + var isSafe: Boolean = true + def apply(tp: Type): Type = tp match { + case tp @ TypeRef(RecThis(rt), sel) if sel.isHkArgName && localRecs.contains(rt) => + fullType.member(sel).info match { + case TypeAlias(alias) => apply(alias) + case _ => keptRefs += sel; tp + } + case tp: TypeVar if !tp.inst.exists => + isSafe = false + tp + case _ => + mapOver(tp) + } } } @@ -290,7 +332,8 @@ class TypeApplications(val self: Type) extends AnyVal { else tsym.infoOrCompleter match { case completer: TypeParamsCompleter => val tparams = completer.completerTypeParams(tsym) - defn.LambdaTrait(tparams.map(_.variance)).typeParams + if (Config.newHK) tparams + else defn.LambdaTraitOBS(tparams.map(_.variance)).typeParams case _ => if (!tsym.isCompleting || tsym.isAliasType) tsym.info.typeParams else @@ -305,11 +348,15 @@ class TypeApplications(val self: Type) extends AnyVal { // inlined and optimized version of // val sym = self.LambdaTrait // if (sym.exists) return sym.typeParams - if (self.refinedName == tpnme.hkApply) { + if (!Config.newHK && self.refinedName == tpnme.hkApplyOBS) { val sym = self.parent.classSymbol - if (sym.isLambdaTrait) return sym.typeParams + if (sym.isLambdaTraitOBS) return sym.typeParams } - self.parent.typeParams.filterNot(_.memberName == self.refinedName) + val precedingParams = self.parent.typeParams + if (Config.newHK && self.isTypeParam) precedingParams :+ self + else precedingParams.filterNot(_.memberName == self.refinedName) + case self: RecType => + self.parent.typeParams case self: SingletonType => Nil case self: TypeProxy => @@ -319,6 +366,11 @@ class TypeApplications(val self: Type) extends AnyVal { } } + final def hkTypeParams(implicit ctx: Context): List[MemberBinding] = + if (Config.newHK) + if (isHK) typeParams else Nil + else LambdaTraitOBS.typeParams + final def typeParamSymbols(implicit ctx: Context): List[TypeSymbol] = { val tparams = typeParams assert(tparams.isEmpty || tparams.head.isInstanceOf[Symbol]) @@ -387,25 +439,27 @@ class TypeApplications(val self: Type) extends AnyVal { } /** The Lambda trait 
underlying a type lambda */ - def LambdaTrait(implicit ctx: Context): Symbol = self.stripTypeVar match { - case RefinedType(_, tpnme.hkApply, _) => + def LambdaTraitOBS(implicit ctx: Context): Symbol = self.stripTypeVar match { + case RefinedType(_, tpnme.hkApplyOBS, _) => val sym = self.classSymbol - if (sym.isLambdaTrait) sym else NoSymbol - case TypeBounds(lo, hi) => hi.LambdaTrait + if (sym.isLambdaTraitOBS) sym else NoSymbol + case TypeBounds(lo, hi) => hi.LambdaTraitOBS case _ => NoSymbol } - /** Is receiver type higher-kinded (i.e. of kind != "*")? */ + /** Is self type higher-kinded (i.e. of kind != "*")? */ def isHK(implicit ctx: Context): Boolean = self.dealias match { case self: TypeRef => self.info.isHK - case RefinedType(_, tpnme.hkApply, _) => true + case self: RefinedType => self.refinedName == tpnme.hkApplyOBS || self.isTypeParam + case self: RecType => self.parent.isHK case TypeBounds(_, hi) => hi.isHK case _ => false } /** is receiver of the form T#$Apply? */ - def isHKApply: Boolean = self match { - case TypeRef(_, name) => name == tpnme.hkApply + def isHKApply(implicit ctx: Context): Boolean = self match { + case self @ RefinedType(_, name, _) => Config.newHK && name.isHkArgName && !self.isTypeParam + case TypeRef(_, name) => !Config.newHK && (name == tpnme.hkApplyOBS) case _ => false } @@ -423,7 +477,7 @@ class TypeApplications(val self: Type) extends AnyVal { case self: TypeRef => self.denot.exists && { val sym = self.symbol - if (sym.isClass) !sym.isLambdaTrait + if (sym.isClass) !sym.isLambdaTraitOBS else sym.isCompleted && self.info.isAlias && self.info.bounds.hi.classNotLambda } case _ => @@ -449,10 +503,22 @@ class TypeApplications(val self: Type) extends AnyVal { /** Replace references to type parameters with references to hk arguments `this.$hk_i` * Care is needed not to cause cyclic reference errors, hence `SafeSubstMap`. */ - def recursify[T <: Type](tparams: List[Symbol])(implicit ctx: Context): RecType => T = - (rt: RecType) => - new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)) - .apply(self).asInstanceOf[T] + def recursify[T <: Type](tparams: List[MemberBinding])(implicit ctx: Context): RecType => T = + tparams match { + case (_: Symbol) :: _ => + (rt: RecType) => + new ctx.SafeSubstMap(tparams.asInstanceOf[List[Symbol]], argRefs(rt, tparams.length)) + .apply(self).asInstanceOf[T] + case _ => + assert(Config.newHK) + def mapRefs(rt: RecType) = new TypeMap { + def apply(t: Type): Type = t match { + case rthis: RecThis if tparams contains rthis.binder.parent => RecThis(rt) + case _ => mapOver(t) + } + } + mapRefs(_).apply(self).asInstanceOf[T] + } /** Lambda abstract `self` with given type parameters. 
Examples: * @@ -469,27 +535,65 @@ class TypeApplications(val self: Type) extends AnyVal { new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)) .apply(tp).asInstanceOf[T] - def expand(tp: Type) = { - TypeLambda( - tparams.map(_.variance), - tparams.map(tparam => internalize(self.memberInfo(tparam).bounds)), - internalize(tp)) - } + def expand(tp: Type) = + if (Config.newHK) + TypeLambda( + tparams.map(tparam => + tparam.memberBoundsAsSeenFrom(self) + .withBindingKind(BindingKind.fromVariance(tparam.variance)) + .recursify(tparams)), + tp.recursify(tparams)) + else + TypeLambda.applyOBS( + tparams.map(_.variance), + tparams.map(tparam => internalize(self.memberInfo(tparam).bounds)), + internalize(tp)) + + assert(!isHK, self) self match { case self: TypeAlias => - self.derivedTypeAlias(expand(self.alias)) + self.derivedTypeAlias(expand(self.alias.BetaReduce)) case self @ TypeBounds(lo, hi) => - self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi))) + if (Config.newHK) + self.derivedTypeBounds(lo, expand(hi.BetaReduce)) + else + self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi.BetaReduce))) case _ => expand(self) } } - /** A type ref is eta expandable if it refers to a non-lambda class. + def BetaReduce(implicit ctx: Context): Type = self.dealias match { + case self1 @ RefinedType(_, rname, _) if Config.newHK && rname.isHkArgName => + val inst = new InstMap(self) + def instTop(tp: Type): Type = { + if (!inst.isSafe) tp + else tp.dealias match { + case tp: RecType => + inst.localRecs += tp + tp.rebind(instTop(tp.parent)) + case tp @ RefinedType(parent, rname, rinfo) => + rinfo match { + case TypeAlias(TypeRef(RecThis(rt), sel)) if sel.isHkArgName && inst.localRecs.contains(rt) => + instTop(tp.derivedRefinedType(parent, rname, self.member(sel).info)) + case _ => + val parent1 = instTop(parent) + if (rname.isHkArgName && !inst.keptRefs.contains(rname)) parent1 + else tp.derivedRefinedType(parent1, rname, inst(rinfo)) + } + case tp => + inst(tp) + }} + val reduced = instTop(self) + if (inst.isSafe) reduced else self + case self1 => self1 + } + + /** A type ref is eta expandable if it refers to a non-lambda class. * In that case we can look for parameterized base types of the type * to eta expand them. */ def isEtaExpandable(implicit ctx: Context) = self match { - case self: TypeRef => self.symbol.isClass && !self.name.isLambdaTraitName + case self: TypeRef => self.symbol.isClass && !self.name.isLambdaTraitNameOBS case _ => false } @@ -519,8 +623,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */ def etaExpandIfHK(bound: Type)(implicit ctx: Context): Type = { - val boundLambda = bound.LambdaTrait - val hkParams = boundLambda.typeParams + val hkParams = bound.hkTypeParams if (hkParams.isEmpty) self else self match { case self: TypeRef if self.symbol.isClass && self.typeParams.length == hkParams.length => @@ -561,15 +664,36 @@ class TypeApplications(val self: Type) extends AnyVal { * is covariant is irrelevant, so can be ignored. 
*/ def adaptHkVariances(bound: Type)(implicit ctx: Context): Type = { - val boundLambda = bound.LambdaTrait - val hkParams = boundLambda.typeParams + val hkParams = bound.hkTypeParams if (hkParams.isEmpty) self + else if (Config.newHK) { + def adaptArg(arg: Type): Type = arg match { + case arg @ TypeLambda(tparamBounds, body) if + !arg.typeParams.corresponds(hkParams)(_.memberVariance == _.memberVariance) && + arg.typeParams.corresponds(hkParams)(varianceConforms) => + def adjustVariance(bounds: TypeBounds, tparam: MemberBinding): TypeBounds = + bounds.withBindingKind(BindingKind.fromVariance(tparam.memberVariance)) + def lift[T <: Type](tp: T): (RecType => T) = arg match { + case rt0: RecType => tp.subst(rt0, _).asInstanceOf[T] + case _ => (x => tp) + } + val adjusted = (tparamBounds, hkParams).zipped.map(adjustVariance) + TypeLambda(adjusted.map(lift), lift(body)) + case arg @ TypeAlias(alias) => + arg.derivedTypeAlias(adaptArg(alias)) + case arg @ TypeBounds(lo, hi) => + arg.derivedTypeBounds(lo, adaptArg(hi)) + case _ => + arg + } + adaptArg(self) + } else { def adaptArg(arg: Type): Type = arg match { - case arg: TypeRef if arg.symbol.isLambdaTrait && - !arg.symbol.typeParams.corresponds(hkParams)(_.variance == _.variance) && - arg.symbol.typeParams.corresponds(hkParams)(varianceConforms) => - arg.prefix.select(boundLambda) + case arg: TypeRef if arg.symbol.isLambdaTraitOBS && + !arg.symbol.typeParams.corresponds(hkParams)(_.variance == _.memberVariance) && + arg.symbol.typeParams.corresponds(hkParams)(varianceConforms) => + arg.prefix.select(bound.LambdaTraitOBS) case arg: RefinedType => arg.derivedRefinedType(adaptArg(arg.parent), arg.refinedName, arg.refinedInfo) case arg: RecType => @@ -605,6 +729,8 @@ class TypeApplications(val self: Type) extends AnyVal { def apply(tp: Type): Type = tp match { case TypeRef(RefinedThis(rt), name) if rt.eq(self) && name.isHkArgName => args(name.hkArgIndex) + case TypeRef(RecThis(rt), name) if rt.eq(self) && name.isHkArgName => + args(name.hkArgIndex) case _ => mapOver(tp) } @@ -613,7 +739,7 @@ class TypeApplications(val self: Type) extends AnyVal { else self.stripTypeVar match { case EtaExpansion(self1) => self1.appliedTo(args) - case TypeLambda(_, _, body) if !args.exists(_.isInstanceOf[TypeBounds]) => + case TypeLambda(_, body) if !args.exists(_.isInstanceOf[TypeBounds]) => substHkArgs(body) case self: PolyType => self.instantiate(args) @@ -643,8 +769,8 @@ class TypeApplications(val self: Type) extends AnyVal { } assert(args.nonEmpty) matchParams(self, typParams, args) match { - case refined @ RefinedType(_, pname, _) if pname.isHkArgName => - TypeRef(refined, tpnme.hkApply) + case refined @ RefinedType(_, pname, _) if pname.isHkArgName && !Config.newHK => + TypeRef(refined, tpnme.hkApplyOBS) case refined => refined } @@ -663,7 +789,8 @@ class TypeApplications(val self: Type) extends AnyVal { case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => // This happens when unpickling e.g. 
scala$collection$generic$GenMapFactory$$CC ctx.warning(i"encountered F-bounded higher-kinded type parameters for ${self.symbol}; assuming they are invariant") - defn.LambdaTrait(args map alwaysZero).typeParams + if (Config.newHK) fallbackTypeParams(args.length) + else defn.LambdaTraitOBS(args map alwaysZero).typeParams case _ => typeParams } @@ -853,6 +980,6 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => false } - recur(self) + !Config.newHK && recur(self) } } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 58c6bea3a..3648b3764 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -178,11 +178,11 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { && !tp1.isInstanceOf[WithFixedSym] && !tp2.isInstanceOf[WithFixedSym] ) || - compareHkApply(tp1, tp2, inOrder = true) || - compareHkApply(tp2, tp1, inOrder = false) || + compareHkApplyOBS(tp1, tp2, inOrder = true) || + compareHkApplyOBS(tp2, tp1, inOrder = false) || thirdTryNamed(tp1, tp2) case _ => - compareHkApply(tp2, tp1, inOrder = false) || + compareHkApplyOBS(tp2, tp1, inOrder = false) || secondTry(tp1, tp2) } } @@ -259,7 +259,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { if (tp1.prefix.isStable) return false case _ => } - compareHkApply(tp1, tp2, inOrder = true) || + compareHkApplyOBS(tp1, tp2, inOrder = true) || thirdTry(tp1, tp2) case tp1: PolyParam => def flagNothingBound = { @@ -354,6 +354,12 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp2: RefinedType => def compareRefinedSlow: Boolean = { val name2 = tp2.refinedName + if (name2.isHkArgName) { + val tp2reduced = tp2.BetaReduce + if (Config.newHK && (tp2reduced ne tp2)) return isSubType(tp1, tp2reduced) + if (Config.newHK && tp2.isTypeParam) return compareHkLambda(tp2, tp1, inOrder = false) + if (Config.newHK && !tp1.isHKApply) return compareHkApply(tp2, tp1, inOrder = false) + } isSubType(tp1, tp2.parent) && (name2 == nme.WILDCARD || hasMatchingMember(name2, tp1, tp2)) } @@ -370,6 +376,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case _ => compareRefinedSlow || fourthTry(tp1, tp2) || + compareHkApply(tp2, tp1, inOrder = false) || compareHkLambda(tp2, tp1, inOrder = false) || compareAliasedRefined(tp2, tp1, inOrder = false) } @@ -494,9 +501,15 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths case tp1: RefinedType => + if (Config.newHK && tp1.refinedName.isHkArgName) { + val tp1reduced = tp1.BetaReduce + if (Config.newHK && (tp1reduced ne tp1)) return isSubType(tp1reduced, tp2) + if (Config.newHK && tp1.isTypeParam) return compareHkLambda(tp1, tp2, inOrder = true) + if (Config.newHK && !tp2.isHKApply) return compareHkApply(tp1, tp2, inOrder = true) + } isNewSubType(tp1.parent, tp2) || - compareHkLambda(tp1, tp2, inOrder = true) || - compareAliasedRefined(tp1, tp2, inOrder = true) + !Config.newHK && compareHkLambda(tp1, tp2, inOrder = true) || + !Config.newHK && compareAliasedRefined(tp1, tp2, inOrder = true) case tp1: RecType => isNewSubType(tp1.parent, tp2) case AndType(tp11, tp12) => @@ -535,7 +548,9 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * - `B` satisfies predicate `p`. 
*/ private def testLifted(tp1: Type, tp2: Type, tparams: List[MemberBinding], p: Type => Boolean): Boolean = { - val classBounds = tp2.member(tpnme.hkApply).info.classSymbols + val classBounds = + if (Config.newHK) tp2.classSymbols + else tp2.member(tpnme.hkApplyOBS).info.classSymbols def recur(bcs: List[ClassSymbol]): Boolean = bcs match { case bc :: bcs1 => val baseRef = tp1.baseTypeRef(bc) @@ -570,7 +585,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * * (4) If `inOrder`, test `projection <: other` else test `other <: projection`. */ - def compareHkApply(projection: NamedType, other: Type, inOrder: Boolean): Boolean = { + def compareHkApplyOBS(projection: NamedType, other: Type, inOrder: Boolean): Boolean = { def tryInfer(tp: Type): Boolean = ctx.traceIndented(i"compareHK($projection, $other, inOrder = $inOrder, constr = $tp)", subtyping) { tp match { case tp: TypeVar => tryInfer(tp.underlying) @@ -592,7 +607,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } } val hkTypeParams = param.typeParams - subtyping.println(i"classBounds = ${projection.prefix.member(tpnme.hkApply).info.classSymbols}") + subtyping.println(i"classBounds = ${projection.prefix.member(tpnme.hkApplyOBS).info.classSymbols}") subtyping.println(i"base classes = ${other.baseClasses}") subtyping.println(i"type params = $hkTypeParams") if (inOrder) unifyWith(other) @@ -601,13 +616,67 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } } - projection.name == tpnme.hkApply && !other.isHKApply && + !Config.newHK && projection.name == tpnme.hkApplyOBS && !other.isHKApply && tryInfer(projection.prefix.typeConstructor.dealias) } + /** If `projection` is a hk projection T#$apply with a constrainable poly param + * as type constructor and `other` is not a hk projection, then perform the following + * steps: + * + * (1) If not `inOrder` then perform the next steps until they all succeed + * for each base type of other which + * - derives from a class bound of `projection`, + * - has the same number of type parameters than `projection` + * - has type parameter variances which conform to those of `projection`. + * If `inOrder` then perform the same steps on the original `other` type. + * + * (2) Try to eta expand the constructor of `other`. + * + * (3a) In mode `TypevarsMissConetxt` replace the projection's hk constructor parameter + * by the eta expansion of step (2) reapplied to the projection's arguments. + * (3b) In normal mode, try to unify the projection's hk constructor parameter with + * the eta expansion of step(2) + * + * (4) If `inOrder`, test `projection <: other` else test `other <: projection`. 
+ */ + def compareHkApply(app: RefinedType, other: Type, inOrder: Boolean): Boolean = { + def tryInfer(tp: Type): Boolean = ctx.traceIndented(i"compareHK($app, $other, inOrder = $inOrder, constr = $tp)", subtyping) { + tp match { + case tp: TypeVar => tryInfer(tp.underlying) + case param: PolyParam if canConstrain(param) => + + def unifyWith(liftedOther: Type): Boolean = { + subtyping.println(i"unify with $liftedOther") + liftedOther.typeConstructor.widen match { + case tycon: TypeRef if tycon.isEtaExpandable && tycon.typeParams.nonEmpty => + val (ok, app1) = + if (ctx.mode.is(Mode.TypevarsMissContext)) + (true, EtaExpansion(tycon).appliedTo(app.argInfos)) + else + (tryInstantiate(param, EtaExpansion(tycon)), app) + ok && + (if (inOrder) isSubType(app1, other) else isSubType(other, app1)) + case _ => + false + } + } + val hkTypeParams = param.typeParams + subtyping.println(i"classBounds = ${app.classSymbols}") + subtyping.println(i"base classes = ${other.baseClasses}") + subtyping.println(i"type params = $hkTypeParams") + if (inOrder) unifyWith(other) + else testLifted(other, app, hkTypeParams, unifyWith) + case _ => + false + } + } + Config.newHK && app.isHKApply && !other.isHKApply && tryInfer(app.typeConstructor.dealias) + } + /** Compare type lambda with non-lambda type. */ def compareHkLambda(rt: RefinedType, other: Type, inOrder: Boolean) = rt match { - case TypeLambda(vs, args, body) => + case TypeLambda(args, body) => other.isInstanceOf[TypeRef] && args.length == other.typeParams.length && { val applied = other.appliedTo(argRefs(rt, args.length)) @@ -1123,15 +1192,39 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * allowing both interpretations. A possible remedy is to be somehow stricter * in where we allow which interpretation. 
*/ - private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type) = { - val tparams1 = tp1.typeParamSymbols // TODO revise for new hk scheme - val tparams2 = tp2.typeParamSymbols - def onlyNamed(tparams: List[TypeSymbol]) = tparams.forall(!_.is(ExpandedName)) - if (tparams1.isEmpty || tparams2.isEmpty || + private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type) = + if (Config.newHK) { + val tparams1 = tp1.typeParams + val tparams2 = tp2.typeParams + if (tparams1.isEmpty || tparams2.isEmpty) op(tp1, tp2) + else if (tparams1.length != tparams2.length) mergeConflict(tp1, tp2) + else { + val bindings: List[RecType => TypeBounds] = + (tparams1, tparams2).zipped.map { (tparam1, tparam2) => + val b1: RecType => TypeBounds = + tparam1.memberBoundsAsSeenFrom(tp1).recursify(tparams1) + val b2: RecType => TypeBounds = + tparam2.memberBoundsAsSeenFrom(tp2).recursify(tparams2) + (rt: RecType) => (b1(rt) & b2(rt)) + .withBindingKind( + BindingKind.fromVariance( + (tparam1.memberVariance + tparam2.memberVariance) / 2)) + } + val app1: RecType => Type = rt => tp1.appliedTo(argRefs(rt, tparams1.length)) + val app2: RecType => Type = rt => tp2.appliedTo(argRefs(rt, tparams2.length)) + val body: RecType => Type = rt => op(app1(rt), app2(rt)) + TypeLambda(bindings, body) + } + } + else { + val tparams1 = tp1.typeParamSymbols + val tparams2 = tp2.typeParamSymbols + def onlyNamed(tparams: List[TypeSymbol]) = tparams.forall(!_.is(ExpandedName)) + if (tparams1.isEmpty || tparams2.isEmpty || onlyNamed(tparams1) && onlyNamed(tparams2)) op(tp1, tp2) - else if (tparams1.length != tparams2.length) mergeConflict(tp1, tp2) - else hkCombine(tp1, tp2, tparams1, tparams2, op) - } + else if (tparams1.length != tparams2.length) mergeConflict(tp1, tp2) + else hkCombineOBS(tp1, tp2, tparams1, tparams2, op) + } /** Try to distribute `&` inside type, detect and handle conflicts * @pre !(tp1 <: tp2) && !(tp2 <:< tp1) -- these cases were handled before @@ -1415,15 +1508,16 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx) - override def compareHkApply(projection: NamedType, other: Type, inOrder: Boolean) = - if (projection.name == tpnme.hkApply) - traceIndented(i"compareHkApply $projection, $other, $inOrder") { - super.compareHkApply(projection, other, inOrder) + override def compareHkApply(app: RefinedType, other: Type, inOrder: Boolean) = + if (app.isHKApply) + traceIndented(i"compareHkApply $app, $other, $inOrder, ${app.typeConstructor.dealias}") { + super.compareHkApply(app, other, inOrder) } - else super.compareHkApply(projection, other, inOrder) + else super.compareHkApply(app, other, inOrder) override def compareHkLambda(rt: RefinedType, other: Type, inOrder: Boolean) = - if (rt.refinedName == tpnme.hkApply) + if (!Config.newHK && rt.refinedName == tpnme.hkApplyOBS || + Config.newHK && rt.isTypeParam) traceIndented(i"compareHkLambda $rt, $other, $inOrder") { super.compareHkLambda(rt, other, inOrder) } diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index 54846087f..bb9566b6f 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -6,6 +6,7 @@ import Contexts._, Types._, Symbols._, Names._, Flags._, Scopes._ import SymDenotations._, Denotations.SingleDenotation import config.Printers._ import util.Positions._ +import NameOps._ import Decorators._ import StdNames._ import Annotations._ @@ -382,7 +383,7 @@ trait 
TypeOps { this: Context => // TODO: Make standalone object. var formals: SimpleMap[TypeName, Symbol] = SimpleMap.Empty // A map of all formal parent parameter // Strip all refinements from parent type, populating `refinements` and `formals` maps. - def normalizeToRef(tp: Type): TypeRef = tp.dealias match { + def normalizeToRef(tp: Type): TypeRef = tp.dealias.BetaReduce match { case tp: TypeRef => tp case tp @ RefinedType(tp1, name: TypeName, rinfo) => diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 42abc4251..22a26968c 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -465,8 +465,10 @@ object Types { case _ => NoDenotation } - def goRec(tp: RecType) = + def goRec(tp: RecType) = { + //println(s"find member $pre . $name in $tp") go(tp.parent).mapInfo(_.substRecThis(tp, pre)) + } def goRefined(tp: RefinedType) = { val pdenot = go(tp.parent) val rinfo = @@ -960,7 +962,7 @@ object Types { else if (!pre.refinementRefersToThis) alias else alias match { case TypeRef(RefinedThis(`pre`), aliasName) => lookupRefined(aliasName) // (1) - case _ => if (name == tpnme.hkApply) betaReduce(alias) else NoType // (2) // ### use TypeApplication's betaReduce + case _ => if (name == tpnme.hkApplyOBS) betaReduce(alias) else NoType // (2) // ### use TypeApplication's betaReduce } case _ => loop(pre.parent) } @@ -1624,7 +1626,7 @@ object Types { ctx.underlyingRecursions -= 1 } - /** A selection of the same kind, but with potentially a differet prefix. + /** A selection of the same kind, but with potentially a different prefix. * The following normalizations are performed for type selections T#A: * * T#A --> B if A is bound to an alias `= B` in T @@ -1641,7 +1643,7 @@ object Types { else if (isType) { val res = prefix.lookupRefined(name) if (res.exists) res - else if (name == tpnme.hkApply && prefix.classNotLambda) { + else if (name == tpnme.hkApplyOBS && prefix.classNotLambda) { // After substitution we might end up with a type like // `C { type hk$0 = T0; ...; type hk$n = Tn } # $Apply` // where C is a class. In that case we eta expand `C`. 
@@ -1751,7 +1753,11 @@ object Types { type ThisType = TypeRef - override def underlying(implicit ctx: Context): Type = info + override def underlying(implicit ctx: Context): Type = { + val res = info + assert(res != this, this) + res + } } final class TermRefWithSignature(prefix: Type, name: TermName, override val sig: Signature) extends TermRef(prefix, name) { @@ -1922,7 +1928,7 @@ object Types { object TypeRef { def checkProjection(prefix: Type, name: TypeName)(implicit ctx: Context) = - if (name == tpnme.hkApply && prefix.classNotLambda) + if (name == tpnme.hkApplyOBS && prefix.classNotLambda) assert(false, s"bad type : $prefix.$name does not allow $$Apply projection") /** Create type ref with given prefix and name */ @@ -2073,7 +2079,7 @@ object Types { throw new AssertionError(s"bad instantiation: $this") def checkInst(implicit ctx: Context): this.type = { - if (refinedName == tpnme.hkApply) + if (refinedName == tpnme.hkApplyOBS) parent.stripTypeVar match { case RefinedType(_, name, _) if name.isHkArgName => // ok case _ => badInst @@ -3015,6 +3021,8 @@ object Types { def withBindingKind(bk: BindingKind)(implicit ctx: Context) = derivedTypeBounds(lo, hi, bk) + //def checkBinding: this.type = { assert(isBinding); this } + def contains(tp: Type)(implicit ctx: Context): Boolean = tp match { case tp: TypeBounds => lo <:< tp.lo && tp.hi <:< hi case tp: ClassInfo => @@ -3052,16 +3060,20 @@ object Types { /** If this type and that type have the same variance, this variance, otherwise 0 */ final def commonVariance(that: TypeBounds): Int = (this.variance + that.variance) / 2 - override def computeHash = doHash(variance, lo, hi) + override def computeHash = doHash(variance * 41 + bindingKind.n, lo, hi) override def equals(that: Any): Boolean = that match { case that: TypeBounds => - (this.lo eq that.lo) && (this.hi eq that.hi) && this.variance == that.variance + (this.lo eq that.lo) && (this.hi eq that.hi) && + (this.variance == that.variance) && (this.bindingKind == that.bindingKind) case _ => false } - override def toString = - if (lo eq hi) s"TypeAlias($lo, $variance)" else s"TypeBounds($lo, $hi)" + override def toString = { + def bkString = if (isBinding) s"|bk=${BindingKind.toVariance(bindingKind)}" else "" + if (lo eq hi) s"TypeAlias($lo, $variance)" + else s"TypeBounds($lo, $hi$bkString)" + } } class RealTypeBounds(lo: Type, hi: Type, bk: BindingKind) extends TypeBounds(lo, hi)(bk) diff --git a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 1b4e7845a..d9a062263 100644 --- a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -325,8 +325,8 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) { readPackageRef().termRef case TYPEREF => val name = readName().toTypeName - if (name.isLambdaTraitName) // Make sure curresponding lambda trait exists - defn.LambdaTrait(name.lambdaTraitVariances) + if (name.isLambdaTraitNameOBS) // Make sure corresponding lambda trait exists + defn.LambdaTraitOBS(name.lambdaTraitVariancesOBS) TypeRef(readType(), name) case TERMREF => readNameSplitSig() match { diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 2663777af..557a9df74 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -632,7 +632,7 @@ class Scala2Unpickler(bytes: 
Array[Byte], classRoot: ClassDenotation, moduleClas case info => tp.derivedRefinedType(parent1, name, info) } - case tp @ TypeRef(pre, tpnme.hkApply) => + case tp @ TypeRef(pre, tpnme.hkApplyOBS) => tp.derivedSelect(elim(pre)) case _ => tp diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala index 59f1608db..20bf8b407 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -50,7 +50,7 @@ class PlainPrinter(_ctx: Context) extends Printer { homogenize(tp1) & homogenize(tp2) case OrType(tp1, tp2) => homogenize(tp1) | homogenize(tp2) - case tp @ TypeRef(_, tpnme.hkApply) => + case tp @ TypeRef(_, tpnme.hkApplyOBS) => val tp1 = tp.reduceProjection if (tp1 eq tp) tp else homogenize(tp1) case tp: LazyRef => diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 1020468a9..b5bc17c0c 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -116,7 +116,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (defn.isFunctionClass(cls)) return toTextFunction(args) if (defn.isTupleClass(cls)) return toTextTuple(args) return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close - case tp @ TypeLambda(variances, argBoundss, body) => + case tp @ TypeLambda(argBoundss, body) => + val variances = tp.classSymbol.typeParams.map(_.variance) val prefix = ((('X' - 'A') + lambdaNestingLevel) % 26 + 'A').toChar val paramNames = variances.indices.toList.map(prefix.toString + _) val instantiate = new TypeMap { diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala index b7e2fd832..4752d2827 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -432,7 +432,7 @@ trait TypeAssigner { .withBindingKind(BindingKind.fromVariance(tparam.variance)) .recursify(tparams)) val bodyFn = body.tpe.recursify(tparams) - tree.withType(TypeApplicationsNewHK.TypeLambda(argBindingFns, bodyFn)) + tree.withType(TypeApplications.TypeLambda(argBindingFns, bodyFn)) } def assignType(tree: untpd.ByNameTypeTree, result: Tree)(implicit ctx: Context) = -- cgit v1.2.3 From 5daae278392ed6fabd45c9fa55aded970ca2a348 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:12:29 +0200 Subject: Multiple fixes - Swap order of tests in lookupRefined - Change joins of BindingKinds. A type parameter joined with a normal refinement represents a type parameter that has been filled in, so the Binding attribute should be removed. - Fix printing of type lambdas under new hk scheme - Refine isRef for hk types. The new definition avoids treating a higher-kinded type as a ref to its underlying class, i.e. `[X] -> Any` is no longer a ref to `Any`. - Fix withBindingKind for type aliases. The old definition converted aliases to type bounds.
- Multiple fixes to BetaReduce - Fix logic for hk subtype tests - Make isHK more precise --- src/dotty/tools/dotc/core/TypeApplications.scala | 84 +++++++++++++++++----- src/dotty/tools/dotc/core/TypeComparer.scala | 33 ++++----- src/dotty/tools/dotc/core/Types.scala | 32 ++++++--- src/dotty/tools/dotc/printing/RefinedPrinter.scala | 7 +- 4 files changed, 109 insertions(+), 47 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 9ceae6e5f..dfd1caf62 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -285,17 +285,18 @@ object TypeApplications { TypeLambda.applyOBS(variances, bounds, body) } - private class InstMap(fullType: Type)(implicit ctx: Context) extends TypeMap { + private class InstMap(fullType: Type, shortLived: Boolean)(implicit ctx: Context) extends TypeMap { var localRecs: Set[RecType] = Set.empty var keptRefs: Set[Name] = Set.empty var isSafe: Boolean = true + var tyconIsHK: Boolean = true def apply(tp: Type): Type = tp match { case tp @ TypeRef(RecThis(rt), sel) if sel.isHkArgName && localRecs.contains(rt) => fullType.member(sel).info match { case TypeAlias(alias) => apply(alias) case _ => keptRefs += sel; tp } - case tp: TypeVar if !tp.inst.exists => + case tp: TypeVar if !tp.inst.exists && !shortLived => isSafe = false tp case _ => @@ -451,8 +452,10 @@ class TypeApplications(val self: Type) extends AnyVal { def isHK(implicit ctx: Context): Boolean = self.dealias match { case self: TypeRef => self.info.isHK case self: RefinedType => self.refinedName == tpnme.hkApplyOBS || self.isTypeParam - case self: RecType => self.parent.isHK - case TypeBounds(_, hi) => hi.isHK + case self: SingletonType => false + case self: TypeVar => self.origin.isHK + case self: WildcardType => self.optBounds.isHK + case self: TypeProxy => self.underlying.isHK case _ => false } @@ -552,20 +555,55 @@ class TypeApplications(val self: Type) extends AnyVal { assert(!isHK, self) self match { case self: TypeAlias => - self.derivedTypeAlias(expand(self.alias.BetaReduce)) + self.derivedTypeAlias(expand(self.alias.BetaReduce())) case self @ TypeBounds(lo, hi) => if (Config.newHK) - self.derivedTypeBounds(lo, expand(hi.BetaReduce)) + self.derivedTypeBounds(lo, expand(hi.BetaReduce())) else - self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi.BetaReduce))) + self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi.BetaReduce()))) case _ => expand(self) } } - def BetaReduce(implicit ctx: Context): Type = self.dealias match { - case self1 @ RefinedType(_, rname, _) if Config.newHK && rname.isHkArgName => - val inst = new InstMap(self) - def instTop(tp: Type): Type = { + /** If `self` is a * type, perform the following rewritings: + * + * 1. For every occurrence of `z.$hk_i`, where `z` is a RecThis type that refers + * to some recursive type in `self`, if the member of `self.hk$i` has an alias + * type `= U`: + * + * z.$hk_i --> U + * + * 2. For every top-level binding `type A = z.$hk_i$, where `z` is a RecThis type that refers + * to some recursive type in `self`, if the member of `self` has bounds `S..U`: + * + * type A = z.$hk_i --> type A >: S <: U + * + * 3. If the type constructor preceding all bindings is a * type, delete every top-level + * binding `{ type $hk_i ... }` where `$hk_i` does not appear in the prefix of the binding. + * I.e. + * + * T { type $hk_i ... } --> T + * + * If `$hk_i` does not appear in `T`. 
+ * + * A binding is top-level if it can be reached by + * + * - following aliases + * - dropping refinements and rec-types + * - going from a wildcard type to its upper bound + * + * @param shortLived If `false` suppresses all rewritings where a type variable with + * an unknown or uncommitted instance is rewritten. Reason: If the + * type variable is finally instantiated to something else, the + * reduction might not be valid anymore. However, when reducing + * during `<:<` tests `shortLived` is true and the reduction + * is never suppressed, because then we are only interested + * in subtyping relative to the current context. + */ + def BetaReduce(shortLived: Boolean = false)(implicit ctx: Context): Type = self.dealias match { + case self1 @ RefinedType(_, rname, _) if Config.newHK && rname.isHkArgName && self1.typeParams.isEmpty => + val inst = new InstMap(self, shortLived) + def instTop(tp: Type): Type = if (!inst.isSafe) tp else tp.dealias match { case tp: RecType => @@ -574,18 +612,30 @@ class TypeApplications(val self: Type) extends AnyVal { case tp @ RefinedType(parent, rname, rinfo) => rinfo match { case TypeAlias(TypeRef(RecThis(rt), sel)) if sel.isHkArgName && inst.localRecs.contains(rt) => - instTop(tp.derivedRefinedType(parent, rname, self.member(sel).info)) + val bounds @ TypeBounds(_, _) = self.member(sel).info + instTop(tp.derivedRefinedType(parent, rname, bounds.withBindingKind(NoBinding))) case _ => val parent1 = instTop(parent) - if (rname.isHkArgName && !inst.keptRefs.contains(rname)) parent1 + if (rname.isHkArgName && + !inst.tyconIsHK && + !inst.keptRefs.contains(rname)) parent1 else tp.derivedRefinedType(parent1, rname, inst(rinfo)) } + case tp @ WildcardType(bounds @ TypeBounds(lo, hi)) => + tp.derivedWildcardType(bounds.derivedTypeBounds(inst(lo), instTop(hi))) case tp => - inst(tp) - }} + inst.tyconIsHK = tp.isHK + val res = inst(tp) + tp match { + case tp: WildcardType => + println(s"inst $tp --> $res") + case _ => + } + res + } val reduced = instTop(self) if (inst.isSafe) reduced else self - case self1 => self1 + case _ => self } /** A type ref is eta expandable if it refers to a non-lambda class. @@ -761,7 +811,7 @@ class TypeApplications(val self: Type) extends AnyVal { matchParams(RefinedType(t, tparam.memberName, arg.toBounds(tparam)), tparams1, args1) } catch { case ex: MatchError => - println(s"applied type mismatch: $self $args, typeParams = $typParams") // !!! DEBUG + println(s"applied type mismatch: $self with underlying ${self.underlyingIfProxy}, args = $args, typeParams = $typParams") // !!! 
DEBUG //println(s"precomplete decls = ${self.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}") throw ex } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 3648b3764..b0c36ca58 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -354,12 +354,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp2: RefinedType => def compareRefinedSlow: Boolean = { val name2 = tp2.refinedName - if (name2.isHkArgName) { - val tp2reduced = tp2.BetaReduce - if (Config.newHK && (tp2reduced ne tp2)) return isSubType(tp1, tp2reduced) - if (Config.newHK && tp2.isTypeParam) return compareHkLambda(tp2, tp1, inOrder = false) - if (Config.newHK && !tp1.isHKApply) return compareHkApply(tp2, tp1, inOrder = false) - } isSubType(tp1, tp2.parent) && (name2 == nme.WILDCARD || hasMatchingMember(name2, tp1, tp2)) } @@ -374,10 +368,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { // This twist is needed to make collection/generic/ParFactory.scala compile fourthTry(tp1, tp2) || compareRefinedSlow case _ => - compareRefinedSlow || - fourthTry(tp1, tp2) || compareHkApply(tp2, tp1, inOrder = false) || compareHkLambda(tp2, tp1, inOrder = false) || + compareRefinedSlow || + fourthTry(tp1, tp2) || compareAliasedRefined(tp2, tp1, inOrder = false) } else // fast path, in particular for refinements resulting from parameterization. @@ -501,15 +495,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths case tp1: RefinedType => - if (Config.newHK && tp1.refinedName.isHkArgName) { - val tp1reduced = tp1.BetaReduce - if (Config.newHK && (tp1reduced ne tp1)) return isSubType(tp1reduced, tp2) - if (Config.newHK && tp1.isTypeParam) return compareHkLambda(tp1, tp2, inOrder = true) - if (Config.newHK && !tp2.isHKApply) return compareHkApply(tp1, tp2, inOrder = true) - } isNewSubType(tp1.parent, tp2) || - !Config.newHK && compareHkLambda(tp1, tp2, inOrder = true) || - !Config.newHK && compareAliasedRefined(tp1, tp2, inOrder = true) + compareHkApply(tp1, tp2, inOrder = true) || + compareHkLambda(tp1, tp2, inOrder = true) || + compareAliasedRefined(tp1, tp2, inOrder = true) case tp1: RecType => isNewSubType(tp1.parent, tp2) case AndType(tp11, tp12) => @@ -609,7 +598,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { val hkTypeParams = param.typeParams subtyping.println(i"classBounds = ${projection.prefix.member(tpnme.hkApplyOBS).info.classSymbols}") subtyping.println(i"base classes = ${other.baseClasses}") - subtyping.println(i"type params = $hkTypeParams") + subtyping.println(i"type params = ${hkTypeParams.map(_.memberName)}") if (inOrder) unifyWith(other) else testLifted(other, projection.prefix, hkTypeParams, unifyWith) case _ => @@ -671,13 +660,17 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } } - Config.newHK && app.isHKApply && !other.isHKApply && tryInfer(app.typeConstructor.dealias) + Config.newHK && app.isHKApply && !other.isHKApply && { + val reduced = app.BetaReduce(shortLived = true) + if (reduced ne app) + if (inOrder) isSubType(reduced, other) else isSubType(other, reduced) + else tryInfer(app.typeConstructor.dealias) + } } /** Compare type lambda with non-lambda type. 
*/ def compareHkLambda(rt: RefinedType, other: Type, inOrder: Boolean) = rt match { case TypeLambda(args, body) => - other.isInstanceOf[TypeRef] && args.length == other.typeParams.length && { val applied = other.appliedTo(argRefs(rt, args.length)) if (inOrder) isSubType(body, applied) @@ -1510,7 +1503,7 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { override def compareHkApply(app: RefinedType, other: Type, inOrder: Boolean) = if (app.isHKApply) - traceIndented(i"compareHkApply $app, $other, $inOrder, ${app.typeConstructor.dealias}") { + traceIndented(i"compareHkApply $app, $other, $inOrder, ${app.BetaReduce(shortLived = true)}") { super.compareHkApply(app, other, inOrder) } else super.compareHkApply(app, other, inOrder) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 22a26968c..78003d972 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -114,7 +114,9 @@ object Types { case TypeAlias(tp) => tp.isRef(sym) case _ => this1.symbol eq sym } - case this1: RefinedOrRecType => + case this1: RefinedType => + !this1.isTypeParam && this1.parent.isRef(sym) + case this1: RecType => this1.parent.isRef(sym) case _ => false @@ -895,6 +897,14 @@ object Types { def narrow(implicit ctx: Context): TermRef = TermRef(NoPrefix, ctx.newSkolem(this)) + /** Useful for diagnsotics: The underlying type if this type is a type proxy, + * otherwise NoType + */ + def underlyingIfProxy(implicit ctx: Context) = this match { + case this1: TypeProxy => this1.underlying + case _ => NoType + } + // ----- Normalizing typerefs over refined types ---------------------------- /** If this normalizes* to a refinement type that has a refinement for `name` (which might be followed @@ -959,10 +969,12 @@ object Types { pre.refinedInfo match { case TypeAlias(alias) => if (pre.refinedName ne name) loop(pre.parent) - else if (!pre.refinementRefersToThis) alias else alias match { case TypeRef(RefinedThis(`pre`), aliasName) => lookupRefined(aliasName) // (1) - case _ => if (name == tpnme.hkApplyOBS) betaReduce(alias) else NoType // (2) // ### use TypeApplication's betaReduce + case _ => + if (!pre.refinementRefersToThis) alias + else if (name == tpnme.hkApplyOBS) betaReduce(alias) + else NoType } case _ => loop(pre.parent) } @@ -3019,7 +3031,10 @@ object Types { case _ => this } - def withBindingKind(bk: BindingKind)(implicit ctx: Context) = derivedTypeBounds(lo, hi, bk) + def withBindingKind(bk: BindingKind)(implicit ctx: Context) = this match { + case tp: TypeAlias => assert(bk == NoBinding); this + case _ => derivedTypeBounds(lo, hi, bk) + } //def checkBinding: this.type = { assert(isBinding); this } @@ -3070,7 +3085,7 @@ object Types { } override def toString = { - def bkString = if (isBinding) s"|bk=${BindingKind.toVariance(bindingKind)}" else "" + def bkString = if (isBinding) s"|v=${BindingKind.toVariance(bindingKind)}" else "" if (lo eq hi) s"TypeAlias($lo, $variance)" else s"TypeBounds($lo, $hi$bkString)" } @@ -3129,8 +3144,7 @@ object Types { class BindingKind(val n: Byte) extends AnyVal { def join(that: BindingKind) = if (this == that) this - else if (this == NoBinding) that - else if (that == NoBinding) this + else if (this == NoBinding || that == NoBinding) NoBinding else NonvariantBinding } @@ -3202,7 +3216,9 @@ object Types { /** Wildcard type, possibly with bounds */ abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermType { def derivedWildcardType(optBounds: 
Type)(implicit ctx: Context) = - if (optBounds eq this.optBounds) this else WildcardType(optBounds.asInstanceOf[TypeBounds]) + if (optBounds eq this.optBounds) this + else if (!optBounds.exists) WildcardType + else WildcardType(optBounds.asInstanceOf[TypeBounds]) override def computeHash = doHash(optBounds) } diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index b5bc17c0c..61e29982b 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -12,6 +12,7 @@ import typer.ProtoTypes.{SelectionProto, ViewProto, FunProto, IgnoredProto, dumm import Trees._ import TypeApplications._ import Decorators._ +import config.Config import scala.annotation.switch import language.implicitConversions @@ -117,9 +118,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (defn.isTupleClass(cls)) return toTextTuple(args) return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close case tp @ TypeLambda(argBoundss, body) => - val variances = tp.classSymbol.typeParams.map(_.variance) + val variances = + if (Config.newHK) argBoundss.map(b => BindingKind.toVariance(b.bindingKind)) + else tp.classSymbol.typeParams.map(_.variance) val prefix = ((('X' - 'A') + lambdaNestingLevel) % 26 + 'A').toChar - val paramNames = variances.indices.toList.map(prefix.toString + _) + val paramNames = argBoundss.indices.toList.map(prefix.toString + _) val instantiate = new TypeMap { def contains(tp1: Type, tp2: Type): Boolean = tp1.eq(tp2) || { -- cgit v1.2.3 From e56bd1fe7a5d2a3821e8c44f0ce35b75be8d7d2b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 2 Jun 2016 18:46:49 +0200 Subject: Beta-reduce when simplifying --- src/dotty/tools/dotc/core/TypeOps.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index bb9566b6f..46771a5aa 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -158,6 +158,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. tp case tp: RefinedType => tp.derivedRefinedType(simplify(tp.parent, theMap), tp.refinedName, simplify(tp.refinedInfo, theMap)) + .BetaReduce() case tp: TypeAlias => tp.derivedTypeAlias(simplify(tp.alias, theMap)) case AndType(l, r) => @@ -383,7 +384,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. var formals: SimpleMap[TypeName, Symbol] = SimpleMap.Empty // A map of all formal parent parameter // Strip all refinements from parent type, populating `refinements` and `formals` maps. - def normalizeToRef(tp: Type): TypeRef = tp.dealias.BetaReduce match { + def normalizeToRef(tp: Type): TypeRef = tp.dealias.BetaReduce() match { case tp: TypeRef => tp case tp @ RefinedType(tp1, name: TypeName, rinfo) => -- cgit v1.2.3 From e61b80ae4db8d2fdd7ed43a834f0de86d1edda15 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 2 Jun 2016 21:05:08 +0200 Subject: Refinement of cycle avoidance The previous fix caused 4 tests to fail under the old hk scheme. 
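The asMemberOf change in the diff below uses a save/compute/restore pattern: it parks a harmless placeholder denotation in the cache so that a lookup which re-enters itself terminates, and restores the previous entry afterwards if the placeholder is still in place. The following standalone sketch only illustrates that shape; MemberCache, placeholder and cachedLookup are invented names and are not part of this patch.

class MemberCache[A](placeholder: A) {
  // The most recently computed entry, if any.
  private var last: Option[A] = None

  // Install the placeholder before computing, so a re-entrant lookup sees
  // a benign answer instead of looping; keep the real result on success,
  // or put the saved entry back if the computation did not replace it.
  def cachedLookup(compute: => A): A = {
    val saved = last
    last = Some(placeholder)
    try { val res = compute; last = Some(res); res }
    finally if (last.contains(placeholder)) last = saved
  }
}

object MemberCacheDemo extends App {
  val cache = new MemberCache[String]("type T >: Nothing <: Any")
  println(cache.cachedLookup("the real member info"))
}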
--- src/dotty/tools/dotc/core/SymDenotations.scala | 5 +++++ src/dotty/tools/dotc/core/Types.scala | 25 ++++++++++++++++++++++--- 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index 8fefdf7a7..124199678 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -92,6 +92,11 @@ trait SymDenotations { this: Context => explain("denotation is not a SymDenotation") } } + + /** An anonymous type denotation with an info `>: Nothing <: Any`. Used to + * avoid stackoverflows when computing members of TypeRefs + */ + lazy val anyTypeDenot = new JointRefDenotation(NoSymbol, TypeBounds.empty, Period.allInRun(ctx.runId)) } object SymDenotations { diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 78003d972..e4132bf4d 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1575,9 +1575,19 @@ object Types { } } - protected def asMemberOf(prefix: Type)(implicit ctx: Context) = - if (name.isShadowedName) prefix.nonPrivateMember(name.revertShadowed) - else prefix.member(name) + protected def asMemberOf(prefix: Type)(implicit ctx: Context): Denotation = { + // we might now get cycles over members that are in a refinement but that lack + // a symbol. Without the following precaution i974.scala stackoverflows when compiled + // with new hk scheme. + val saved = lastDenotation + if (name.isTypeName && lastDenotation != null && (lastDenotation.symbol ne NoSymbol)) + lastDenotation = ctx.anyTypeDenot + try + if (name.isShadowedName) prefix.nonPrivateMember(name.revertShadowed) + else prefix.member(name) + finally + if (lastDenotation eq ctx.anyTypeDenot) lastDenotation = saved + } /** (1) Reduce a type-ref `W # X` or `W { ... } # U`, where `W` is a wildcard type * to an (unbounded) wildcard type. 
@@ -2091,6 +2101,15 @@ object Types { throw new AssertionError(s"bad instantiation: $this") def checkInst(implicit ctx: Context): this.type = { + if (false && Config.newHK && refinedName.isHkArgName && refinedInfo.isInstanceOf[TypeAlias]) { + parent.stripTypeVar match { + case TypeApplications.TypeLambda(_, _) => + println(i"fshy: $this") + println(s"fshy: $this") + new Error().printStackTrace() + case _ => + } + } if (refinedName == tpnme.hkApplyOBS) parent.stripTypeVar match { case RefinedType(_, name, _) if name.isHkArgName => // ok -- cgit v1.2.3 From a7d61c0ffc99c52109937c899c789ad9ea5d6a5b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 2 Jun 2016 18:59:42 +0200 Subject: Make etaExpandIfHk work for non-symbol type params Fixes a crasher in t2994.scala --- src/dotty/tools/dotc/core/TypeApplications.scala | 12 +++++++++--- src/dotty/tools/dotc/typer/Namer.scala | 2 +- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index dfd1caf62..c270288b2 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -236,9 +236,15 @@ object TypeApplications { /** Adapt all arguments to possible higher-kinded type parameters using etaExpandIfHK */ - def etaExpandIfHK(tparams: List[Symbol], args: List[Type])(implicit ctx: Context): List[Type] = + def etaExpandIfHK(tparams: List[MemberBinding], args: List[Type])(implicit ctx: Context): List[Type] = if (tparams.isEmpty) args - else args.zipWithConserve(tparams)((arg, tparam) => arg.etaExpandIfHK(tparam.infoOrCompleter)) + else { + def bounds(tparam: MemberBinding) = tparam match { + case tparam: Symbol => tparam.infoOrCompleter + case tparam: RefinedType => tparam.memberBounds + } + args.zipWithConserve(tparams)((arg, tparam) => arg.etaExpandIfHK(bounds(tparam))) + } /** The references `.this.$hk0, ..., .this.$hk`. */ def argRefs(rt: RefinedType, n: Int)(implicit ctx: Context) = @@ -374,7 +380,7 @@ class TypeApplications(val self: Type) extends AnyVal { final def typeParamSymbols(implicit ctx: Context): List[TypeSymbol] = { val tparams = typeParams - assert(tparams.isEmpty || tparams.head.isInstanceOf[Symbol]) + assert(tparams.isEmpty || tparams.head.isInstanceOf[Symbol], self) tparams.asInstanceOf[List[TypeSymbol]] } diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 8437b651c..a8f3b8918 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -989,7 +989,7 @@ class Namer { typer: Typer => if (args.nonEmpty) { val tycon = tp.withoutArgs(args) val tycon1 = this(tycon) - val tparams = tycon.typeParamSymbols + val tparams = tycon.typeParams val args1 = if (args.length == tparams.length) etaExpandIfHK(tparams, args) else args if ((tycon1 eq tycon) && (args1 eq args)) tp else tycon1.appliedTo(args1) } else mapOver(tp) -- cgit v1.2.3 From d0f82a50bffc059803b56a341c8fcd9a238431f7 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 3 Jun 2016 18:42:23 +0200 Subject: Don't map info of SkolemTypes in type maps Mapping the info may create new skolems, which undermines the idea of a skolem as a fixed reference. In a sense, SkolemTypes are like TermRefs: mapping them does not map their info either. Creating new skolems on the fly in type maps caused some hard-to-find infinite loops under the new hk scheme.
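A minimal standalone sketch of the invariant this commit protects; the Skolem class and the demo object are invented for illustration and are not the compiler's own types.

// A skolem stands for one fixed, unknown value, so it must be compared by
// reference identity rather than by the info it carries.
final class Skolem(val info: String)

object SkolemIdentityDemo extends App {
  val original = new Skolem("T")
  // A type map that rebuilt the skolem from a mapped info would produce a
  // different skolem and lose that identity, which is what the fix avoids.
  val rebuilt = new Skolem(original.info.toUpperCase)
  println(original eq original)  // true: still the same fixed reference
  println(original eq rebuilt)   // false: rebuilding broke the identity
}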
--- src/dotty/tools/dotc/core/Types.scala | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index e4132bf4d..4f5bec56b 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -3333,8 +3333,6 @@ object Types { tp.derivedSuperType(thistp, supertp) protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type): Type = tp.derivedAndOrType(tp1, tp2) - protected def derivedSkolemType(tp: SkolemType, info: Type): Type = - tp.derivedSkolemType(info) protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type = tp.derivedAnnotatedType(underlying, annot) protected def derivedWildcardType(tp: WildcardType, bounds: Type): Type = @@ -3419,7 +3417,7 @@ object Types { derivedAndOrType(tp, this(tp.tp1), this(tp.tp2)) case tp: SkolemType => - derivedSkolemType(tp, this(tp.info)) + tp case tp @ AnnotatedType(underlying, annot) => val underlying1 = this(underlying) @@ -3522,9 +3520,6 @@ object Types { if (tp1.exists && tp2.exists) tp.derivedAndOrType(tp1, tp2) else if (tp.isAnd) approx(hi = tp1 & tp2) // if one of tp1d, tp2d exists, it is the result of tp1d & tp2d else approx(lo = tp1 & tp2) - override protected def derivedSkolemType(tp: SkolemType, info: Type) = - if (info.exists) tp.derivedSkolemType(info) - else NoType override protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation) = if (underlying.exists) tp.derivedAnnotatedType(underlying, annot) else NoType -- cgit v1.2.3 From c136af18c1ff37663393e0ad738926776946679e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:35:24 +0200 Subject: Fixes to BetaReduce and asMemberOf; add a second betaReduce The new one only reduces straight applications of type lambdas with definite arguments. It is called very early on appliedTo, and derivedRefinedType. The old one, now renamed to normalizeHkApply also handles wildcard arguments and can garbage collect general unneeded hk-refinements. It is called later, at various places. TODO: See what functionality of normalizeHkApply should go into betaReduce instead. Maybe we can even drop normalizeHkApply? However: need to be careful to maintain aliases for hk type inference. Handle LazyRefs in BetaReduce Needs to be careful to not skip LazyRefs when dealiasing. - Fix^2 of asMemberOf: This fix ensures that - under the old hk scheme test succeeds for compilestdlib and tasty-new-all - under the new scheme test succeeds for i94-nada (i.e. REP[T] = T). - Try to beta-reduce bounds before adding to a constraint. - More subtle handling of LazyRefs in BetaReduce - Another refinement to asMemberOf Need to assume lastSymbol in sync with lastDenotation. - Drop isSafe test from BetaReduce Instead, track the higherkinded argument names that a type variable could potentially instantiate to. 
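As a source-level analogue of what the new betaReduce does (a deliberately simplified sketch; Pair and the demo object are invented for illustration and do not appear in this patch): applying a type constructor to a definite argument and replacing the application by the instantiated body.

object BetaReduceDemo extends App {
  // A simple unary type function.
  type Pair[X] = (X, X)
  // Beta reduction rewrites the application Pair[Int] to its instantiated
  // body (Int, Int); both annotations below denote the same type.
  val applied: Pair[Int] = (1, 2)
  val reduced: (Int, Int) = applied
  println(reduced)
}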
--- src/dotty/tools/dotc/core/ConstraintHandling.scala | 2 + src/dotty/tools/dotc/core/TypeApplications.scala | 115 ++++++++++++--------- src/dotty/tools/dotc/core/TypeComparer.scala | 4 +- src/dotty/tools/dotc/core/TypeOps.scala | 4 +- src/dotty/tools/dotc/core/Types.scala | 39 ++++--- 5 files changed, 97 insertions(+), 67 deletions(-) diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index f8eae186a..3b368ad5e 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -285,6 +285,8 @@ trait ConstraintHandling { if (!addParamBound(bound)) NoType else if (fromBelow) defn.NothingType else defn.AnyType + case bound: RefinedType => + bound.BetaReduce case _ => bound } diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index c270288b2..7b2d2c3b2 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -291,10 +291,9 @@ object TypeApplications { TypeLambda.applyOBS(variances, bounds, body) } - private class InstMap(fullType: Type, shortLived: Boolean)(implicit ctx: Context) extends TypeMap { + private class InstMap(fullType: Type)(implicit ctx: Context) extends TypeMap { var localRecs: Set[RecType] = Set.empty var keptRefs: Set[Name] = Set.empty - var isSafe: Boolean = true var tyconIsHK: Boolean = true def apply(tp: Type): Type = tp match { case tp @ TypeRef(RecThis(rt), sel) if sel.isHkArgName && localRecs.contains(rt) => @@ -302,8 +301,13 @@ object TypeApplications { case TypeAlias(alias) => apply(alias) case _ => keptRefs += sel; tp } - case tp: TypeVar if !tp.inst.exists && !shortLived => - isSafe = false + case tp: TypeVar if !tp.inst.exists => + val bounds = tp.instanceOpt.orElse(ctx.typeComparer.bounds(tp.origin)) + bounds.foreachPart { + case TypeRef(RecThis(rt), sel) if sel.isHkArgName && localRecs.contains(rt) => + keptRefs += sel + case _ => + } tp case _ => mapOver(tp) @@ -561,12 +565,12 @@ class TypeApplications(val self: Type) extends AnyVal { assert(!isHK, self) self match { case self: TypeAlias => - self.derivedTypeAlias(expand(self.alias.BetaReduce())) + self.derivedTypeAlias(expand(self.alias.BetaReduce)) case self @ TypeBounds(lo, hi) => if (Config.newHK) - self.derivedTypeBounds(lo, expand(hi.BetaReduce())) + self.derivedTypeBounds(lo, expand(hi.BetaReduce)) else - self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi.BetaReduce()))) + self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi.BetaReduce))) case _ => expand(self) } } @@ -594,57 +598,68 @@ class TypeApplications(val self: Type) extends AnyVal { * * A binding is top-level if it can be reached by * - * - following aliases + * - following aliases unless the type is a LazyRef + * (need to keep cycle breakers around, see i974.scala) * - dropping refinements and rec-types * - going from a wildcard type to its upper bound - * - * @param shortLived If `false` suppresses all rewritings where a type variable with - * an unknown or uncommitted instance is rewritten. Reason: If the - * type variable is finally instantiated to something else, the - * reduction might not be valid anymore. However, when reducing - * during `<:<` tests `shortLived` is true and the reduction - * is never suppressed, because then we are only interested - * in subtyping relative to the current context. 
*/ - def BetaReduce(shortLived: Boolean = false)(implicit ctx: Context): Type = self.dealias match { + def BetaReduce(implicit ctx: Context): Type = self.strictDealias match { case self1 @ RefinedType(_, rname, _) if Config.newHK && rname.isHkArgName && self1.typeParams.isEmpty => - val inst = new InstMap(self, shortLived) - def instTop(tp: Type): Type = - if (!inst.isSafe) tp - else tp.dealias match { - case tp: RecType => - inst.localRecs += tp - tp.rebind(instTop(tp.parent)) - case tp @ RefinedType(parent, rname, rinfo) => - rinfo match { - case TypeAlias(TypeRef(RecThis(rt), sel)) if sel.isHkArgName && inst.localRecs.contains(rt) => - val bounds @ TypeBounds(_, _) = self.member(sel).info - instTop(tp.derivedRefinedType(parent, rname, bounds.withBindingKind(NoBinding))) - case _ => - val parent1 = instTop(parent) - if (rname.isHkArgName && - !inst.tyconIsHK && - !inst.keptRefs.contains(rname)) parent1 - else tp.derivedRefinedType(parent1, rname, inst(rinfo)) - } - case tp @ WildcardType(bounds @ TypeBounds(lo, hi)) => - tp.derivedWildcardType(bounds.derivedTypeBounds(inst(lo), instTop(hi))) - case tp => - inst.tyconIsHK = tp.isHK - val res = inst(tp) - tp match { - case tp: WildcardType => - println(s"inst $tp --> $res") - case _ => - } - res + val inst = new InstMap(self) + + def instTop(tp: Type): Type = tp.strictDealias match { + case tp: RecType => + inst.localRecs += tp + tp.rebind(instTop(tp.parent)) + case tp @ RefinedType(parent, rname, rinfo) => + rinfo match { + case TypeAlias(TypeRef(RecThis(rt), sel)) if sel.isHkArgName && inst.localRecs.contains(rt) => + val bounds @ TypeBounds(_, _) = self.member(sel).info + instTop(tp.derivedRefinedType(parent, rname, bounds.withBindingKind(NoBinding))) + case _ => + val parent1 = instTop(parent) + if (rname.isHkArgName && + !inst.tyconIsHK && + !inst.keptRefs.contains(rname)) parent1 + else tp.derivedRefinedType(parent1, rname, inst(rinfo)) + } + case tp @ WildcardType(bounds @ TypeBounds(lo, hi)) => + tp.derivedWildcardType(bounds.derivedTypeBounds(inst(lo), instTop(hi))) + case tp: LazyRef => + instTop(tp.ref) + case tp => + inst.tyconIsHK = tp.isHK + val res = inst(tp) + tp match { + case tp: WildcardType => + println(s"inst $tp --> $res") + case _ => + } + res + } + + def isLazy(tp: Type): Boolean = tp.strictDealias match { + case tp: RefinedOrRecType => isLazy(tp.parent) + case tp @ WildcardType(bounds @ TypeBounds(lo, hi)) => isLazy(hi) + case tp: LazyRef => true + case _ => false + } + + val reduced = + if (isLazy(self1)) { + // A strange dance is needed here to make 974.scala compile. + val res = LazyRef(() => instTop(self)) + res.ref // without this line, pickling 974.scala fails with an assertion error + // saying that we address a RecThis outside its Rec (in the case of RecThis of pickleNewType) + res // without this line, typing 974.scala gives a stackoverflow in asSeenFrom. } - val reduced = instTop(self) - if (inst.isSafe) reduced else self + else instTop(self) + if (reduced ne self) hk.println(i"reduce $self --> $reduced") + reduced case _ => self } - /** A type ref is eta expandable if it refers to a non-lambda class. + /** A type ref is eta expandable if it refers to a non-lambda class. * In that case we can look for parameterized base types of the type * to eta expand them. 
*/ diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index b0c36ca58..c82dc6a39 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -661,7 +661,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } } Config.newHK && app.isHKApply && !other.isHKApply && { - val reduced = app.BetaReduce(shortLived = true) + val reduced = app.BetaReduce if (reduced ne app) if (inOrder) isSubType(reduced, other) else isSubType(other, reduced) else tryInfer(app.typeConstructor.dealias) @@ -1503,7 +1503,7 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { override def compareHkApply(app: RefinedType, other: Type, inOrder: Boolean) = if (app.isHKApply) - traceIndented(i"compareHkApply $app, $other, $inOrder, ${app.BetaReduce(shortLived = true)}") { + traceIndented(i"compareHkApply $app, $other, $inOrder, ${app.BetaReduce}") { super.compareHkApply(app, other, inOrder) } else super.compareHkApply(app, other, inOrder) diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index 46771a5aa..c6a18f305 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -158,7 +158,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. tp case tp: RefinedType => tp.derivedRefinedType(simplify(tp.parent, theMap), tp.refinedName, simplify(tp.refinedInfo, theMap)) - .BetaReduce() + .BetaReduce case tp: TypeAlias => tp.derivedTypeAlias(simplify(tp.alias, theMap)) case AndType(l, r) => @@ -384,7 +384,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. var formals: SimpleMap[TypeName, Symbol] = SimpleMap.Empty // A map of all formal parent parameter // Strip all refinements from parent type, populating `refinements` and `formals` maps. - def normalizeToRef(tp: Type): TypeRef = tp.dealias.BetaReduce() match { + def normalizeToRef(tp: Type): TypeRef = tp.dealias.BetaReduce match { case tp: TypeRef => tp case tp @ RefinedType(tp1, name: TypeName, rinfo) => diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 4f5bec56b..dadb5b95e 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -822,7 +822,16 @@ object Types { /** Follow aliases and dereferences LazyRefs and instantiated TypeVars until type * is no longer alias type, LazyRef, or instantiated type variable. */ - final def dealias(implicit ctx: Context): Type = this match { + final def dealias(implicit ctx: Context): Type = strictDealias match { + case tp: LazyRef => tp.ref.dealias + case tp => tp + } + + /** Follow aliases and instantiated TypeVars until type + * is no longer alias type, or instantiated type variable. 
+ * Do not follow LazyRefs + */ + final def strictDealias(implicit ctx: Context): Type = this match { case tp: TypeRef => if (tp.symbol.isClass) tp else tp.info match { @@ -832,19 +841,11 @@ object Types { case tp: TypeVar => val tp1 = tp.instanceOpt if (tp1.exists) tp1.dealias else tp - case tp: LazyRef => - tp.ref.dealias case tp: AnnotatedType => tp.derivedAnnotatedType(tp.tpe.dealias, tp.annot) case tp => tp } - /** If this is a TypeAlias type, its alias otherwise this type itself */ - final def followTypeAlias(implicit ctx: Context): Type = this match { - case TypeAlias(alias) => alias - case _ => this - } - /** Perform successive widenings and dealiasings until none can be applied anymore */ final def widenDealias(implicit ctx: Context): Type = { val res = this.widen.dealias @@ -859,6 +860,12 @@ object Types { case _ => this } + /** If this is a TypeAlias type, its alias otherwise this type itself */ + final def followTypeAlias(implicit ctx: Context): Type = this match { + case TypeAlias(alias) => alias + case _ => this + } + /** If this is a (possibly aliased, annotated, and/or parameterized) reference to * a class, the class type ref, otherwise NoType. * @param refinementOK If `true` we also skip non-parameter refinements. @@ -1579,14 +1586,20 @@ object Types { // we might now get cycles over members that are in a refinement but that lack // a symbol. Without the following precaution i974.scala stackoverflows when compiled // with new hk scheme. - val saved = lastDenotation - if (name.isTypeName && lastDenotation != null && (lastDenotation.symbol ne NoSymbol)) + val savedDenot = lastDenotation + val savedSymbol = lastSymbol + if (prefix.isInstanceOf[RecThis] && name.isTypeName) { lastDenotation = ctx.anyTypeDenot + lastSymbol = NoSymbol + } try if (name.isShadowedName) prefix.nonPrivateMember(name.revertShadowed) else prefix.member(name) finally - if (lastDenotation eq ctx.anyTypeDenot) lastDenotation = saved + if (lastDenotation eq ctx.anyTypeDenot) { + lastDenotation = savedDenot + lastSymbol = savedSymbol + } } /** (1) Reduce a type-ref `W # X` or `W { ... } # U`, where `W` is a wildcard type @@ -2753,7 +2766,7 @@ object Types { myRepr } - override def toString = s"Skolem($info)" + override def toString = s"Skolem($hashCode)" } final class CachedSkolemType(info: Type) extends SkolemType(info) -- cgit v1.2.3 From ae1f248ff407b231455a43ecbaf4751c0bb2bbaa Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 4 Jun 2016 18:07:05 +0200 Subject: Normalize RecTypes on creation to avoid cycles. With this change, ski compiles (but with more errors than before). Without it, it goes into various infinite recursions. --- src/dotty/tools/dotc/core/Types.scala | 36 ++++++++++++++++++++++++------ tests/neg/ski.scala | 41 ++++++++++++++++++----------------- 2 files changed, 50 insertions(+), 27 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index dadb5b95e..403b49da6 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2254,14 +2254,32 @@ object Types { tp } */ + + /** Create a RecType, normalizing its contents. This means: + * + * 1. Nested Rec types on the type's spine are merged with the outer one. + * 2. Any refinement of the form `type T = z.T` on the spine of the type + * where `z` refers to the created rec-type is replaced by + * `type T`. This avoids infinite recursions later when we + * try to follow these references. 
+ * TODO: Figure out how to guarantee absence of cycles + * of length > 1 + */ def apply(parentExp: RecType => Type)(implicit ctx: Context): RecType = { - var rt = new RecType(parentExp) - rt.parent match { - case rt2: RecType => - rt = rt2.derivedRecType(rt2.parent.substRecThis(rt, RecThis(rt2))) - case _ => + val rt = new RecType(parentExp) + def normalize(tp: Type): Type = tp.stripTypeVar match { + case tp: RecType => + normalize(tp.parent.substRecThis(tp, RecThis(rt))) + case tp @ RefinedType(parent, rname, rinfo) => + val rinfo1 = rinfo match { + case TypeAlias(TypeRef(RecThis(`rt`), `rname`)) => TypeBounds.empty + case _ => rinfo + } + tp.derivedRefinedType(normalize(parent), rname, rinfo1) + case tp => + tp } - unique(rt) + unique(rt.derivedRecType(normalize(rt.parent))) } def closeOver(parentExp: RecType => Type)(implicit ctx: Context) = { val rt = this(parentExp) @@ -2747,7 +2765,11 @@ object Types { case that: RecThis => this.binder eq that.binder case _ => false } - override def toString = s"RecThis(${binder.hashCode})" + override def toString = + try s"RecThis(${binder.hashCode})" + catch { + case ex: NullPointerException => s"RecThis()" + } } // ----- Skolem types ----------------------------------------------- diff --git a/tests/neg/ski.scala b/tests/neg/ski.scala index b192dc9e2..3d44e77da 100644 --- a/tests/neg/ski.scala +++ b/tests/neg/ski.scala @@ -17,8 +17,8 @@ trait S2[x <: Term, y <: Term] extends Term { type eval = S2[x, y] } trait S3[x <: Term, y <: Term, z <: Term] extends Term { - type ap[v <: Term] = eval#ap[v] // error - type eval = x#ap[z]#ap[y#ap[z]]#eval // error // error + type ap[v <: Term] = eval#ap[v] // error: not a legal path + type eval = x#ap[z]#ap[y#ap[z]]#eval // error: not a legal path // error: not a legal path } // The K combinator @@ -31,8 +31,8 @@ trait K1[x <: Term] extends Term { type eval = K1[x] } trait K2[x <: Term, y <: Term] extends Term { - type ap[z <: Term] = eval#ap[z] // error - type eval = x#eval // error + type ap[z <: Term] = eval#ap[z] // error: not a legal path + type eval = x#eval // error: not a legal path } // The I combinator @@ -41,8 +41,8 @@ trait I extends Term { type eval = I } trait I1[x <: Term] extends Term { - type ap[y <: Term] = eval#ap[y] // error - type eval = x#eval // error + type ap[y <: Term] = eval#ap[y] // error: not a legal path + type eval = x#eval // error: not a legal path } // Constants @@ -64,9 +64,10 @@ case class Equals[A >: B <:B , B]() object Test { type T1 = Equals[Int, Int] // compiles fine - type T2 = Equals[String, Int] // error + type T2 = Equals[String, Int] // error: Type argument String does not conform to upper bound Int + type T3 = Equals[I#ap[c]#eval, c] - type T3a = Equals[I#ap[c]#eval, d] // error + type T3a = Equals[I#ap[c]#eval, d] // error: Type argument I1[c]#eval does not conform to upper bound d // Ic -> c type T4 = Equals[I#ap[c]#eval, c] @@ -75,29 +76,29 @@ object Test { type T5 = Equals[K#ap[c]#ap[d]#eval, c] // KKcde -> d - type T6 = Equals[K#ap[K]#ap[c]#ap[d]#ap[e]#eval, d] + type T6 = Equals[K#ap[K]#ap[c]#ap[d]#ap[e]#eval, d] // error: Type argument K2[K1[_ <: Term] @UnsafeNonvariant#x, e]#eval does not conform to upper bound d // SIIIc -> Ic - type T7 = Equals[S#ap[I]#ap[I]#ap[I]#ap[c]#eval, c] + type T7 = Equals[S#ap[I]#ap[I]#ap[I]#ap[c]#eval, c] // error: not a legal path // error: Type argument I1[_ <: Term]#eval#ap[_]#eval does not conform to upper bound c // SKKc -> Ic type T8 = Equals[S#ap[K]#ap[K]#ap[c]#eval, c] // SIIKc -> KKc - type T9 = 
Equals[S#ap[I]#ap[I]#ap[K]#ap[c]#eval, K#ap[K]#ap[c]#eval] + type T9 = Equals[S#ap[I]#ap[I]#ap[K]#ap[c]#eval, K#ap[K]#ap[c]#eval] // error: Type argument K2[K1[_ <: Term] @UnsafeNonvariant#x, _ <: Term]#eval does not conform to upper bound K2[K, c]#eval // SIKKc -> K(KK)c - type T10 = Equals[S#ap[I]#ap[K]#ap[K]#ap[c]#eval, K#ap[K#ap[K]]#ap[c]#eval] + type T10 = Equals[S#ap[I]#ap[K]#ap[K]#ap[c]#eval, K#ap[K#ap[K]]#ap[c]#eval] // error: Type argument K2[K1[_ <: Term] @UnsafeNonvariant#x, _ <: Term]#eval does not conform to upper bound K2[K1[K], c]#eval // SIKIc -> KIc - type T11 = Equals[S#ap[I]#ap[K]#ap[I]#ap[c]#eval, K#ap[I]#ap[c]#eval] + type T11 = Equals[S#ap[I]#ap[K]#ap[I]#ap[c]#eval, K#ap[I]#ap[c]#eval] // error: not a legal path // error: Type argument I1[_ <: Term]#eval#ap[_]#eval does not conform to upper bound K2[I, c]#eval // SKIc -> Ic type T12 = Equals[S#ap[K]#ap[I]#ap[c]#eval, c] // R = S(K(SI))K (reverse) type R = S#ap[K#ap[S#ap[I]]]#ap[K] - type T13 = Equals[R#ap[c]#ap[d]#eval, d#ap[c]#eval] + type T13 = Equals[R#ap[c]#ap[d]#eval, d#ap[c]#eval] // error: Type argument S3[I, S2[I, _ <: Term] @UnsafeNonvariant#y, _ <: Term]#eval does not conform to upper bound d#eval type b[a <: Term] = S#ap[K#ap[a]]#ap[S#ap[I]#ap[I]] @@ -106,27 +107,27 @@ object Test { type eval = A0 } trait A1 extends Term { - type ap[x <: Term] = x#ap[A0]#eval // error + type ap[x <: Term] = x#ap[A0]#eval // error: not a legal path type eval = A1 } trait A2 extends Term { - type ap[x <: Term] = x#ap[A1]#eval // error + type ap[x <: Term] = x#ap[A1]#eval // error: not a legal path type eval = A2 } type NN1 = b[R]#ap[b[R]]#ap[A0] - type T13a = Equals[NN1#eval, c] + type T13a = Equals[NN1#eval, c] // error: Type argument Test.NN1#eval does not conform to upper bound c // Double iteration type NN2 = b[R]#ap[b[R]]#ap[A1] - type T14 = Equals[NN2#eval, c] + type T14 = Equals[NN2#eval, c] // error: Type argument Test.NN2#eval does not conform to upper bound c // Triple iteration type NN3 = b[R]#ap[b[R]]#ap[A2] - type T15 = Equals[NN3#eval, c] + type T15 = Equals[NN3#eval, c] // error: Type argument Test.NN3#eval does not conform to upper bound c trait An extends Term { - type ap[x <: Term] = x#ap[An]#eval // error + type ap[x <: Term] = x#ap[An]#eval // error: not a legal path type eval = An } -- cgit v1.2.3 From 939d9da26ee5992c17cd1fae0a501ed66a49fb95 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 4 Jun 2016 18:21:11 +0200 Subject: Add a second betaReduce The new one only reduces straight applications of type lambdas with definite arguments. It is called very early on appliedTo, and derivedRefinedType. The old one, now renamed to normalizeHkApply also handles wildcard arguments and can garbage collect general unneeded hk-refinements. It is called later, at various places. TODO: See what functionality of normalizeHkApply should go into betaReduce instead. Maybe we can even drop normalizeHkApply? However: need to be careful to maintain aliases for hk type inference. 
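To make the distinction concrete, here is a minimal sketch of the case the new betaReduce is meant to handle (the alias CC below is invented for this illustration; it is not code from the patch):

  object BetaReduceSketch {
    // CC eta-expands to the type lambda [X] -> List[X]; applying it to the
    // definite argument Int is a straight application, which betaReduce can
    // rewrite directly to List[Int] instead of keeping an hk application.
    type CC[X] = List[X]
    val xs: CC[Int] = List(1, 2, 3)
  }

Applications with wildcard arguments (e.g. CC[_]) are still left to normalizeHkApply.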
--- src/dotty/tools/dotc/core/ConstraintHandling.scala | 2 +- src/dotty/tools/dotc/core/TypeApplications.scala | 13 ++++---- src/dotty/tools/dotc/core/TypeComparer.scala | 6 ++-- src/dotty/tools/dotc/core/TypeOps.scala | 4 +-- src/dotty/tools/dotc/core/Types.scala | 38 ++++++++++++++++++++-- tests/neg/hklower.scala | 2 +- 6 files changed, 50 insertions(+), 15 deletions(-) diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index 3b368ad5e..8072a111a 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -286,7 +286,7 @@ trait ConstraintHandling { else if (fromBelow) defn.NothingType else defn.AnyType case bound: RefinedType => - bound.BetaReduce + bound.normalizeHkApply case _ => bound } diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 7b2d2c3b2..0de951c31 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -565,12 +565,12 @@ class TypeApplications(val self: Type) extends AnyVal { assert(!isHK, self) self match { case self: TypeAlias => - self.derivedTypeAlias(expand(self.alias.BetaReduce)) + self.derivedTypeAlias(expand(self.alias.normalizeHkApply)) case self @ TypeBounds(lo, hi) => if (Config.newHK) - self.derivedTypeBounds(lo, expand(hi.BetaReduce)) + self.derivedTypeBounds(lo, expand(hi.normalizeHkApply)) else - self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi.BetaReduce))) + self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi.normalizeHkApply))) case _ => expand(self) } } @@ -603,7 +603,7 @@ class TypeApplications(val self: Type) extends AnyVal { * - dropping refinements and rec-types * - going from a wildcard type to its upper bound */ - def BetaReduce(implicit ctx: Context): Type = self.strictDealias match { + def normalizeHkApply(implicit ctx: Context): Type = self.strictDealias match { case self1 @ RefinedType(_, rname, _) if Config.newHK && rname.isHkArgName && self1.typeParams.isEmpty => val inst = new InstMap(self) @@ -840,8 +840,9 @@ class TypeApplications(val self: Type) extends AnyVal { } assert(args.nonEmpty) matchParams(self, typParams, args) match { - case refined @ RefinedType(_, pname, _) if pname.isHkArgName && !Config.newHK => - TypeRef(refined, tpnme.hkApplyOBS) + case refined @ RefinedType(_, pname, _) if pname.isHkArgName => + if (Config.newHK) refined.betaReduce + else TypeRef(refined, tpnme.hkApplyOBS) case refined => refined } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index c82dc6a39..c1cbe0752 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -661,7 +661,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } } Config.newHK && app.isHKApply && !other.isHKApply && { - val reduced = app.BetaReduce + val reduced = app.normalizeHkApply if (reduced ne app) if (inOrder) isSubType(reduced, other) else isSubType(other, reduced) else tryInfer(app.typeConstructor.dealias) @@ -675,7 +675,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { val applied = other.appliedTo(argRefs(rt, args.length)) if (inOrder) isSubType(body, applied) else body match { - case body: TypeBounds => body.contains(applied) + case body: TypeBounds => body.contains(applied) // Can be dropped? 
case _ => isSubType(applied, body) } } @@ -1503,7 +1503,7 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { override def compareHkApply(app: RefinedType, other: Type, inOrder: Boolean) = if (app.isHKApply) - traceIndented(i"compareHkApply $app, $other, $inOrder, ${app.BetaReduce}") { + traceIndented(i"compareHkApply $app, $other, $inOrder, ${app.normalizeHkApply}") { super.compareHkApply(app, other, inOrder) } else super.compareHkApply(app, other, inOrder) diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index c6a18f305..ca49d3d3c 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -158,7 +158,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. tp case tp: RefinedType => tp.derivedRefinedType(simplify(tp.parent, theMap), tp.refinedName, simplify(tp.refinedInfo, theMap)) - .BetaReduce + .normalizeHkApply case tp: TypeAlias => tp.derivedTypeAlias(simplify(tp.alias, theMap)) case AndType(l, r) => @@ -384,7 +384,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. var formals: SimpleMap[TypeName, Symbol] = SimpleMap.Empty // A map of all formal parent parameter // Strip all refinements from parent type, populating `refinements` and `formals` maps. - def normalizeToRef(tp: Type): TypeRef = tp.dealias.BetaReduce match { + def normalizeToRef(tp: Type): TypeRef = tp.dealias.normalizeHkApply match { case tp: TypeRef => tp case tp @ RefinedType(tp1, name: TypeName, rinfo) => diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 403b49da6..2fa4f94c1 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2131,9 +2131,43 @@ object Types { this } - def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): RefinedType = + def betaReduce(implicit ctx: Context): Type = refinedInfo match { + case TypeAlias(alias) => + def instantiate(rt: RecType) = new TypeMap { + def apply(t: Type) = t match { + case TypeRef(RecThis(`rt`), `refinedName`) => alias + case tp: TypeRef => + val pre1 = apply(tp.prefix) + if (pre1 ne tp.prefix) tp.newLikeThis(pre1) else tp + case _ => mapOver(t) + } + } + def substAlias(tp: Type): Type = tp.safeDealias match { + case tp @ RefinedType(p, rname, rinfo) if tp.isTypeParam => + if (rname == refinedName) p // check bounds? + else tp.derivedRefinedType(substAlias(p), rname, rinfo) + case tp: RecType => + val p1 = substAlias(tp.parent) + if (p1 ne tp.parent) tp.rebind(instantiate(tp)(p1)) + else tp + case _ => + tp + } + val reduced = substAlias(parent) + if (reduced ne parent) { + hk.println(i"REDUCE $this ----> ${reduced}") + reduced + } + else this + case _ => + this + } + + def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): Type = if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this - else RefinedType(parent, refinedName, rt => refinedInfo.substRefinedThis(this, RefinedThis(rt))) + else + RefinedType(parent, refinedName, rt => refinedInfo.substRefinedThis(this, RefinedThis(rt))) + .betaReduce /** Add this refinement to `parent`, provided If `refinedName` is a member of `parent`. 
*/ def wrapIfMember(parent: Type)(implicit ctx: Context): Type = diff --git a/tests/neg/hklower.scala b/tests/neg/hklower.scala index 5c1ba27ba..e29a1545e 100644 --- a/tests/neg/hklower.scala +++ b/tests/neg/hklower.scala @@ -1,4 +1,4 @@ -class Test { // error conflicting bounds +class Test { type T[X] // OK type U[X] = T[X] // OK -- cgit v1.2.3 From c35f817f43a776b567acef3816405b5373097842 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 5 Jun 2016 10:54:03 +0200 Subject: Adapt widenForMatchSelector to new HK scheme --- src/dotty/tools/dotc/typer/Inferencing.scala | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala index f880b647e..3b79d7c4c 100644 --- a/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/src/dotty/tools/dotc/typer/Inferencing.scala @@ -175,8 +175,14 @@ object Inferencing { /** Recursively widen and also follow type declarations and type aliases. */ def widenForMatchSelector(tp: Type)(implicit ctx: Context): Type = tp.widen match { - case tp: TypeRef if !tp.symbol.isClass => widenForMatchSelector(tp.info.bounds.hi) - case tp: AnnotatedType => tp.derivedAnnotatedType(widenForMatchSelector(tp.tpe), tp.annot) + case tp: TypeRef if !tp.symbol.isClass => + widenForMatchSelector(tp.info.bounds.hi) + case tp: AnnotatedType => + tp.derivedAnnotatedType(widenForMatchSelector(tp.tpe), tp.annot) + case tp @ RefinedType(parent, rname, rinfo) if !parent.typeSymbol.isClass => + tp.derivedRefinedType(widenForMatchSelector(parent), rname, rinfo) + case tp: RecType if !tp.parent.typeSymbol.isClass => + tp.derivedRecType(widenForMatchSelector(tp.parent)) case tp => tp } @@ -212,7 +218,7 @@ object Inferencing { val qualifies = (tvar: TypeVar) => (tree contains tvar.owningTree) || ownedBy.exists && tvar.owner == ownedBy def interpolate() = Stats.track("interpolateUndetVars") { - val tp = tree.tpe.widen + val tp = tree.tpe.widen // TODO add `.BetaReduce` ? constr.println(s"interpolate undet vars in ${tp.show}, pos = ${tree.pos}, mode = ${ctx.mode}, undets = ${constraint.uninstVars map (tvar => s"${tvar.show}@${tvar.owningTree.pos}")}") constr.println(s"qualifying undet vars: ${constraint.uninstVars filter qualifies map (tvar => s"$tvar / ${tvar.show}")}, constraint: ${constraint.show}") -- cgit v1.2.3 From e36a36a5aea93c19aa133ffb215bc05787378375 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 5 Jun 2016 17:41:45 +0200 Subject: Fix typeParams for abstract types under completion Their type parameters are not the type parameters in the completer, but hk type parameters with the same variances. 
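As a rough illustration (the trait and member names below are invented for this sketch), consider an abstract type constructor whose owner is still being completed:

  trait Coll {
    // While Coll is under completion, asking for the type parameters of F
    // yields synthetic hk parameters ($hk0 here) that carry the same
    // variance as X, rather than the completer's own parameter X.
    type F[+X] <: Iterable[X]
  }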
--- src/dotty/tools/dotc/core/TypeApplications.scala | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 0de951c31..be0eb9230 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -47,19 +47,20 @@ object TypeApplications { def variancesConform(syms1: List[MemberBinding], syms2: List[MemberBinding])(implicit ctx: Context) = syms1.corresponds(syms2)(varianceConforms) - def fallbackTypeParams(n: Int)(implicit ctx: Context): List[MemberBinding] = { - def memberBindings(n: Int): Type = - if (n == 0) NoType - else + def fallbackTypeParams(variances: List[Int])(implicit ctx: Context): List[MemberBinding] = { + def memberBindings(vs: List[Int]): Type = vs match { + case Nil => NoType + case v :: vs1 => RefinedType( - memberBindings(n - 1), - tpnme.hkArg(n - 1), - TypeBounds.empty.withBindingKind(NonvariantBinding)) + memberBindings(vs1), + tpnme.hkArg(vs1.length), + TypeBounds.empty.withBindingKind(BindingKind.fromVariance(v))) + } def decompose(t: Type, acc: List[MemberBinding]): List[MemberBinding] = t match { case t: RefinedType => decompose(t.parent, t :: acc) case NoType => acc } - decompose(memberBindings(n), Nil) + decompose(memberBindings(variances), Nil) } /** Extractor for -- cgit v1.2.3 From 6a7e466fa2ff5d6d01a25bed0c685188c9a84a63 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:41:36 +0200 Subject: Avoid accidental creation of hk types - Swap subtype tests Previous order could create constraints where hk type parameters got a * bound. (this is now caught in an assert). - Make underlyingClassRef work for hk types under new scheme. - Ensure that toAvoid does not create hk from * types - Let getClass return a * type We will be pickier than before. An unapplied type such as `java.lang.Class` will always be an hk type. Hence, the type of getClass has to be applied to [_] to make it a * type. 
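A small user-level sketch of the getClass consequence (the expression below is just an example):

  object GetClassSketch {
    // The unapplied java.lang.Class is now higher-kinded, so the result type
    // of getClass is the applied type Class[_], which is a * type.
    val c: Class[_] = "abc".getClass
  }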
--- src/dotty/tools/dotc/core/ConstraintHandling.scala | 2 ++ src/dotty/tools/dotc/core/Definitions.scala | 2 +- src/dotty/tools/dotc/core/TypeComparer.scala | 4 ++-- src/dotty/tools/dotc/core/Types.scala | 11 +++++++---- src/dotty/tools/dotc/typer/TypeAssigner.scala | 5 +++-- 5 files changed, 15 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index 8072a111a..e7b05af43 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -239,6 +239,8 @@ trait ConstraintHandling { def addParamBound(bound: PolyParam) = if (fromBelow) addLess(bound, param) else addLess(param, bound) + assert(param.isHK == bound.isHK, s"$param / $bound / $fromBelow") + /** Drop all constrained parameters that occur at the toplevel in `bound` and * handle them by `addLess` calls. * The preconditions make sure that such parameters occur only diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala index 1311bb583..44d36abc5 100644 --- a/src/dotty/tools/dotc/core/Definitions.scala +++ b/src/dotty/tools/dotc/core/Definitions.scala @@ -167,7 +167,7 @@ class Definitions { lazy val Any_hashCode = newMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType)) lazy val Any_toString = newMethod(AnyClass, nme.toString_, MethodType(Nil, StringType)) lazy val Any_## = newMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final) - lazy val Any_getClass = newMethod(AnyClass, nme.getClass_, MethodType(Nil, ClassClass.typeRef), Final) + lazy val Any_getClass = newMethod(AnyClass, nme.getClass_, MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.empty)), Final) lazy val Any_isInstanceOf = newT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final) lazy val Any_asInstanceOf = newT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, PolyParam(_, 0), Final) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index c1cbe0752..db41d85aa 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -375,7 +375,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { compareAliasedRefined(tp2, tp1, inOrder = false) } else // fast path, in particular for refinements resulting from parameterization. - isSubType(tp1, skipped2) && + isSubType(tp1, skipped2) && // TODO swap? 
isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) } compareRefined @@ -495,9 +495,9 @@ } isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths case tp1: RefinedType => - isNewSubType(tp1.parent, tp2) || compareHkApply(tp1, tp2, inOrder = true) || compareHkLambda(tp1, tp2, inOrder = true) || + isNewSubType(tp1.parent, tp2) || compareAliasedRefined(tp1, tp2, inOrder = true) case tp1: RecType => isNewSubType(tp1.parent, tp2) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 2fa4f94c1..ab5c795e3 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -875,13 +875,16 @@ object Types { if (tp.symbol.isClass) tp else if (tp.symbol.isAliasType) tp.underlying.underlyingClassRef(refinementOK) else NoType - case tp: AnnotatedType => tp.underlying.underlyingClassRef(refinementOK) + case tp: AnnotatedType => + tp.underlying.underlyingClassRef(refinementOK) case tp: RefinedType => def isParamName = tp.classSymbol.typeParams.exists(_.name == tp.refinedName) - if (refinementOK || isParamName) tp.underlying.underlyingClassRef(refinementOK) + if (refinementOK || tp.isTypeParam || isParamName) tp.underlying.underlyingClassRef(refinementOK) else NoType - case tp: RecType if refinementOK => tp.parent - case _ => NoType + case tp: RecType => + tp.underlying.underlyingClassRef(refinementOK) + case _ => + NoType } /** The iterator of underlying types as long as type is a TypeProxy. diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala index 4752d2827..3f3108ac2 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -103,7 +103,8 @@ trait TypeAssigner { val refinedInfo1 = apply(rinfo) if (toAvoid(refinedInfo1)) { typr.println(s"dropping refinement from $tp") - parent1 + if (name.isTypeName) tp.derivedRefinedType(parent1, name, TypeBounds.empty) + else parent1 } else { tp.derivedRefinedType(parent1, name, refinedInfo1) } @@ -144,7 +145,7 @@ trait TypeAssigner { * which are accessible. * * Also performs the following normalizations on the type `tpe`. - * (1) parameter accessors are alwys dereferenced. + * (1) parameter accessors are always dereferenced. * (2) if the owner of the denotation is a package object, it is assured * that the package object shows up as the prefix. */ -- cgit v1.2.3 From 8e84fb013abeb613af4a3414b836f02140e2e866 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:43:44 +0200 Subject: Eta-expand unapplied types that have type parameters We would like to change from a scheme where eta-expansion was prototype driven to one where unapplied parameterized types are always eta expanded. The reason is that we might miss some eta expansions due to cyclic references. run/colltest4 is an example. Here, we missed an eta expansion in the type of Iterator. The class definition is: trait Iterable[+A] extends IterableOnce[A] with FromIterable[Iterable] { We'd expect that the second parent would expand to FromIterable[[X0] -> Iterable[X0]] But we miss the expansion because at the time we complete Iterable we have not completed FromIterable yet. In fact this happens in both the old and the new hk scheme. But in the old scheme we did not notice the error whereas in the new scheme we get an error in PostTyper that the type Iterable does not conform to its bound `[X0] -> Iterable[X0]`. 
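A simplified, self-contained rendering of the shape involved (the bound on C below stands in for the strawman's actual definitions):

  trait IterableOnce[+A]
  trait FromIterable[+C[X] <: Iterable[X]]
  trait Iterable[+A] extends IterableOnce[A] with FromIterable[Iterable]
  // The unapplied argument Iterable must be eta-expanded to
  // [X0] -> Iterable[X0] before it is checked against C's bound.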
With this commit, we change the scheme, so that eta-expansion depends on the type parameters of a type itself, instead of the expected type. We should investigate whether we can do a similar change for Scala2 classloading. Check kinds of type parameters Also, do not allow a hk type if the bound is a * type. --- src/dotty/tools/dotc/core/TypeApplications.scala | 11 +++++++---- src/dotty/tools/dotc/typer/Applications.scala | 3 ++- src/dotty/tools/dotc/typer/Checking.scala | 7 ++++++- src/dotty/tools/dotc/typer/Namer.scala | 8 +++++--- src/dotty/tools/dotc/typer/ProtoTypes.scala | 3 +++ src/dotty/tools/dotc/typer/Typer.scala | 17 +++++++++++++---- tests/neg/kinds.scala | 18 ++++++++++++++++++ tests/pos/jon.scala | 2 +- tests/pos/range.scala | 4 ++-- tests/pos/t2613.scala | 2 +- tests/pos/tycons.scala | 22 ---------------------- 11 files changed, 58 insertions(+), 39 deletions(-) create mode 100644 tests/neg/kinds.scala delete mode 100644 tests/pos/tycons.scala diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index be0eb9230..1700a9c9c 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -378,15 +378,18 @@ class TypeApplications(val self: Type) extends AnyVal { } } + /** If `self` is a higher-kinded type, its type parameters $hk_i, otherwise Nil */ final def hkTypeParams(implicit ctx: Context): List[MemberBinding] = if (Config.newHK) if (isHK) typeParams else Nil else LambdaTraitOBS.typeParams - final def typeParamSymbols(implicit ctx: Context): List[TypeSymbol] = { - val tparams = typeParams - assert(tparams.isEmpty || tparams.head.isInstanceOf[Symbol], self) - tparams.asInstanceOf[List[TypeSymbol]] + /** If `self` is a generic class, its type parameter symbols, otherwise Nil */ + final def typeParamSymbols(implicit ctx: Context): List[TypeSymbol] = typeParams match { + case (_: Symbol) :: _ => + assert(typeParams.forall(_.isInstanceOf[Symbol])) + typeParams.asInstanceOf[List[TypeSymbol]] + case _ => Nil } /** The named type parameters declared or inherited by this type. diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index 14071e27c..cdbf692cd 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -17,6 +17,7 @@ import Types._ import Decorators._ import ErrorReporting._ import Trees._ +import config.Config import Names._ import StdNames._ import ProtoTypes._ @@ -644,7 +645,7 @@ trait Applications extends Compatibility { self: Typer => } def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = - tree.withType(tree.tpe.etaExpandIfHK(bound)) + if (Config.newHK) tree else tree.withType(tree.tpe.etaExpandIfHK(bound)) /** Rewrite `new Array[T](....)` if T is an unbounded generic to calls to newGenericArray. * It is performed during typer as creation of generic arrays needs a classTag. diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index 6944197a1..cfad4e77e 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -36,11 +36,16 @@ object Checking { /** A general checkBounds method that can be used for TypeApply nodes as * well as for AppliedTypeTree nodes. 
*/ - def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context) = + def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context) = { + (args, boundss).zipped.foreach { (arg, bound) => + if (!bound.isHK && arg.tpe.isHK) + ctx.error(d"missing type parameter(s) for $arg", arg.pos) + } for ((arg, which, bound) <- ctx.boundsViolations(args, boundss, instantiate)) ctx.error( d"Type argument ${arg.tpe} does not conform to $which bound $bound ${err.whyNoMatchStr(arg.tpe, bound)}", arg.pos) + } /** Check that type arguments `args` conform to corresponding bounds in `poly` * Note: This does not check the bounds of AppliedTypeTrees. These diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index a8f3b8918..7982f288d 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -14,6 +14,7 @@ import collection.mutable import annotation.tailrec import ErrorReporting._ import tpd.ListOfTreeDecorator +import config.Config import config.Printers._ import Annotations._ import Inferencing._ @@ -591,7 +592,7 @@ class Namer { typer: Typer => */ def parentType(parent: untpd.Tree)(implicit ctx: Context): Type = if (parent.isType) { - typedAheadType(parent).tpe + typedAheadType(parent, AnyTypeConstructorProto).tpe } else { val (core, targs) = stripApply(parent) match { case TypeApply(core, targs) => (core, targs) @@ -973,7 +974,8 @@ class Namer { typer: Typer => ensureUpToDate(sym.typeRef, dummyInfo) ensureUpToDate(sym.typeRef.appliedTo(tparamSyms.map(_.typeRef)), TypeBounds.empty) - etaExpandArgs.apply(sym.info) + if (Config.newHK) sym.info + else etaExpandArgsOBS.apply(sym.info) } /** Eta expand all class types C appearing as arguments to a higher-kinded @@ -982,7 +984,7 @@ class Namer { typer: Typer => * of arguments in F-bounds, because the recursive type was initialized with * TypeBounds.empty. */ - def etaExpandArgs(implicit ctx: Context) = new TypeMap { + def etaExpandArgsOBS(implicit ctx: Context) = new TypeMap { def apply(tp: Type): Type = tp match { case tp: RefinedType => val args = tp.argInfos.mapconserve(this) diff --git a/src/dotty/tools/dotc/typer/ProtoTypes.scala b/src/dotty/tools/dotc/typer/ProtoTypes.scala index 740258821..68fd99b3f 100644 --- a/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -311,6 +311,9 @@ object ProtoTypes { */ @sharable object AnyFunctionProto extends UncachedGroundType with MatchAlways + /** A prototype for type constructors that are followed by a type application */ + @sharable object AnyTypeConstructorProto extends UncachedGroundType with MatchAlways + /** Add all parameters in given polytype `pt` to the constraint's domain. * If the constraint contains already some of these parameters in its domain, * make a copy of the polytype and add the copy's type parameters instead. 
diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 225516503..33a94f5c7 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -928,7 +928,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit } def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(implicit ctx: Context): Tree = track("typedAppliedTypeTree") { - val tpt1 = typed(tree.tpt)(ctx retractMode Mode.Pattern) + val tpt1 = typed(tree.tpt, AnyTypeConstructorProto)(ctx.retractMode(Mode.Pattern)) val tparams = tpt1.tpe.typeParams if (tparams.isEmpty) { ctx.error(d"${tpt1.tpe} does not take type parameters", tree.pos) @@ -1672,6 +1672,17 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit } } + def adaptType(tp: Type): Tree = { + val tree1 = + if (pt != AnyTypeConstructorProto && tp.typeParamSymbols.nonEmpty) { + println(i"lam abs $tp with tparams ${tp.typeParamSymbols}%, %") + tree.withType(tree.tpe.EtaExpand(tp.typeParamSymbols)) + } + else tree + if ((ctx.mode is Mode.Pattern) || tree1.tpe <:< pt) tree1 + else err.typeMismatch(tree1, pt) + } + tree match { case _: MemberDef | _: PackageDef | _: Import | _: WithoutTypeOrPos[_] => tree case _ => tree.tpe.widen match { @@ -1705,9 +1716,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit (_, _) => tree // error will be reported in typedTypeApply } case _ => - if (ctx.mode is Mode.Type) - if ((ctx.mode is Mode.Pattern) || tree.tpe <:< pt) tree - else err.typeMismatch(tree, pt) + if (ctx.mode is Mode.Type) adaptType(tree.tpe) else adaptNoArgs(wtp) } } diff --git a/tests/neg/kinds.scala b/tests/neg/kinds.scala new file mode 100644 index 000000000..312c5d45e --- /dev/null +++ b/tests/neg/kinds.scala @@ -0,0 +1,18 @@ +object Test { + + class C[T] + class C2[T[X]] + + class B + + val x: C[C] = ??? // error: missing type parameter(s) + val y: C2[C] = ??? + + def f[T] = ??? + + def f2[T[X]] = ??? + + f[C] // error: missing type parameter(s) + f2[C] + +} diff --git a/tests/pos/jon.scala b/tests/pos/jon.scala index d4ea74f02..224486945 100644 --- a/tests/pos/jon.scala +++ b/tests/pos/jon.scala @@ -4,5 +4,5 @@ object Test { val x = List(List, Vector) - val y: List[scala.collection.generic.SeqFactory] = x + val y: List[scala.collection.generic.SeqFactory[_]] = x } diff --git a/tests/pos/range.scala b/tests/pos/range.scala index 9e7b5d1c9..a33f7fcee 100644 --- a/tests/pos/range.scala +++ b/tests/pos/range.scala @@ -1,8 +1,8 @@ import scala.math._ import collection.immutable.NumericRange object Test { - val r1: scala.collection.immutable.Range.Partial = ??? - val r2: scala.Range.Partial = r1 + val r1: scala.collection.immutable.Range.Partial[_, _] = ??? 
+ val r2: scala.Range.Partial[_, _] = r1 def until(d: BigDecimal, end: BigDecimal): Range.Partial[BigDecimal, NumericRange.Exclusive[BigDecimal]] = new Range.Partial(until(d, end, _)) def until(d: BigDecimal, end: BigDecimal, step: BigDecimal) = Range.BigDecimal(d, end, step) diff --git a/tests/pos/t2613.scala b/tests/pos/t2613.scala index c234d4c0d..17ebe2d7e 100644 --- a/tests/pos/t2613.scala +++ b/tests/pos/t2613.scala @@ -5,7 +5,7 @@ object Test { abstract class MyRelation [R <: Row, +Relation <: MyRelation[R, Relation]] - type M = MyRelation[_ <: Row, _ <: MyRelation] + type M = MyRelation[_ <: Row, _ <: MyRelation[_, _]] val (x,y): (String, M) = null } diff --git a/tests/pos/tycons.scala b/tests/pos/tycons.scala deleted file mode 100644 index 1ed4d2855..000000000 --- a/tests/pos/tycons.scala +++ /dev/null @@ -1,22 +0,0 @@ -class TypeConstructor { - type TypeArg -} - -trait List[+T] extends TypeConstructor { type TypeArg <: T } - -trait Set[T] extends TypeConstructor { type TypeArg <: T } - -object obj extends List[Number] with Set[Exception] { - val x: TypeArg = ??? - val n: Number = x - val e: Exception = x -} - -abstract class Functor[F <: TypeConstructor] { - def map[A, B](f: F { type TypeArg <: A }): F { type TypeArg <: B } -} - -object ListFunctor extends Functor[List] { - override def map[A, B](f: List { type TypeArg <: A }): List { type TypeArg <: B } = ??? -} - -- cgit v1.2.3 From 68e73e854e04f7bea20a8c95637729bf6889e17d Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:44:55 +0200 Subject: Restrict betaReduce to hk applications Also, handle LazyRefs conservatively in isReferredTo Without the change we risk losing RecTypes because it looks like nobody refers to them. This was observed for pos/i974.scala. --- src/dotty/tools/dotc/core/TypeApplications.scala | 10 +++++++++- src/dotty/tools/dotc/core/Types.scala | 4 +++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 1700a9c9c..4f482f460 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -501,7 +501,7 @@ class TypeApplications(val self: Type) extends AnyVal { false } - /** Dealias type if it can be done without forcing anything */ + /** Dealias type if it can be done without forcing the TypeRef's info */ def safeDealias(implicit ctx: Context): Type = self match { case self: TypeRef if self.denot.exists && self.symbol.isAliasType => self.info.bounds.hi.stripTypeVar.safeDealias @@ -509,6 +509,14 @@ class TypeApplications(val self: Type) extends AnyVal { self } + /** Dealias type if it can be done without forcing anything */ + def saferDealias(implicit ctx: Context): Type = self match { + case self: TypeRef if self.denot.exists && self.symbol.isAliasType && self.symbol.isCompleted => + self.info.bounds.hi.stripTypeVar.safeDealias + case _ => + self + } + /** Replace references to type parameters with references to hk arguments `this.$hk_i` * Care is needed not to cause cyclic reference errors, hence `SafeSubstMap`. 
*/ diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index ab5c795e3..7a050c412 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2135,7 +2135,7 @@ object Types { } def betaReduce(implicit ctx: Context): Type = refinedInfo match { - case TypeAlias(alias) => + case TypeAlias(alias) if refinedName.isHkArgName => def instantiate(rt: RecType) = new TypeMap { def apply(t: Type) = t match { case TypeRef(RecThis(`rt`), `refinedName`) => alias @@ -2265,6 +2265,8 @@ object Types { tp match { case tp: TypeRef => apply(x, tp.prefix) case tp: RecThis => RecType.this eq tp.binder + case tp: LazyRef => true // Assume a reference to be safe. + // TODO: Check that all accumulators handle LazyRefs correctly case _ => foldOver(x, tp) } } -- cgit v1.2.3 From 463e99a48996fbf7148aa62ec6d2f8b28000d2d4 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 7 Jun 2016 13:28:55 +0200 Subject: Optionally, check kinds match for & and | Optionally, check kinds of operands of & and | match. --- src/dotty/tools/dotc/config/Config.scala | 5 ++++ src/dotty/tools/dotc/core/TypeApplications.scala | 30 +++++++++++++++++++++++- src/dotty/tools/dotc/core/Types.scala | 4 ++++ 3 files changed, 38 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index 63ecdc76d..1c22329f1 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -79,6 +79,11 @@ object Config { */ final val checkProjections = false + /** If this flag is set it is checked that &/| only apply to types + * that are either both hk types or both * types. + */ + final val checkKinds = true + /** The recursion depth for showing a summarized string */ final val summarizeDepth = 2 diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 4f482f460..6fc1fb9dc 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -473,6 +473,31 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => false } + /** Computes the kind of `self` without forcing anything. + * @return 1 if type is known to be higher-kinded + * -1 if type is known to be a * type + * 0 if kind of `self` is unknown (because symbols have not yet completed) + */ + def knownHK(implicit ctx: Context): Int = self match { + case self: TypeRef => + val tsym = self.symbol + if (tsym.isClass) -1 + else tsym.infoOrCompleter match { + case completer: TypeParamsCompleter => + if (completer.completerTypeParams(tsym).nonEmpty) 1 else -1 + case _ => + if (!tsym.isCompleting || tsym.isAliasType) tsym.info.knownHK + else 0 + } + case self: RefinedType => + if (self.isTypeParam) 1 else -1 + case self: SingletonType => -1 + case self: TypeVar => self.origin.knownHK + case self: WildcardType => self.optBounds.knownHK + case self: TypeProxy => self.underlying.knownHK + case _ => -1 + } + /** is receiver of the form T#$Apply? */ def isHKApply(implicit ctx: Context): Boolean = self match { case self @ RefinedType(_, name, _) => Config.newHK && name.isHkArgName && !self.isTypeParam @@ -666,7 +691,10 @@ class TypeApplications(val self: Type) extends AnyVal { res // without this line, typing 974.scala gives a stackoverflow in asSeenFrom. 
} else instTop(self) - if (reduced ne self) hk.println(i"reduce $self --> $reduced") + if (reduced ne self) { + hk.println(i"reduce $self --> $reduced / ${inst.tyconIsHK}") + //hk.println(s"reduce $self --> $reduced") + } reduced case _ => self } diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 7a050c412..986a9c292 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2358,6 +2358,8 @@ object Types { object AndType { def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = { assert(tp1.isInstanceOf[ValueType] && tp2.isInstanceOf[ValueType]) + if (Config.checkKinds) + assert((tp1.knownHK - tp2.knownHK).abs <= 1, i"$tp1 & $tp2 / " + s"$tp1 & $tp2") unchecked(tp1, tp2) } def unchecked(tp1: Type, tp2: Type)(implicit ctx: Context) = { @@ -2392,6 +2394,8 @@ object Types { object OrType { def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = { assertUnerased() + if (Config.checkKinds) + assert((tp1.knownHK - tp2.knownHK).abs <= 1, i"$tp1 | $tp2") unique(new CachedOrType(tp1, tp2)) } def make(tp1: Type, tp2: Type)(implicit ctx: Context): Type = -- cgit v1.2.3 From 0a5f8394a4a226bd3dcf9c966495e653a25ed7d2 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:46:39 +0200 Subject: Avoid infinite recursion when comparing recursive types. The previous scheme goes into an infinite recursion if the recursive type does not contain a reference to itself. Also, make typeParams more defensive The problematic case is something like { z => CC { type hk$0 = z.hk$0; type(param) hk$0 } Here $hk0 becomes a type parameter through CC and the type lambda. It's true that such types are eliminated later on. But we want to avoid mispredictions at all points. --- src/dotty/tools/dotc/core/TypeApplications.scala | 4 ++-- src/dotty/tools/dotc/core/TypeComparer.scala | 2 +- src/dotty/tools/dotc/typer/Typer.scala | 7 ++----- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 6fc1fb9dc..fc8876d09 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -364,9 +364,9 @@ class TypeApplications(val self: Type) extends AnyVal { val sym = self.parent.classSymbol if (sym.isLambdaTraitOBS) return sym.typeParams } - val precedingParams = self.parent.typeParams + val precedingParams = self.parent.typeParams.filterNot(_.memberName == self.refinedName) if (Config.newHK && self.isTypeParam) precedingParams :+ self - else precedingParams.filterNot(_.memberName == self.refinedName) + else precedingParams case self: RecType => self.parent.typeParams case self: SingletonType => diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index db41d85aa..dfe94de44 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -381,7 +381,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { compareRefined case tp2: RecType => val tp1stable = ensureStableSingleton(tp1) - isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.substRecThis(tp2, tp1stable)) + isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable)) case OrType(tp21, tp22) => // Rewrite T1 <: (T211 & T212) | T22 to T1 <: (T211 | T22) and T1 <: (T212 | T22) // and analogously for T1 <: T21 | (T221 & T222) diff --git 
a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 33a94f5c7..34be65591 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -1674,11 +1674,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit def adaptType(tp: Type): Tree = { val tree1 = - if (pt != AnyTypeConstructorProto && tp.typeParamSymbols.nonEmpty) { - println(i"lam abs $tp with tparams ${tp.typeParamSymbols}%, %") - tree.withType(tree.tpe.EtaExpand(tp.typeParamSymbols)) - } - else tree + if ((pt eq AnyTypeConstructorProto) || tp.typeParamSymbols.isEmpty) tree + else tree.withType(tree.tpe.EtaExpand(tp.typeParamSymbols)) if ((ctx.mode is Mode.Pattern) || tree1.tpe <:< pt) tree1 else err.typeMismatch(tree1, pt) } -- cgit v1.2.3 From 31af865656ecc352c39ce919981e9b50d42a3237 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:48:40 +0200 Subject: Fix condition for lambda abstracting in Namer The previous condition could make a (derived) type a * type for a little while even though it had type parameters. This loophole caused collection/generic/MapFactory.scala and with it compile-stdlib to fail. Refinement for knownHK for PolyParams pos test t2082.scala shows that knownHK can be constructed before the binder PolyType of a PolyParam is initialized. --- src/dotty/tools/dotc/core/TypeApplications.scala | 10 +++------- src/dotty/tools/dotc/core/Types.scala | 6 +++++- src/dotty/tools/dotc/typer/Namer.scala | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index fc8876d09..c0728a8fb 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -494,7 +494,9 @@ class TypeApplications(val self: Type) extends AnyVal { case self: SingletonType => -1 case self: TypeVar => self.origin.knownHK case self: WildcardType => self.optBounds.knownHK + case self: PolyParam => self.underlying.knownHK case self: TypeProxy => self.underlying.knownHK + case NoType => 0 case _ => -1 } @@ -666,13 +668,7 @@ class TypeApplications(val self: Type) extends AnyVal { instTop(tp.ref) case tp => inst.tyconIsHK = tp.isHK - val res = inst(tp) - tp match { - case tp: WildcardType => - println(s"inst $tp --> $res") - case _ => - } - res + inst(tp) } def isLazy(tp: Type): Boolean = tp.strictDealias match { diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 986a9c292..d3e97b492 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2765,7 +2765,11 @@ object Types { def paramName = binder.paramNames(paramNum) - override def underlying(implicit ctx: Context): Type = binder.paramBounds(paramNum) + override def underlying(implicit ctx: Context): Type = { + val bounds = binder.paramBounds + if (bounds == null) NoType // this can happen if the PolyType is not initialized yet + else bounds(paramNum) + } // no customized hashCode/equals needed because cycle is broken in PolyType override def toString = s"PolyParam($paramName)" diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 7982f288d..bc8f8e281 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -935,7 +935,7 @@ class Namer { typer: Typer => //val toParameterize = tparamSyms.nonEmpty && !isDerived //val needsLambda = 
sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived def abstracted(tp: Type): Type = - if (tparamSyms.nonEmpty && !isDerived) tp.LambdaAbstract(tparamSyms) + if (tparamSyms.nonEmpty && !tp.isHK) tp.LambdaAbstract(tparamSyms) //else if (toParameterize) tp.parameterizeWith(tparamSyms) else tp -- cgit v1.2.3 From 5041e9311bf1845d09c6eeccba816214fe25a3e6 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 7 Jun 2016 14:17:52 +0200 Subject: Make TypeAccumulators follow LazyRefs TypeMaps do the same, so it is logical, and helps prevent subtle errors as when we mispredicted whether a RecType contains references that point to it. Also, add normalizeHkApply to homogenize Fixes some discrepancies in Tasty typing. Also, homogenize skolem types Skolem types are eliminated by pickling, so they should not appear in the "before-pickling" output. --- src/dotty/tools/dotc/core/Types.scala | 3 +++ src/dotty/tools/dotc/printing/PlainPrinter.scala | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index d3e97b492..b779cbf3e 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -3708,6 +3708,9 @@ object Types { case tp: JavaArrayType => this(x, tp.elemType) + case tp: LazyRef => + this(x, tp.ref) + case tp: ProtoType => tp.fold(x, this) diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index 20bf8b407..a9f5b771a 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -53,6 +53,10 @@ class PlainPrinter(_ctx: Context) extends Printer { case tp @ TypeRef(_, tpnme.hkApplyOBS) => val tp1 = tp.reduceProjection if (tp1 eq tp) tp else homogenize(tp1) + case tp: RefinedType => + tp.normalizeHkApply + case tp: SkolemType => + homogenize(tp.info) case tp: LazyRef => homogenize(tp.ref) case _ => -- cgit v1.2.3 From f1bf78bf8ceb17bfe0b9dc57a6e6f03a9b59065f Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 8 Jun 2016 09:45:56 +0200 Subject: Avoid creating dependent function types for closures Without this step, anonymous functions can have dependent types which causes the parameter references to "leak out" to types in the environment in illegal ways. This caused a tasty failure for Typer before (not sure why the failure was not observed under the old hk scheme). 
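A rough sketch of the situation being avoided (the Box trait below is invented for this example):

  object ClosureSketch {
    trait Box { type T; def value: T }
    // The inferred result type of the lambda would mention the parameter b
    // (b.T), i.e. a dependent function type, which the underlying Function1
    // type cannot express; the typer therefore widens the return type so
    // that it no longer refers to b.
    val f = (b: Box) => b.value
  }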
--- src/dotty/tools/dotc/typer/Typer.scala | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 34be65591..96bc2ab35 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -1042,7 +1042,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit val tparams1 = tparams mapconserve (typed(_).asInstanceOf[TypeDef]) val vparamss1 = vparamss nestedMapconserve (typed(_).asInstanceOf[ValDef]) if (sym is Implicit) checkImplicitParamsNotSingletons(vparamss1) - val tpt1 = checkSimpleKinded(typedType(tpt)) + var tpt1 = checkSimpleKinded(typedType(tpt)) var rhsCtx = ctx if (sym.isConstructor && !sym.isPrimaryConstructor && tparams1.nonEmpty) { @@ -1054,6 +1054,12 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit rhsCtx.gadt.setBounds(tdef.symbol, TypeAlias(tparam.typeRef))) } val rhs1 = typedExpr(ddef.rhs, tpt1.tpe)(rhsCtx) + if (sym.isAnonymousFunction) { + // If we define an anonymous function, make sure the return type does not + // refer to parameters. This is necessary because closure types are + // function types so no dependencies on parameters are allowed. + tpt1 = tpt1.withType(avoid(tpt1.tpe, vparamss1.flatMap(_.map(_.symbol)))) + } assignType(cpy.DefDef(ddef)(name, tparams1, vparamss1, tpt1, rhs1), sym) //todo: make sure dependent method types do not depend on implicits or by-name params } -- cgit v1.2.3 From 830b72432fc02b86e798da24b084264881cbc392 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:54:09 +0200 Subject: Change tests - compileMixed failed because there was a cycle between immutable.Seq (compiled) and parallel.ParSeq (loaded from classfile). Inspection of the completion log (turn completions Printer on) and the stack trace showed that there's nothing we can do here. The old hk scheme did not go into the cycle because it did not force an unrelated type. I believe with enough tweaking we would also have gotten a cycle in the old hk scheme. The test is "fixed" by adding parallel.ParSeq to the files to compile. - Disable named parameter tests Those tests do not work yet with the revised hk scheme. Before trying to fix this, we should first decide what parts of named parameters should be kept. 
--- src/dotty/tools/dotc/core/TypeComparer.scala | 15 + src/dotty/tools/dotc/core/tasty/TreePickler.scala | 4 + src/dotty/tools/dotc/sbt/ExtractDependencies.scala | 2 +- src/dotty/tools/dotc/typer/Implicits.scala | 4 +- test/dotc/tests.scala | 1 + tests/disabled/neg/named-params.scala | 37 ++ tests/disabled/pos/CollectionStrawMan3.scala | 408 +++++++++++++++++++++ tests/disabled/pos/flowops.scala | 31 ++ tests/disabled/pos/flowops1.scala | 39 ++ tests/disabled/pos/hk-named.scala | 58 +++ tests/disabled/pos/named-params.scala | 90 +++++ tests/neg/named-params.scala | 37 -- tests/pos/CollectionStrawMan3.scala | 408 --------------------- tests/pos/flowops.scala | 31 -- tests/pos/flowops1.scala | 39 -- tests/pos/hk-named.scala | 58 --- tests/pos/named-params.scala | 90 ----- 17 files changed, 686 insertions(+), 666 deletions(-) create mode 100644 tests/disabled/neg/named-params.scala create mode 100644 tests/disabled/pos/CollectionStrawMan3.scala create mode 100644 tests/disabled/pos/flowops.scala create mode 100644 tests/disabled/pos/flowops1.scala create mode 100644 tests/disabled/pos/hk-named.scala create mode 100644 tests/disabled/pos/named-params.scala delete mode 100644 tests/neg/named-params.scala delete mode 100644 tests/pos/CollectionStrawMan3.scala delete mode 100644 tests/pos/flowops.scala delete mode 100644 tests/pos/flowops1.scala delete mode 100644 tests/pos/hk-named.scala delete mode 100644 tests/pos/named-params.scala diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index dfe94de44..5f28d07eb 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -657,6 +657,21 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { if (inOrder) unifyWith(other) else testLifted(other, app, hkTypeParams, unifyWith) case _ => + // why only handle the case where one of the sides is a typevar or poly param? + // If the LHS is a hk application, then the normal logic already handles + // all other cases. Indeed, say you have + // + // type C[T] <: List[T] + // + // where C is an abstract type. Then to verify `C[Int] <: List[Int]`, + // use compareRefinedslow to get `C <: List` and verify that + // + // C#List$T = C$$hk0 = Int + // + // If the RHS is a hk application, we can also go through + // the normal logic because lower bounds are not parameterized. + // If were to re-introduce parameterized lower bounds of hk types + // we'd have to add some logic to handle them here. false } } diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 9f703b5af..9be5c8bcf 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -76,6 +76,10 @@ class TreePickler(pickler: TastyPickler) { case Some(label) => if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym) case None => + // See pos/t1957.scala for an example where this can happen. + // I believe it's a bug in typer: the type of an implicit argument refers + // to a closure parameter outside the closure itself. TODO: track this down, so that we + // can eliminate this case. 
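// A minimal sketch, with illustrative names only (not compiler code), of the
// abstract-type-constructor case discussed in the TypeComparer comment above:
// with an upper-bounded constructor `type C[T] <: List[T]`, the ordinary refinement
// subtyping already justifies accepting a C[Int] where a List[Int] is expected.
object AbstractConstructorSketch {
  trait Box {
    type C[T] <: List[T]
    def make[T](x: T): C[T]
  }
  def sum(b: Box): Int = {
    val xs: List[Int] = b.make(1) // relies on C[Int] <: List[Int]
    xs.sum
  }
}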
ctx.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.pos) pickleForwardSymRef(sym) } diff --git a/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 181d6a2d7..1f19a1058 100644 --- a/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -164,7 +164,7 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp sym.isEffectiveRoot || sym.isAnonymousFunction || sym.isAnonymousClass || - sym.isLambdaTrait + sym.isLambdaTraitOBS private def addInheritanceDependency(sym: Symbol): Unit = _topLevelInheritanceDependencies += sym.topLevelClass diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala index e21a08fb8..1b02f7e70 100644 --- a/src/dotty/tools/dotc/typer/Implicits.scala +++ b/src/dotty/tools/dotc/typer/Implicits.scala @@ -284,12 +284,12 @@ trait ImplicitRunInfo { self: RunInfo => override implicit protected val ctx: Context = liftingCtx override def stopAtStatic = true def apply(tp: Type) = tp match { - case tp: TypeRef if tp.symbol.isLambdaTrait => + case tp: TypeRef if tp.symbol.isLambdaTraitOBS => defn.AnyType case tp: TypeRef if tp.symbol.isAbstractOrAliasType => val pre = tp.prefix def joinClass(tp: Type, cls: ClassSymbol) = - if (cls.isLambdaTrait) tp + if (cls.isLambdaTraitOBS) tp else AndType.make(tp, cls.typeRef.asSeenFrom(pre, cls.owner)) val lead = if (tp.prefix eq NoPrefix) defn.AnyType else apply(tp.prefix) (lead /: tp.classSymbols)(joinClass) diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index bac443735..f98b8114c 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -149,6 +149,7 @@ class tests extends CompilerTest { @Test def compileMixed = compileLine( """tests/pos/B.scala |./scala-scala/src/library/scala/collection/immutable/Seq.scala + |./scala-scala/src/library/scala/collection/parallel/ParSeq.scala |./scala-scala/src/library/scala/package.scala |./scala-scala/src/library/scala/collection/GenSeqLike.scala |./scala-scala/src/library/scala/collection/SeqLike.scala diff --git a/tests/disabled/neg/named-params.scala b/tests/disabled/neg/named-params.scala new file mode 100644 index 000000000..5a2375b15 --- /dev/null +++ b/tests/disabled/neg/named-params.scala @@ -0,0 +1,37 @@ +package namedparams + +class C[type Elem, type Value](val elem: Elem) { + def toVal: Elem = ??? 
+} + +abstract class D[type Elem, V](elem: Elem) extends C[Elem, V](elem) +abstract class D2[Elem, V](elem: Elem) extends C[Elem, V](elem) // error +abstract class D3[type Elem, V](x: V) extends C[V, V](x) // error +abstract class D4[type Elem](elem: Elem) extends C[Elem, Elem] // error +object Test { + val c = new C[String, String]("A") { + override def toVal = elem + } + val x: c.Elem = c.elem + + val c2: C { type Elem = String } = c + + val c3 = new C[Elem = String, Value = Int]("B") + val c4 = new C[Elem = String]("C") + val x2: c2.Elem = c2.elem + + val c5 = new C[Elem1 = String, Value0 = Int]("B") // error // error + + def d2[E, V](x: E) = new C[Elem = E, Value = V](x) + + val dup = d2[E = Int, V = String, E = Boolean](2) // error + val z1 = d2[Elem = Int, Value = String](1) // error // error + val z2 = d2[Value = String, Elem = Int](1) // error // error + val z3 = d2[Elem = Int](1) // error + val z4 = d2[Value = Int]("AAA") // error + val z5 = d2[Elem = Int][Value = String](1) //error // error + +} + + + diff --git a/tests/disabled/pos/CollectionStrawMan3.scala b/tests/disabled/pos/CollectionStrawMan3.scala new file mode 100644 index 000000000..c21a73f00 --- /dev/null +++ b/tests/disabled/pos/CollectionStrawMan3.scala @@ -0,0 +1,408 @@ +package strawman.collections + +import Predef.{augmentString => _, wrapString => _, _} +import scala.reflect.ClassTag + +/** A strawman architecture for new collections. It contains some + * example collection classes and methods with the intent to expose + * some key issues. It would be good to compare this to other + * implementations of the same functionality, to get an idea of the + * strengths and weaknesses of different collection architectures. + * + * For a test file, see tests/run/CollectionTests.scala. + * + * This one is like CollectionStrawMan1, but with the named parameter + * scheme for hk types. 
+ */ +object CollectionStrawMan1 { + + /* ------------ Base Traits -------------------------------- */ + + /** Replaces TraversableOnce */ + trait CanIterate[type +Elem] { + def iterator: Iterator[Elem] + } + + /** Base trait for instances that can construct a collection from an iterator */ + trait FromIterator[+C <: Iterable] { + def fromIterator[B](it: Iterator[B]): C[Elem = B] + } + + /** Base trait for companion objects of collections */ + trait IterableFactory[+C <: Iterable] extends FromIterator[C] { + def empty[X]: C[Elem = X] = fromIterator(Iterator.empty) + def apply[A](xs: A*): C[Elem = A] = fromIterator(Iterator(xs: _*)) + } + + /** Base trait for generic collections */ + trait Iterable[type +Elem] extends CanIterate[Elem] with FromIterator[Iterable] + + /** Base trait for sequence collections */ + trait Seq[type +Elem] extends Iterable[Elem] with FromIterator[Seq] { + def apply(i: Int): Elem + def length: Int + } + + /* ------------ Operations ----------------------------------- */ + + /** Operations returning types unrelated to current collection */ + trait Ops[A] extends Any { + def iterator: Iterator[A] + def foreach(f: A => Unit): Unit = iterator.foreach(f) + def foldLeft[B](z: B)(op: (B, A) => B): B = iterator.foldLeft(z)(op) + def foldRight[B](z: B)(op: (A, B) => B): B = iterator.foldRight(z)(op) + def indexWhere(p: A => Boolean): Int = iterator.indexWhere(p) + def isEmpty: Boolean = !iterator.hasNext + def head: A = iterator.next + def view: View[A] = new View(iterator) + def to[C <: Iterable](fi: FromIterator[C]): C[Elem = A] = fi.fromIterator(iterator) + } + + /** Transforms returning same collection type */ + trait MonoTransforms[A, Repr] extends Any { + protected def iter: Iterator[A] + protected def fromIter(it: => Iterator[A]): Repr + def partition(p: A => Boolean): (Repr, Repr) = { + val (xs, ys) = iter.partition(p) + (fromIter(xs), fromIter(ys)) + } + def drop(n: Int): Repr = fromIter(iter.drop(n)) + } + + /** Transforms returning same collection type constructor */ + trait PolyTransforms[A, C <: CanIterate] extends Any { + protected def iter: Iterator[A] + protected def fromIter[B](it: => Iterator[B]): C[Elem = B] + def map[B](f: A => B): C[Elem = B] = fromIter(iter.map(f)) + def flatMap[B](f: A => CanIterate[B]): C[Elem = B] = fromIter(iter.flatMap(f(_))) + def ++[B >: A](xs: CanIterate[B]): C[Elem = B] = fromIter(iter ++ xs) + def zip[B](xs: CanIterate[B]): C[Elem = (A, B)] = fromIter(iter.zip(xs.iterator)) + } + + /** Transforms that only apply to Seq */ + trait MonoTransformsOfSeqs[A, Repr] extends Any with MonoTransforms[A, Repr] { + def reverse: Repr = fromIter(iter.reverse) + } + + /** Implementation of Ops for all generic collections */ + implicit class IterableOps[A](val c: Iterable[A]) + extends AnyVal with Ops[A] { + def iterator = c.iterator + } + + /** Implementation of MonoTransforms for all generic collections */ + implicit class IterableMonoTransforms[A, C <: Iterable](val c: Iterable[A] with FromIterator[C]) + extends AnyVal with MonoTransforms[A, C[Elem = A]] { + protected def iter = c.iterator + protected def fromIter(it: => Iterator[A]): C[Elem = A] = c.fromIterator(it) + } + + /** Implementation of PolyTransforms for all generic collections */ + implicit class IterablePolyTransforms[A, C <: Iterable](val c: Iterable[A] with FromIterator[C]) + extends AnyVal with PolyTransforms[A, C] { + protected def iter = c.iterator + protected def fromIter[B](it: => Iterator[B]) = c.fromIterator(it) + } + + /** Implementation of MonoTransformsForSeqs for 
all generic collections */ + implicit class SeqMonoTransforms[A, C <: Seq](val c: Seq[A] with FromIterator[C]) + extends AnyVal with MonoTransformsOfSeqs[A, C[Elem = A]] { + protected def iter = c.iterator + protected def fromIter(it: => Iterator[A]) = c.fromIterator(it) + } + + /* --------- Concrete collection types ------------------------------- */ + + /** Concrete collection type: List */ + sealed trait List[type +Elem] extends Seq[Elem] with FromIterator[List] { + def isEmpty: Boolean + def head: Elem + def tail: List[Elem] + def iterator = new ListIterator[Elem](this) + def fromIterator[B](it: Iterator[B]): List[B] = List.fromIterator(it) + def apply(i: Int): Elem = { + require(!isEmpty) + if (i == 0) head else tail.apply(i - 1) + } + def length: Int = + if (isEmpty) 0 else 1 + tail.length + } + + case class Cons[+A](x: A, xs: List[A]) extends List[A] { + def isEmpty = false + def head = x + def tail = xs + } + + case object Nil extends List[Nothing] { + def isEmpty = true + def head = ??? + def tail = ??? + } + + object List extends IterableFactory[List] { + def fromIterator[B](it: Iterator[B]): List[B] = it match { + case it: ListIterator[B] => it.toList + case _ => if (it.hasNext) Cons(it.next, fromIterator(it)) else Nil + } + } + + class ListIterator[+A](xs: List[A]) extends Iterator[A] { + private[this] var current = xs + def hasNext = !current.isEmpty + def next = { val r = current.head; current = current.tail; r } + def toList = current + } + + /** Concrete collection type: ArrayBuffer */ + class ArrayBuffer[type Elem] private (initElems: Array[AnyRef], initLength: Int) extends Seq[Elem] with FromIterator[ArrayBuffer] { + def this() = this(new Array[AnyRef](16), 0) + private var elems: Array[AnyRef] = initElems + private var start = 0 + private var limit = initLength + def apply(i: Int) = elems(start + i).asInstanceOf[Elem] + def length = limit - start + def iterator = new ArrayBufferIterator[Elem](elems, start, length) + def fromIterator[B](it: Iterator[B]): ArrayBuffer[B] = + ArrayBuffer.fromIterator(it) + def +=(elem: Elem): this.type = { + if (limit == elems.length) { + if (start > 0) { + Array.copy(elems, start, elems, 0, length) + limit -= start + start = 0 + } + else { + val newelems = new Array[AnyRef](limit * 2) + Array.copy(elems, 0, newelems, 0, limit) + elems = newelems + } + } + elems(limit) = elem.asInstanceOf[AnyRef] + limit += 1 + this + } + def trimStart(n: Int): Unit = start += (n max 0) + override def toString = s"ArrayBuffer(${elems.slice(start, limit).mkString(", ")})" + } + + object ArrayBuffer extends IterableFactory[ArrayBuffer] { + def fromIterator[B](it: Iterator[B]): ArrayBuffer[B] = it match { + case Iterator.Concat(fst: ArrayBufferIterator[_], snd: ArrayBufferIterator[_]) => + val elems = new Array[AnyRef](fst.remaining + snd.remaining) + Array.copy(fst.elems, fst.start, elems, 0, fst.remaining) + Array.copy(snd.elems, snd.start, elems, fst.remaining, snd.remaining) + new ArrayBuffer(elems, elems.length) + case it @ Iterator.Partition(underlying, _, buf, _) => + while (underlying.hasNext) it.distribute() + buf.asInstanceOf[ArrayBuffer[B]] + case it if it.remaining >= 0 => + val elems = new Array[AnyRef](it.remaining) + for (i <- 0 until elems.length) elems(i) = it.next.asInstanceOf[AnyRef] + new ArrayBuffer[B](elems, elems.length) + case _ => + val buf = new ArrayBuffer[B] + while (it.hasNext) buf += it.next + buf + } + } + + class ArrayBufferIterator[A](val elems: Array[AnyRef], initStart: Int, length: Int) extends RandomAccessIterator[A] { + val 
limit = length + def apply(n: Int) = elems(initStart + n).asInstanceOf[A] + } + + /** Concrete collection type: View */ + class View[type +Elem](it: => Iterator[Elem]) extends CanIterate[Elem] { + def iterator = it + } + + implicit class ViewOps[A](val v: View[A]) extends AnyVal with Ops[A] { + def iterator = v.iterator + def cache = to(ArrayBuffer).view + } + + implicit class ViewMonoTransforms[A](val v: View[A]) + extends AnyVal with MonoTransforms[A, View[A]] { + protected def iter = v.iterator + protected def fromIter(it: => Iterator[A]): View[A] = new View(it) + } + + implicit class ViewPolyTransforms[A](val v: View[A]) + extends AnyVal with PolyTransforms[A, View] { + protected def iter = v.iterator + protected def fromIter[B](it: => Iterator[B]) = new View(it) + } + + /** Concrete collection type: String */ + implicit class StringOps(val s: String) extends AnyVal with Ops[Char] { + def iterator: Iterator[Char] = new RandomAccessIterator[Char] { + override val limit = s.length + def apply(n: Int) = s.charAt(n) + } + } + + implicit class StringMonoTransforms(val s: String) + extends AnyVal with MonoTransformsOfSeqs[Char, String] { + protected def iter = StringOps(s).iterator + protected def fromIter(it: => Iterator[Char]) = { + val sb = new StringBuilder + for (ch <- it) sb.append(ch) + sb.toString + } + } + + implicit class StringPolyTransforms(val s: String) + extends AnyVal with PolyTransforms[Char, Seq] { + protected def iter = StringOps(s).iterator + protected def fromIter[B](it: => Iterator[B]) = List.fromIterator(it) + def map(f: Char => Char): String = { + val sb = new StringBuilder + for (ch <- s) sb.append(f(ch)) + sb.toString + } + def flatMap(f: Char => String) = { + val sb = new StringBuilder + for (ch <- s) sb.append(f(ch)) + sb.toString + } + def ++(xs: CanIterate[Char]): String = { + val sb = new StringBuilder(s) + for (ch <- xs.iterator) sb.append(ch) + sb.toString + } + def ++(xs: String): String = s + xs + } + +/* ---------- Iterators --------------------------------------------------- */ + + /** A core Iterator class */ + trait Iterator[+A] extends CanIterate[A] { self => + def hasNext: Boolean + def next: A + def iterator = this + def foldLeft[B](z: B)(op: (B, A) => B): B = + if (hasNext) foldLeft(op(z, next))(op) else z + def foldRight[B](z: B)(op: (A, B) => B): B = + if (hasNext) op(next, foldRight(z)(op)) else z + def foreach(f: A => Unit): Unit = + while (hasNext) f(next) + def indexWhere(p: A => Boolean): Int = { + var i = 0 + while (hasNext) { + if (p(next)) return i + i += 1 + } + -1 + } + def map[B](f: A => B): Iterator[B] = Iterator.Map(this, f) + def flatMap[B](f: A => CanIterate[B]): Iterator[B] = Iterator.FlatMap(this, f) + def ++[B >: A](xs: CanIterate[B]): Iterator[B] = Iterator.Concat(this, xs.iterator) + def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = { + val lookaheadTrue, lookaheadFalse = new ArrayBuffer[A] + (Iterator.Partition(this, p, lookaheadTrue, lookaheadFalse), + Iterator.Partition[A](this, !p(_), lookaheadFalse, lookaheadTrue)) + } + def drop(n: Int): Iterator[A] = Iterator.Drop(this, n) + def zip[B](that: CanIterate[B]): Iterator[(A, B)] = Iterator.Zip(this, that.iterator) + def reverse: Iterator[A] = { + var elems: List[A] = Nil + while (hasNext) elems = Cons(next, elems) + elems.iterator + } + + /** If this iterator results from applying a transfomation to another iterator, + * that other iterator, otherwise the iterator itself. 
+ */ + def underlying: Iterator[_] = this + + /** If the number of elements still to be returned by this iterator is known, + * that number, otherwise -1. + */ + def remaining = -1 + } + + object Iterator { + val empty: Iterator[Nothing] = new Iterator[Nothing] { + def hasNext = false + def next = ??? + override def remaining = 0 + } + def apply[A](xs: A*): Iterator[A] = new RandomAccessIterator[A] { + override val limit = xs.length + def apply(n: Int) = xs(n) + } + def nextOnEmpty = throw new NoSuchElementException("next on empty iterator") + + case class Map[A, B](override val underlying: Iterator[A], f: A => B) extends Iterator[B] { + def hasNext = underlying.hasNext + def next = f(underlying.next) + override def remaining = underlying.remaining + } + case class FlatMap[A, B](override val underlying: Iterator[A], f: A => CanIterate[B]) extends Iterator[B] { + private var myCurrent: Iterator[B] = Iterator.empty + private def current = { + while (!myCurrent.hasNext && underlying.hasNext) + myCurrent = f(underlying.next).iterator + myCurrent + } + def hasNext = current.hasNext + def next = current.next + } + case class Concat[A](override val underlying: Iterator[A], other: Iterator[A]) extends Iterator[A] { + private var myCurrent = underlying + private def current = { + if (!myCurrent.hasNext && myCurrent.eq(underlying)) myCurrent = other + myCurrent + } + def hasNext = current.hasNext + def next = current.next + override def remaining = + if (underlying.remaining >= 0 && other.remaining >= 0) + underlying.remaining + other.remaining + else -1 + } + case class Partition[A](override val underlying: Iterator[A], p: A => Boolean, lookahead: ArrayBuffer[A], dual: ArrayBuffer[A]) extends Iterator[A] { + def distribute() = { + val elem = underlying.next + (if (p(elem)) lookahead else dual) += elem + } + final def hasNext: Boolean = + !lookahead.isEmpty || underlying.hasNext && { distribute(); hasNext } + final def next = + if (hasNext) { + val r = lookahead.head + lookahead.trimStart(1) + r + } else Iterator.nextOnEmpty + } + case class Drop[A](override val underlying: Iterator[A], n: Int) extends Iterator[A] { + var toSkip = n + def hasNext: Boolean = underlying.hasNext && ( + toSkip == 0 || { underlying.next; toSkip -= 1; hasNext }) + def next = if (hasNext) underlying.next else nextOnEmpty + override def remaining = (underlying.remaining - toSkip) max -1 + } + case class Zip[A, B](override val underlying: Iterator[A], other: Iterator[B]) extends Iterator[(A, B)] { + def hasNext = underlying.hasNext && other.hasNext + def next = (underlying.next, other.next) + override def remaining = underlying.remaining min other.remaining + } + case class Reverse[A](override val underlying: RandomAccessIterator[A]) extends RandomAccessIterator[A] { + def apply(n: Int) = underlying.apply(underlying.limit - 1 - n) + def limit = underlying.remaining + } + } + + trait RandomAccessIterator[+A] extends Iterator[A] { self => + def apply(n: Int): A + def limit: Int + var start = 0 + override def remaining = (limit - start) max 0 + def hasNext = start < limit + def next: A = { val r = this(start); start += 1; r } + override def drop(n: Int): Iterator[A] = { start += (n max 0); this } + override def reverse: Iterator[A] = new Iterator.Reverse(this) + } +} + diff --git a/tests/disabled/pos/flowops.scala b/tests/disabled/pos/flowops.scala new file mode 100644 index 000000000..6aead26be --- /dev/null +++ b/tests/disabled/pos/flowops.scala @@ -0,0 +1,31 @@ +object Test { + import language.higherKinds + + class NotUsed + 
+ trait FO[+Out, +Mat] { self => + type Repr[+O] <: FO[O, Mat] { + type Repr[+OO] = self.Repr[OO] + } + def map[T](f: Out => T): Repr[T] = ??? + } + + class Source[+O, +M] extends FO[O, M] { + type Repr[+OO] <: Source[OO, M] + } + + class Flow[-I, +O, +M] extends FO[O, M] { + type Repr[+OO] <: Flow[I, OO, M] + } + + implicit class x[O, M, F[o, m] <: FO[o, m]](val f: F[O, M]) extends AnyVal { + def xx(i: Int): f.Repr[O] = f.map(identity) + } + + type IntFlow[O, M] = Flow[Int, O, M] + + val s1 = new Source[Int, NotUsed].xx(12) + val s2: Source[Int, NotUsed] = s1 + val f1 = x[Int, NotUsed, IntFlow](new Flow[Int, Int, NotUsed]).xx(12) + val f2: Flow[Int, Int, NotUsed] = f1 +} diff --git a/tests/disabled/pos/flowops1.scala b/tests/disabled/pos/flowops1.scala new file mode 100644 index 000000000..649a9b18c --- /dev/null +++ b/tests/disabled/pos/flowops1.scala @@ -0,0 +1,39 @@ +object Test { + class NotUsed + + trait FO[type +Out, type +Mat] { self => + type Repr <: FO[Mat = self.Mat] { + type Repr = self.Repr + } + def map[T](f: Out => T): Repr[Out = T] = ??? + } + + class Source[type +Out, type +Mat] extends FO[Out, Mat] { self => + type Repr <: Source[Mat = self.Mat] + } + + class Flow[type -In, type +Out, type +Mat] extends FO[Out, Mat] { self => + type Repr <: Flow[In = self.In, Mat = self.Mat] + } + + implicit class x[O, M, F <: FO](val f: F[Out = O, Mat = M]) extends AnyVal { + def xx(i: Int): f.Repr[Out = O] = f.map(identity) + } + + class xalt[O, M, F <: FO](val f: F[Out = O, Mat = M]) extends AnyVal { + def xx(i: Int): FO[Out = O, Mat = M] = ??? + } + + val s1 = new Source[Int, NotUsed].xx(12) + val s2: Source[Int, NotUsed] = s1 + val f1 = x[Int, NotUsed, Flow[In = Int]](new Flow[Int, Int, NotUsed]).xx(12) + val f2: Flow[Int, Int, NotUsed] = f1 + + + val f3 = x(new Flow[Int, Int, NotUsed]).xx(12) + val f4: Flow[Int, Int, NotUsed] = f3 + val f5 = new Flow[Int, Int, NotUsed].xx(12) + val f6: Flow[Int, Int, NotUsed] = f5 + val f7 = new xalt(new Flow[Int, Int, NotUsed]).xx(12) + val f8: FO[Int, NotUsed] = f7 +} diff --git a/tests/disabled/pos/hk-named.scala b/tests/disabled/pos/hk-named.scala new file mode 100644 index 000000000..5f2cb6c74 --- /dev/null +++ b/tests/disabled/pos/hk-named.scala @@ -0,0 +1,58 @@ +import language.higherKinds + +object hk0 { + + trait Lambda[type Elem] + + abstract class Functor[F <: Lambda] { + def map[A, B](f: A => B): F[Elem = A] => F[Elem = B] + } + + object test1 { + class ListT[T] extends Lambda[T] + + val ml: Functor[ListT] = ??? + val mx = ml + var xs: ListT[Int] = ??? 
+ var ys: ListT { type Elem = Int } = xs + xs = ys + val mm: (Int => Boolean) => ListT[Int] => ListT[Boolean] = mx.map[Int, Boolean] + val mm2: (Int => Boolean) => ListT[Int] => ListT[Boolean] = mx.map + } +} + + +object higherKinded { + + type Untyped = Null + + class Tree[type -Attr >: Untyped] { + type ThisType <: Tree + def withString(s: String): ThisType[Attr = String] = withString(s) + } +/* + class Ident[-Attr >: Untyped] extends Tree[Attr] { + type ThisType = Ident + } + + val id = new Ident[Integer] + + val y = id.withString("abc") + + val z: Ident[String] = y + + val zz: tpd.Tree = y + + abstract class Instance[T >: Untyped] {g + type Tree = higherKinded.Tree[T] + } + + object tpd extends Instance[String] + + def transform(tree: Tree[String]) = { + val tree1 = tree.withString("") + tree1: Tree[String] + } +*/ +} + diff --git a/tests/disabled/pos/named-params.scala b/tests/disabled/pos/named-params.scala new file mode 100644 index 000000000..3fab24cd2 --- /dev/null +++ b/tests/disabled/pos/named-params.scala @@ -0,0 +1,90 @@ +package namedparams + +class C[type Elem, type Value](val elem: Elem) { + def toVal: Elem = ??? +} + +class D[type Elem, V](elem: Elem) extends C[Elem, V](elem) + +object Test { + val c = new C[String, String]("A") { + override def toVal = elem + } + val x: c.Elem = c.elem + + val c2: C { type Elem = String } = c + + val c3 = new C[Elem = String, Value = Int]("B") + val c4 = new C[Elem = String]("C") + val x2: c2.Elem = c2.elem + + def d1[E, V](x: E) = new D[E, V](x) + def d2[E, V](x: E) = new C[Elem = E, Value = V](x) + + val y1 = d1[Int, String](1) + val y2 = d1[E = Int](2) + val y3 = d1[V = String](3) + val z1 = d2[E = Int, V = String](1) + val z2 = d2[V = String, E = Int](1) + val z3 = d2[E = Int](1) + val z4 = d2[V = Int]("AAA") + val z5 = d2[E = Int][V = String](1) + +// Testing type inference + + def f[X <: C](x: X[Int, Int]): X[String, String] = ??? + val arg1: C[Int, Int] = ??? + val res1 = f(arg1) + val chk1: C[String, String] = res1 + + class C1[type Elem, type Value](x: Elem) extends C[Elem, Value](x) + class CC extends C1[Int, Int](1) + val arg2: CC = ??? + val res2 = f(arg2) + val chk2: C[String, String] = res2 + + class D1[type Elem, type Value](x: Elem) extends C[Elem, Value](x) + class DD extends D1[Int, Int](2) + val arg3: CC & DD = ??? + val res3 = f(arg3) + val chk3: (C1 & D1) { type Elem = String; type Value = String } = res3 + val arg4: CC | DD = ??? + val res4 = f(arg4) + val chk4: C[String, String] = ??? + + class CX[type Elem](x: Elem) extends C1[Elem, Int](x) + class DX[type Value]() extends D1[Int, Value](2) + val arg5: CX[Int] & DX[Int] = ??? 
+ val res5 = f(arg5) + val chk5: (C1 & D1) { type Elem = String; type Value = String } = res5 + val chk6: C1[String, String] & D1[String, String] = chk5 + val chk7: (C1 & D1) { type Elem = String; type Value = String } = chk6 +} + +// Adapted from i94-nada, somewhat non-sensical +trait Test1 { + trait Monad[type Elem] { + def unit: Elem + } + sealed abstract class Either[A,B] + case class Left[A,B](unit: A) extends Either[A,B] with Monad[A] + case class Right[A,B](unit: B) extends Either[A,B] with Monad[B] + def flatMap[X,Y,M <: Monad](m: M[Elem = X], f: X => M[Elem = Y]): M[Elem = Y] = f(m.unit) + val res = flatMap(Left(1), {x: Int => Left(x)}) + val chk: Either[Int, Nothing] & Monad & Product1[Int] = res +} + +// Adapted from i94-nada, this time with more sense +trait Test2 { + trait Monad[type Elem] { + def unit: Elem + } + sealed abstract class Either[A,B] + case class Left[type Elem, B](unit: Elem) extends Either[Elem,B] with Monad[Elem] + case class Right[A, type Elem](unit: Elem) extends Either[A,Elem] with Monad[Elem] + def flatMap[X,Y,M <: Monad](m: M[Elem = X], f: X => M[Elem = Y]): M[Elem = Y] = f(m.unit) + val res = flatMap(Left(1), {x: Int => Left(x)}) + val chk: Left[Int, Nothing] = res +} + + diff --git a/tests/neg/named-params.scala b/tests/neg/named-params.scala deleted file mode 100644 index 5a2375b15..000000000 --- a/tests/neg/named-params.scala +++ /dev/null @@ -1,37 +0,0 @@ -package namedparams - -class C[type Elem, type Value](val elem: Elem) { - def toVal: Elem = ??? -} - -abstract class D[type Elem, V](elem: Elem) extends C[Elem, V](elem) -abstract class D2[Elem, V](elem: Elem) extends C[Elem, V](elem) // error -abstract class D3[type Elem, V](x: V) extends C[V, V](x) // error -abstract class D4[type Elem](elem: Elem) extends C[Elem, Elem] // error -object Test { - val c = new C[String, String]("A") { - override def toVal = elem - } - val x: c.Elem = c.elem - - val c2: C { type Elem = String } = c - - val c3 = new C[Elem = String, Value = Int]("B") - val c4 = new C[Elem = String]("C") - val x2: c2.Elem = c2.elem - - val c5 = new C[Elem1 = String, Value0 = Int]("B") // error // error - - def d2[E, V](x: E) = new C[Elem = E, Value = V](x) - - val dup = d2[E = Int, V = String, E = Boolean](2) // error - val z1 = d2[Elem = Int, Value = String](1) // error // error - val z2 = d2[Value = String, Elem = Int](1) // error // error - val z3 = d2[Elem = Int](1) // error - val z4 = d2[Value = Int]("AAA") // error - val z5 = d2[Elem = Int][Value = String](1) //error // error - -} - - - diff --git a/tests/pos/CollectionStrawMan3.scala b/tests/pos/CollectionStrawMan3.scala deleted file mode 100644 index c21a73f00..000000000 --- a/tests/pos/CollectionStrawMan3.scala +++ /dev/null @@ -1,408 +0,0 @@ -package strawman.collections - -import Predef.{augmentString => _, wrapString => _, _} -import scala.reflect.ClassTag - -/** A strawman architecture for new collections. It contains some - * example collection classes and methods with the intent to expose - * some key issues. It would be good to compare this to other - * implementations of the same functionality, to get an idea of the - * strengths and weaknesses of different collection architectures. - * - * For a test file, see tests/run/CollectionTests.scala. - * - * This one is like CollectionStrawMan1, but with the named parameter - * scheme for hk types. 
- */ -object CollectionStrawMan1 { - - /* ------------ Base Traits -------------------------------- */ - - /** Replaces TraversableOnce */ - trait CanIterate[type +Elem] { - def iterator: Iterator[Elem] - } - - /** Base trait for instances that can construct a collection from an iterator */ - trait FromIterator[+C <: Iterable] { - def fromIterator[B](it: Iterator[B]): C[Elem = B] - } - - /** Base trait for companion objects of collections */ - trait IterableFactory[+C <: Iterable] extends FromIterator[C] { - def empty[X]: C[Elem = X] = fromIterator(Iterator.empty) - def apply[A](xs: A*): C[Elem = A] = fromIterator(Iterator(xs: _*)) - } - - /** Base trait for generic collections */ - trait Iterable[type +Elem] extends CanIterate[Elem] with FromIterator[Iterable] - - /** Base trait for sequence collections */ - trait Seq[type +Elem] extends Iterable[Elem] with FromIterator[Seq] { - def apply(i: Int): Elem - def length: Int - } - - /* ------------ Operations ----------------------------------- */ - - /** Operations returning types unrelated to current collection */ - trait Ops[A] extends Any { - def iterator: Iterator[A] - def foreach(f: A => Unit): Unit = iterator.foreach(f) - def foldLeft[B](z: B)(op: (B, A) => B): B = iterator.foldLeft(z)(op) - def foldRight[B](z: B)(op: (A, B) => B): B = iterator.foldRight(z)(op) - def indexWhere(p: A => Boolean): Int = iterator.indexWhere(p) - def isEmpty: Boolean = !iterator.hasNext - def head: A = iterator.next - def view: View[A] = new View(iterator) - def to[C <: Iterable](fi: FromIterator[C]): C[Elem = A] = fi.fromIterator(iterator) - } - - /** Transforms returning same collection type */ - trait MonoTransforms[A, Repr] extends Any { - protected def iter: Iterator[A] - protected def fromIter(it: => Iterator[A]): Repr - def partition(p: A => Boolean): (Repr, Repr) = { - val (xs, ys) = iter.partition(p) - (fromIter(xs), fromIter(ys)) - } - def drop(n: Int): Repr = fromIter(iter.drop(n)) - } - - /** Transforms returning same collection type constructor */ - trait PolyTransforms[A, C <: CanIterate] extends Any { - protected def iter: Iterator[A] - protected def fromIter[B](it: => Iterator[B]): C[Elem = B] - def map[B](f: A => B): C[Elem = B] = fromIter(iter.map(f)) - def flatMap[B](f: A => CanIterate[B]): C[Elem = B] = fromIter(iter.flatMap(f(_))) - def ++[B >: A](xs: CanIterate[B]): C[Elem = B] = fromIter(iter ++ xs) - def zip[B](xs: CanIterate[B]): C[Elem = (A, B)] = fromIter(iter.zip(xs.iterator)) - } - - /** Transforms that only apply to Seq */ - trait MonoTransformsOfSeqs[A, Repr] extends Any with MonoTransforms[A, Repr] { - def reverse: Repr = fromIter(iter.reverse) - } - - /** Implementation of Ops for all generic collections */ - implicit class IterableOps[A](val c: Iterable[A]) - extends AnyVal with Ops[A] { - def iterator = c.iterator - } - - /** Implementation of MonoTransforms for all generic collections */ - implicit class IterableMonoTransforms[A, C <: Iterable](val c: Iterable[A] with FromIterator[C]) - extends AnyVal with MonoTransforms[A, C[Elem = A]] { - protected def iter = c.iterator - protected def fromIter(it: => Iterator[A]): C[Elem = A] = c.fromIterator(it) - } - - /** Implementation of PolyTransforms for all generic collections */ - implicit class IterablePolyTransforms[A, C <: Iterable](val c: Iterable[A] with FromIterator[C]) - extends AnyVal with PolyTransforms[A, C] { - protected def iter = c.iterator - protected def fromIter[B](it: => Iterator[B]) = c.fromIterator(it) - } - - /** Implementation of MonoTransformsForSeqs for 
all generic collections */ - implicit class SeqMonoTransforms[A, C <: Seq](val c: Seq[A] with FromIterator[C]) - extends AnyVal with MonoTransformsOfSeqs[A, C[Elem = A]] { - protected def iter = c.iterator - protected def fromIter(it: => Iterator[A]) = c.fromIterator(it) - } - - /* --------- Concrete collection types ------------------------------- */ - - /** Concrete collection type: List */ - sealed trait List[type +Elem] extends Seq[Elem] with FromIterator[List] { - def isEmpty: Boolean - def head: Elem - def tail: List[Elem] - def iterator = new ListIterator[Elem](this) - def fromIterator[B](it: Iterator[B]): List[B] = List.fromIterator(it) - def apply(i: Int): Elem = { - require(!isEmpty) - if (i == 0) head else tail.apply(i - 1) - } - def length: Int = - if (isEmpty) 0 else 1 + tail.length - } - - case class Cons[+A](x: A, xs: List[A]) extends List[A] { - def isEmpty = false - def head = x - def tail = xs - } - - case object Nil extends List[Nothing] { - def isEmpty = true - def head = ??? - def tail = ??? - } - - object List extends IterableFactory[List] { - def fromIterator[B](it: Iterator[B]): List[B] = it match { - case it: ListIterator[B] => it.toList - case _ => if (it.hasNext) Cons(it.next, fromIterator(it)) else Nil - } - } - - class ListIterator[+A](xs: List[A]) extends Iterator[A] { - private[this] var current = xs - def hasNext = !current.isEmpty - def next = { val r = current.head; current = current.tail; r } - def toList = current - } - - /** Concrete collection type: ArrayBuffer */ - class ArrayBuffer[type Elem] private (initElems: Array[AnyRef], initLength: Int) extends Seq[Elem] with FromIterator[ArrayBuffer] { - def this() = this(new Array[AnyRef](16), 0) - private var elems: Array[AnyRef] = initElems - private var start = 0 - private var limit = initLength - def apply(i: Int) = elems(start + i).asInstanceOf[Elem] - def length = limit - start - def iterator = new ArrayBufferIterator[Elem](elems, start, length) - def fromIterator[B](it: Iterator[B]): ArrayBuffer[B] = - ArrayBuffer.fromIterator(it) - def +=(elem: Elem): this.type = { - if (limit == elems.length) { - if (start > 0) { - Array.copy(elems, start, elems, 0, length) - limit -= start - start = 0 - } - else { - val newelems = new Array[AnyRef](limit * 2) - Array.copy(elems, 0, newelems, 0, limit) - elems = newelems - } - } - elems(limit) = elem.asInstanceOf[AnyRef] - limit += 1 - this - } - def trimStart(n: Int): Unit = start += (n max 0) - override def toString = s"ArrayBuffer(${elems.slice(start, limit).mkString(", ")})" - } - - object ArrayBuffer extends IterableFactory[ArrayBuffer] { - def fromIterator[B](it: Iterator[B]): ArrayBuffer[B] = it match { - case Iterator.Concat(fst: ArrayBufferIterator[_], snd: ArrayBufferIterator[_]) => - val elems = new Array[AnyRef](fst.remaining + snd.remaining) - Array.copy(fst.elems, fst.start, elems, 0, fst.remaining) - Array.copy(snd.elems, snd.start, elems, fst.remaining, snd.remaining) - new ArrayBuffer(elems, elems.length) - case it @ Iterator.Partition(underlying, _, buf, _) => - while (underlying.hasNext) it.distribute() - buf.asInstanceOf[ArrayBuffer[B]] - case it if it.remaining >= 0 => - val elems = new Array[AnyRef](it.remaining) - for (i <- 0 until elems.length) elems(i) = it.next.asInstanceOf[AnyRef] - new ArrayBuffer[B](elems, elems.length) - case _ => - val buf = new ArrayBuffer[B] - while (it.hasNext) buf += it.next - buf - } - } - - class ArrayBufferIterator[A](val elems: Array[AnyRef], initStart: Int, length: Int) extends RandomAccessIterator[A] { - val 
limit = length - def apply(n: Int) = elems(initStart + n).asInstanceOf[A] - } - - /** Concrete collection type: View */ - class View[type +Elem](it: => Iterator[Elem]) extends CanIterate[Elem] { - def iterator = it - } - - implicit class ViewOps[A](val v: View[A]) extends AnyVal with Ops[A] { - def iterator = v.iterator - def cache = to(ArrayBuffer).view - } - - implicit class ViewMonoTransforms[A](val v: View[A]) - extends AnyVal with MonoTransforms[A, View[A]] { - protected def iter = v.iterator - protected def fromIter(it: => Iterator[A]): View[A] = new View(it) - } - - implicit class ViewPolyTransforms[A](val v: View[A]) - extends AnyVal with PolyTransforms[A, View] { - protected def iter = v.iterator - protected def fromIter[B](it: => Iterator[B]) = new View(it) - } - - /** Concrete collection type: String */ - implicit class StringOps(val s: String) extends AnyVal with Ops[Char] { - def iterator: Iterator[Char] = new RandomAccessIterator[Char] { - override val limit = s.length - def apply(n: Int) = s.charAt(n) - } - } - - implicit class StringMonoTransforms(val s: String) - extends AnyVal with MonoTransformsOfSeqs[Char, String] { - protected def iter = StringOps(s).iterator - protected def fromIter(it: => Iterator[Char]) = { - val sb = new StringBuilder - for (ch <- it) sb.append(ch) - sb.toString - } - } - - implicit class StringPolyTransforms(val s: String) - extends AnyVal with PolyTransforms[Char, Seq] { - protected def iter = StringOps(s).iterator - protected def fromIter[B](it: => Iterator[B]) = List.fromIterator(it) - def map(f: Char => Char): String = { - val sb = new StringBuilder - for (ch <- s) sb.append(f(ch)) - sb.toString - } - def flatMap(f: Char => String) = { - val sb = new StringBuilder - for (ch <- s) sb.append(f(ch)) - sb.toString - } - def ++(xs: CanIterate[Char]): String = { - val sb = new StringBuilder(s) - for (ch <- xs.iterator) sb.append(ch) - sb.toString - } - def ++(xs: String): String = s + xs - } - -/* ---------- Iterators --------------------------------------------------- */ - - /** A core Iterator class */ - trait Iterator[+A] extends CanIterate[A] { self => - def hasNext: Boolean - def next: A - def iterator = this - def foldLeft[B](z: B)(op: (B, A) => B): B = - if (hasNext) foldLeft(op(z, next))(op) else z - def foldRight[B](z: B)(op: (A, B) => B): B = - if (hasNext) op(next, foldRight(z)(op)) else z - def foreach(f: A => Unit): Unit = - while (hasNext) f(next) - def indexWhere(p: A => Boolean): Int = { - var i = 0 - while (hasNext) { - if (p(next)) return i - i += 1 - } - -1 - } - def map[B](f: A => B): Iterator[B] = Iterator.Map(this, f) - def flatMap[B](f: A => CanIterate[B]): Iterator[B] = Iterator.FlatMap(this, f) - def ++[B >: A](xs: CanIterate[B]): Iterator[B] = Iterator.Concat(this, xs.iterator) - def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = { - val lookaheadTrue, lookaheadFalse = new ArrayBuffer[A] - (Iterator.Partition(this, p, lookaheadTrue, lookaheadFalse), - Iterator.Partition[A](this, !p(_), lookaheadFalse, lookaheadTrue)) - } - def drop(n: Int): Iterator[A] = Iterator.Drop(this, n) - def zip[B](that: CanIterate[B]): Iterator[(A, B)] = Iterator.Zip(this, that.iterator) - def reverse: Iterator[A] = { - var elems: List[A] = Nil - while (hasNext) elems = Cons(next, elems) - elems.iterator - } - - /** If this iterator results from applying a transfomation to another iterator, - * that other iterator, otherwise the iterator itself. 
- */ - def underlying: Iterator[_] = this - - /** If the number of elements still to be returned by this iterator is known, - * that number, otherwise -1. - */ - def remaining = -1 - } - - object Iterator { - val empty: Iterator[Nothing] = new Iterator[Nothing] { - def hasNext = false - def next = ??? - override def remaining = 0 - } - def apply[A](xs: A*): Iterator[A] = new RandomAccessIterator[A] { - override val limit = xs.length - def apply(n: Int) = xs(n) - } - def nextOnEmpty = throw new NoSuchElementException("next on empty iterator") - - case class Map[A, B](override val underlying: Iterator[A], f: A => B) extends Iterator[B] { - def hasNext = underlying.hasNext - def next = f(underlying.next) - override def remaining = underlying.remaining - } - case class FlatMap[A, B](override val underlying: Iterator[A], f: A => CanIterate[B]) extends Iterator[B] { - private var myCurrent: Iterator[B] = Iterator.empty - private def current = { - while (!myCurrent.hasNext && underlying.hasNext) - myCurrent = f(underlying.next).iterator - myCurrent - } - def hasNext = current.hasNext - def next = current.next - } - case class Concat[A](override val underlying: Iterator[A], other: Iterator[A]) extends Iterator[A] { - private var myCurrent = underlying - private def current = { - if (!myCurrent.hasNext && myCurrent.eq(underlying)) myCurrent = other - myCurrent - } - def hasNext = current.hasNext - def next = current.next - override def remaining = - if (underlying.remaining >= 0 && other.remaining >= 0) - underlying.remaining + other.remaining - else -1 - } - case class Partition[A](override val underlying: Iterator[A], p: A => Boolean, lookahead: ArrayBuffer[A], dual: ArrayBuffer[A]) extends Iterator[A] { - def distribute() = { - val elem = underlying.next - (if (p(elem)) lookahead else dual) += elem - } - final def hasNext: Boolean = - !lookahead.isEmpty || underlying.hasNext && { distribute(); hasNext } - final def next = - if (hasNext) { - val r = lookahead.head - lookahead.trimStart(1) - r - } else Iterator.nextOnEmpty - } - case class Drop[A](override val underlying: Iterator[A], n: Int) extends Iterator[A] { - var toSkip = n - def hasNext: Boolean = underlying.hasNext && ( - toSkip == 0 || { underlying.next; toSkip -= 1; hasNext }) - def next = if (hasNext) underlying.next else nextOnEmpty - override def remaining = (underlying.remaining - toSkip) max -1 - } - case class Zip[A, B](override val underlying: Iterator[A], other: Iterator[B]) extends Iterator[(A, B)] { - def hasNext = underlying.hasNext && other.hasNext - def next = (underlying.next, other.next) - override def remaining = underlying.remaining min other.remaining - } - case class Reverse[A](override val underlying: RandomAccessIterator[A]) extends RandomAccessIterator[A] { - def apply(n: Int) = underlying.apply(underlying.limit - 1 - n) - def limit = underlying.remaining - } - } - - trait RandomAccessIterator[+A] extends Iterator[A] { self => - def apply(n: Int): A - def limit: Int - var start = 0 - override def remaining = (limit - start) max 0 - def hasNext = start < limit - def next: A = { val r = this(start); start += 1; r } - override def drop(n: Int): Iterator[A] = { start += (n max 0); this } - override def reverse: Iterator[A] = new Iterator.Reverse(this) - } -} - diff --git a/tests/pos/flowops.scala b/tests/pos/flowops.scala deleted file mode 100644 index 6aead26be..000000000 --- a/tests/pos/flowops.scala +++ /dev/null @@ -1,31 +0,0 @@ -object Test { - import language.higherKinds - - class NotUsed - - trait FO[+Out, +Mat] 
{ self => - type Repr[+O] <: FO[O, Mat] { - type Repr[+OO] = self.Repr[OO] - } - def map[T](f: Out => T): Repr[T] = ??? - } - - class Source[+O, +M] extends FO[O, M] { - type Repr[+OO] <: Source[OO, M] - } - - class Flow[-I, +O, +M] extends FO[O, M] { - type Repr[+OO] <: Flow[I, OO, M] - } - - implicit class x[O, M, F[o, m] <: FO[o, m]](val f: F[O, M]) extends AnyVal { - def xx(i: Int): f.Repr[O] = f.map(identity) - } - - type IntFlow[O, M] = Flow[Int, O, M] - - val s1 = new Source[Int, NotUsed].xx(12) - val s2: Source[Int, NotUsed] = s1 - val f1 = x[Int, NotUsed, IntFlow](new Flow[Int, Int, NotUsed]).xx(12) - val f2: Flow[Int, Int, NotUsed] = f1 -} diff --git a/tests/pos/flowops1.scala b/tests/pos/flowops1.scala deleted file mode 100644 index 649a9b18c..000000000 --- a/tests/pos/flowops1.scala +++ /dev/null @@ -1,39 +0,0 @@ -object Test { - class NotUsed - - trait FO[type +Out, type +Mat] { self => - type Repr <: FO[Mat = self.Mat] { - type Repr = self.Repr - } - def map[T](f: Out => T): Repr[Out = T] = ??? - } - - class Source[type +Out, type +Mat] extends FO[Out, Mat] { self => - type Repr <: Source[Mat = self.Mat] - } - - class Flow[type -In, type +Out, type +Mat] extends FO[Out, Mat] { self => - type Repr <: Flow[In = self.In, Mat = self.Mat] - } - - implicit class x[O, M, F <: FO](val f: F[Out = O, Mat = M]) extends AnyVal { - def xx(i: Int): f.Repr[Out = O] = f.map(identity) - } - - class xalt[O, M, F <: FO](val f: F[Out = O, Mat = M]) extends AnyVal { - def xx(i: Int): FO[Out = O, Mat = M] = ??? - } - - val s1 = new Source[Int, NotUsed].xx(12) - val s2: Source[Int, NotUsed] = s1 - val f1 = x[Int, NotUsed, Flow[In = Int]](new Flow[Int, Int, NotUsed]).xx(12) - val f2: Flow[Int, Int, NotUsed] = f1 - - - val f3 = x(new Flow[Int, Int, NotUsed]).xx(12) - val f4: Flow[Int, Int, NotUsed] = f3 - val f5 = new Flow[Int, Int, NotUsed].xx(12) - val f6: Flow[Int, Int, NotUsed] = f5 - val f7 = new xalt(new Flow[Int, Int, NotUsed]).xx(12) - val f8: FO[Int, NotUsed] = f7 -} diff --git a/tests/pos/hk-named.scala b/tests/pos/hk-named.scala deleted file mode 100644 index 5f2cb6c74..000000000 --- a/tests/pos/hk-named.scala +++ /dev/null @@ -1,58 +0,0 @@ -import language.higherKinds - -object hk0 { - - trait Lambda[type Elem] - - abstract class Functor[F <: Lambda] { - def map[A, B](f: A => B): F[Elem = A] => F[Elem = B] - } - - object test1 { - class ListT[T] extends Lambda[T] - - val ml: Functor[ListT] = ??? - val mx = ml - var xs: ListT[Int] = ??? 
- var ys: ListT { type Elem = Int } = xs - xs = ys - val mm: (Int => Boolean) => ListT[Int] => ListT[Boolean] = mx.map[Int, Boolean] - val mm2: (Int => Boolean) => ListT[Int] => ListT[Boolean] = mx.map - } -} - - -object higherKinded { - - type Untyped = Null - - class Tree[type -Attr >: Untyped] { - type ThisType <: Tree - def withString(s: String): ThisType[Attr = String] = withString(s) - } -/* - class Ident[-Attr >: Untyped] extends Tree[Attr] { - type ThisType = Ident - } - - val id = new Ident[Integer] - - val y = id.withString("abc") - - val z: Ident[String] = y - - val zz: tpd.Tree = y - - abstract class Instance[T >: Untyped] {g - type Tree = higherKinded.Tree[T] - } - - object tpd extends Instance[String] - - def transform(tree: Tree[String]) = { - val tree1 = tree.withString("") - tree1: Tree[String] - } -*/ -} - diff --git a/tests/pos/named-params.scala b/tests/pos/named-params.scala deleted file mode 100644 index 3fab24cd2..000000000 --- a/tests/pos/named-params.scala +++ /dev/null @@ -1,90 +0,0 @@ -package namedparams - -class C[type Elem, type Value](val elem: Elem) { - def toVal: Elem = ??? -} - -class D[type Elem, V](elem: Elem) extends C[Elem, V](elem) - -object Test { - val c = new C[String, String]("A") { - override def toVal = elem - } - val x: c.Elem = c.elem - - val c2: C { type Elem = String } = c - - val c3 = new C[Elem = String, Value = Int]("B") - val c4 = new C[Elem = String]("C") - val x2: c2.Elem = c2.elem - - def d1[E, V](x: E) = new D[E, V](x) - def d2[E, V](x: E) = new C[Elem = E, Value = V](x) - - val y1 = d1[Int, String](1) - val y2 = d1[E = Int](2) - val y3 = d1[V = String](3) - val z1 = d2[E = Int, V = String](1) - val z2 = d2[V = String, E = Int](1) - val z3 = d2[E = Int](1) - val z4 = d2[V = Int]("AAA") - val z5 = d2[E = Int][V = String](1) - -// Testing type inference - - def f[X <: C](x: X[Int, Int]): X[String, String] = ??? - val arg1: C[Int, Int] = ??? - val res1 = f(arg1) - val chk1: C[String, String] = res1 - - class C1[type Elem, type Value](x: Elem) extends C[Elem, Value](x) - class CC extends C1[Int, Int](1) - val arg2: CC = ??? - val res2 = f(arg2) - val chk2: C[String, String] = res2 - - class D1[type Elem, type Value](x: Elem) extends C[Elem, Value](x) - class DD extends D1[Int, Int](2) - val arg3: CC & DD = ??? - val res3 = f(arg3) - val chk3: (C1 & D1) { type Elem = String; type Value = String } = res3 - val arg4: CC | DD = ??? - val res4 = f(arg4) - val chk4: C[String, String] = ??? - - class CX[type Elem](x: Elem) extends C1[Elem, Int](x) - class DX[type Value]() extends D1[Int, Value](2) - val arg5: CX[Int] & DX[Int] = ??? 
- val res5 = f(arg5) - val chk5: (C1 & D1) { type Elem = String; type Value = String } = res5 - val chk6: C1[String, String] & D1[String, String] = chk5 - val chk7: (C1 & D1) { type Elem = String; type Value = String } = chk6 -} - -// Adapted from i94-nada, somewhat non-sensical -trait Test1 { - trait Monad[type Elem] { - def unit: Elem - } - sealed abstract class Either[A,B] - case class Left[A,B](unit: A) extends Either[A,B] with Monad[A] - case class Right[A,B](unit: B) extends Either[A,B] with Monad[B] - def flatMap[X,Y,M <: Monad](m: M[Elem = X], f: X => M[Elem = Y]): M[Elem = Y] = f(m.unit) - val res = flatMap(Left(1), {x: Int => Left(x)}) - val chk: Either[Int, Nothing] & Monad & Product1[Int] = res -} - -// Adapted from i94-nada, this time with more sense -trait Test2 { - trait Monad[type Elem] { - def unit: Elem - } - sealed abstract class Either[A,B] - case class Left[type Elem, B](unit: Elem) extends Either[Elem,B] with Monad[Elem] - case class Right[A, type Elem](unit: Elem) extends Either[A,Elem] with Monad[Elem] - def flatMap[X,Y,M <: Monad](m: M[Elem = X], f: X => M[Elem = Y]): M[Elem = Y] = f(m.unit) - val res = flatMap(Left(1), {x: Int => Left(x)}) - val chk: Left[Int, Nothing] = res -} - - -- cgit v1.2.3 From 6414f3bccf5319d273e8f5eb5461b111e9270b34 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 9 Jun 2016 13:27:11 +0200 Subject: Deal with nested findMember calls over the same RecType --- src/dotty/tools/dotc/core/Types.scala | 48 +++++++++++++++++++++++++++++++---- 1 file changed, 43 insertions(+), 5 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index b779cbf3e..3d4ec6601 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -467,10 +467,44 @@ object Types { case _ => NoDenotation } - def goRec(tp: RecType) = { - //println(s"find member $pre . $name in $tp") - go(tp.parent).mapInfo(_.substRecThis(tp, pre)) - } + def goRec(tp: RecType) = + if (tp.parent == null) NoDenotation + else { + //println(s"find member $pre . $name in $tp") + + // We have to be careful because we might open the same (wrt eq) recursive type + // twice during findMember which risks picking the wrong prefix in the `substRecThis(rt, pre)` + // call below. To avoid this problem we do a defensive copy of the recursive + // type first. But if we do this always we risk being inefficient and we run into + // stackoverflows when compiling pos/hk.scala. So we only do a copy if the type + // is visited again in a recursive call to `findMember`, as tracked by `tp.opened`. + // Furthermore, if this happens we mark the original recursive type with `openedTwice` + // which means that we always defensively copy the type in the future. This second + // measure is necessary because findMember calls might be cached, so do not + // necessarily appear in nested order. + // Without the defensive copy, Typer.scala fails to compile at the line + // + // untpd.rename(lhsCore, setterName).withType(setterType), WildcardType) + // + // because the subtype check + // + // ThisTree[Untyped]#ThisTree[Typed] <: Tree[Typed] + // + // fails (in fact it thinks the underlying type of the LHS is `Tree[Untyped]`.) + // + // Without the without the `openedTwice` trick, Typer.scala fails to Ycheck + // at phase resolveSuper. 
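// A minimal, self-contained sketch of the copy-on-reentry discipline described in the
// comment above, with illustrative names only (nothing here is compiler code): the
// first visit marks the value as opened; a nested visit of the same value switches to
// a defensive copy and records `openedTwice`, so that later visits copy defensively too.
object ReentrantVisitSketch {
  final class Node(val payload: String) {
    var opened = false
    var openedTwice = false
  }
  def visit[T](n0: Node)(body: Node => T): T = {
    val n = if (n0.opened) { n0.openedTwice = true; new Node(n0.payload) } else n0
    n.opened = true
    try body(n)
    finally { if (!n.openedTwice) n.opened = false }
  }
}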
+ val rt = + if (tp.opened) { // defensive copy + tp.openedTwice = true + RecType(rt => tp.parent.substRecThis(tp, RecThis(rt))) + } else tp + rt.opened = true + try go(rt.parent).mapInfo(_.substRecThis(rt, pre)) + finally { + if (!rt.openedTwice) rt.opened = false + } + } def goRefined(tp: RefinedType) = { val pdenot = go(tp.parent) val rinfo = @@ -546,7 +580,7 @@ object Types { try go(this) catch { case ex: Throwable => - core.println(i"findMember exception for $this member $name") + core.println(i"findMember exception for $this member $name, pre = $pre") throw ex // DEBUG } finally { @@ -2242,6 +2276,10 @@ object Types { class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType { + // See discussion in findMember#goRec why these vars are needed + private[Types] var opened: Boolean = false + private[Types] var openedTwice: Boolean = false + val parent = parentExp(this) override def underlying(implicit ctx: Context): Type = parent -- cgit v1.2.3 From e749d832e9adebc502c961d743b3b29072f8116a Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:57:02 +0200 Subject: Various tweaks - Swap order of subtype tests The theory is that if two refined types have the same refined name, then they are likely to be of related classes. So it seems more fruitful to check the argument before the typeconstructor because that way we test the part that's more likely to fail first. Rough observations seem to indicate a 3% improvement in the junit test time. - Cleanups Drop some unnecessary cases; improve comments. - Smarter handling of LazyRefs in betaReduce Try to combine type constructor and arguments under a common LazyRef. - Optimize RecType/RecType comparisons - Fix compareHkLambda, make it check variances. --- src/dotty/tools/dotc/core/TypeApplications.scala | 21 ++++-- src/dotty/tools/dotc/core/TypeComparer.scala | 89 +++++++++++++++--------- src/dotty/tools/dotc/core/Types.scala | 43 ++++-------- src/dotty/tools/dotc/typer/Inferencing.scala | 2 +- 4 files changed, 86 insertions(+), 69 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index c0728a8fb..0edc598dd 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -38,14 +38,23 @@ object TypeApplications { case _ => tp } - /** Does the variance of `sym1` conform to the variance of `sym2`? + /** Does variance `v1` conform to variance `v2`? * This is the case if the variances are the same or `sym` is nonvariant. */ - def varianceConforms(sym1: MemberBinding, sym2: MemberBinding)(implicit ctx: Context) = - sym1.memberVariance == sym2.memberVariance || sym2.memberVariance == 0 + def varianceConforms(v1: Int, v2: Int)(implicit ctx: Context): Boolean = + v1 == v2 || v2 == 0 - def variancesConform(syms1: List[MemberBinding], syms2: List[MemberBinding])(implicit ctx: Context) = - syms1.corresponds(syms2)(varianceConforms) + /** Does the variance of type parameter `tparam1` conform to the variance of type parameter `tparam2`? + */ + def varianceConforms(tparam1: MemberBinding, tparam2: MemberBinding)(implicit ctx: Context): Boolean = + varianceConforms(tparam1.memberVariance, tparam2.memberVariance) + + /** Doe the variances of type parameters `tparams1` conform to the variances + * of corresponding type parameters `tparams2`? + * This is only the case of `tparams1` and `tparams2` have the same length. 
+ */ + def variancesConform(tparams1: List[MemberBinding], tparams2: List[MemberBinding])(implicit ctx: Context): Boolean = + tparams1.corresponds(tparams2)(varianceConforms) def fallbackTypeParams(variances: List[Int])(implicit ctx: Context): List[MemberBinding] = { def memberBindings(vs: List[Int]): Type = vs match { @@ -102,7 +111,7 @@ object TypeApplications { def unapply(tp: Type)(implicit ctx: Context): Option[(/*List[Int], */List[TypeBounds], Type)] = if (Config.newHK) { def decompose(t: Type, acc: List[TypeBounds]): (List[TypeBounds], Type) = t match { - case t @ RefinedType(p, rname, rinfo: TypeBounds) if rname.isHkArgName && rinfo.isBinding => + case t @ RefinedType(p, rname, rinfo: TypeBounds) if t.isTypeParam => decompose(p, rinfo.bounds :: acc) case t: RecType => decompose(t.parent, acc) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 5f28d07eb..c1b275b70 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -368,20 +368,31 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { // This twist is needed to make collection/generic/ParFactory.scala compile fourthTry(tp1, tp2) || compareRefinedSlow case _ => - compareHkApply(tp2, tp1, inOrder = false) || - compareHkLambda(tp2, tp1, inOrder = false) || - compareRefinedSlow || - fourthTry(tp1, tp2) || - compareAliasedRefined(tp2, tp1, inOrder = false) + if (tp2.isTypeParam) { + compareHkLambda(tp1, tp2) || + fourthTry(tp1, tp2) + } + else { + compareHkApply(tp2, tp1, inOrder = false) || + compareRefinedSlow || + fourthTry(tp1, tp2) || + compareAliasedRefined(tp2, tp1, inOrder = false) + } } else // fast path, in particular for refinements resulting from parameterization. - isSubType(tp1, skipped2) && // TODO swap? - isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) + isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) && + isSubType(tp1, skipped2) // TODO swap? } compareRefined case tp2: RecType => - val tp1stable = ensureStableSingleton(tp1) - isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable)) + tp1.safeDealias match { + case tp1: RecType => + val rthis1 = RecThis(tp1) + isSubType(tp1.parent, tp2.parent.substRecThis(tp2, rthis1)) + case _ => + val tp1stable = ensureStableSingleton(tp1) + isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable)) + } case OrType(tp21, tp22) => // Rewrite T1 <: (T211 & T212) | T22 to T1 <: (T211 | T22) and T1 <: (T212 | T22) // and analogously for T1 <: T21 | (T221 & T222) @@ -496,7 +507,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths case tp1: RefinedType => compareHkApply(tp1, tp2, inOrder = true) || - compareHkLambda(tp1, tp2, inOrder = true) || isNewSubType(tp1.parent, tp2) || compareAliasedRefined(tp1, tp2, inOrder = true) case tp1: RecType => @@ -609,15 +619,25 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { tryInfer(projection.prefix.typeConstructor.dealias) } - /** If `projection` is a hk projection T#$apply with a constrainable poly param - * as type constructor and `other` is not a hk projection, then perform the following - * steps: + /** Handle subtype tests + * + * app <:< other if inOrder = true + * other <:< app if inOrder = false + * + * where `app` is an hk application but `other` is not. 
+ * + * As a first step, if `app` appears on the right, try to normalize it using + * `normalizeHkApply`, if that gives a different type proceed with a regular subtype + * test using that type instead of `app`. + * + * Otherwise, if `app` has constrainable poly param as type constructor, + * perform the following steps: * * (1) If not `inOrder` then perform the next steps until they all succeed * for each base type of other which - * - derives from a class bound of `projection`, - * - has the same number of type parameters than `projection` - * - has type parameter variances which conform to those of `projection`. + * - derives from a class bound of `app`, + * - has the same number of type parameters as `app` + * - has type parameter variances which conform to those of `app`. * If `inOrder` then perform the same steps on the original `other` type. * * (2) Try to eta expand the constructor of `other`. @@ -627,7 +647,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * (3b) In normal mode, try to unify the projection's hk constructor parameter with * the eta expansion of step(2) * - * (4) If `inOrder`, test `projection <: other` else test `other <: projection`. + * (4) If `inOrder`, test `app <: other` else test `other <: app`. */ def compareHkApply(app: RefinedType, other: Type, inOrder: Boolean): Boolean = { def tryInfer(tp: Type): Boolean = ctx.traceIndented(i"compareHK($app, $other, inOrder = $inOrder, constr = $tp)", subtyping) { @@ -676,7 +696,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } } Config.newHK && app.isHKApply && !other.isHKApply && { - val reduced = app.normalizeHkApply + val reduced = if (inOrder) app else app.normalizeHkApply if (reduced ne app) if (inOrder) isSubType(reduced, other) else isSubType(other, reduced) else tryInfer(app.typeConstructor.dealias) @@ -684,16 +704,20 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } /** Compare type lambda with non-lambda type. */ - def compareHkLambda(rt: RefinedType, other: Type, inOrder: Boolean) = rt match { - case TypeLambda(args, body) => - args.length == other.typeParams.length && { - val applied = other.appliedTo(argRefs(rt, args.length)) - if (inOrder) isSubType(body, applied) - else body match { - case body: TypeBounds => body.contains(applied) // Can be dropped? - case _ => isSubType(applied, body) - } + def compareHkLambda(tp1: Type, tp2: RefinedType): Boolean = tp1.stripTypeVar match { + case TypeLambda(args1, body1) => + //println(i"comparing $tp1 <:< $tp2") + tp2 match { + case TypeLambda(args2, body2) => + args1.corresponds(args2)((arg1, arg2) => + varianceConforms(BindingKind.toVariance(arg1.bindingKind), + BindingKind.toVariance(arg2.bindingKind))) && + // don't compare bounds; it would go in the wrong sense anyway. 
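// A toy model of the lambda-vs-lambda rule implemented here: one type lambda
// conforms to another if the parameter variances conform pairwise (which also
// forces equal arity) and the bodies are in the subtype relation; the parameter
// bounds themselves are not compared. `Lam`, the String bodies and `isSubBody`
// are stand-ins for the compiler's Type and isSubType, not its actual API.
object CompareHkLambdaSketch {
  final case class Lam(paramVariances: List[Int], body: String)

  def varianceConforms(v1: Int, v2: Int): Boolean = v1 == v2 || v2 == 0

  def compareHkLambda(l1: Lam, l2: Lam)(isSubBody: (String, String) => Boolean): Boolean =
    l1.paramVariances.corresponds(l2.paramVariances)(varianceConforms) &&
      isSubBody(l1.body, l2.body)
}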
+ isSubType(body1, body2) + case _ => false } + case RefinedType(parent1, _, _) => + compareHkLambda(parent1, tp2) case _ => false } @@ -1523,13 +1547,12 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { } else super.compareHkApply(app, other, inOrder) - override def compareHkLambda(rt: RefinedType, other: Type, inOrder: Boolean) = - if (!Config.newHK && rt.refinedName == tpnme.hkApplyOBS || - Config.newHK && rt.isTypeParam) - traceIndented(i"compareHkLambda $rt, $other, $inOrder") { - super.compareHkLambda(rt, other, inOrder) + override def compareHkLambda(tp1: Type, tp2: RefinedType): Boolean = + if (tp2.isTypeParam) + traceIndented(i"compareHkLambda $tp1, $tp2") { + super.compareHkLambda(tp1, tp2) } - else super.compareHkLambda(rt, other, inOrder) + else super.compareHkLambda(tp1, tp2) override def toString = "Subtype trace:" + { try b.toString finally b.clear() } } diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 3d4ec6601..2120706f6 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2151,15 +2151,6 @@ object Types { throw new AssertionError(s"bad instantiation: $this") def checkInst(implicit ctx: Context): this.type = { - if (false && Config.newHK && refinedName.isHkArgName && refinedInfo.isInstanceOf[TypeAlias]) { - parent.stripTypeVar match { - case TypeApplications.TypeLambda(_, _) => - println(i"fshy: $this") - println(s"fshy: $this") - new Error().printStackTrace() - case _ => - } - } if (refinedName == tpnme.hkApplyOBS) parent.stripTypeVar match { case RefinedType(_, name, _) if name.isHkArgName => // ok @@ -2190,12 +2181,16 @@ object Types { case _ => tp } - val reduced = substAlias(parent) - if (reduced ne parent) { - hk.println(i"REDUCE $this ----> ${reduced}") - reduced + parent match { + case parent: LazyRef => + LazyRef(() => derivedRefinedType(parent.ref, refinedName, refinedInfo)) + case _ => + val reduced = substAlias(parent) + if (reduced ne parent) { + hk.println(i"REDUCE $this ----> ${reduced}") + reduced + } else this } - else this case _ => this } @@ -2304,7 +2299,6 @@ object Types { case tp: TypeRef => apply(x, tp.prefix) case tp: RecThis => RecType.this eq tp.binder case tp: LazyRef => true // Assume a reference to be safe. - // TODO: Check that all accumulators handle LazyRefs correctly case _ => foldOver(x, tp) } } @@ -2315,22 +2309,13 @@ object Types { override def computeHash = doHash(parent) override def toString = s"RecType($parent | $hashCode)" + + private def checkInst(implicit ctx: Context): this.type = { + this + } } object RecType { - /* Note: this might well fail for nested Recs. - * Failing scenario: Rebind a nest rec, creates a new rec - * but it still has RecThis references to the outer rec. - def checkInst(tp: Type)(implicit ctx: Context): tp.type = { - var binders: List[RecType] = Nil - tp.foreachPart { - case rt: RecType => binders = rt :: binders - case rt: RecThis => assert(binders contains rt.binder) - case _ => - } - tp - } - */ /** Create a RecType, normalizing its contents. 
This means: * @@ -2356,7 +2341,7 @@ object Types { case tp => tp } - unique(rt.derivedRecType(normalize(rt.parent))) + unique(rt.derivedRecType(normalize(rt.parent))).checkInst } def closeOver(parentExp: RecType => Type)(implicit ctx: Context) = { val rt = this(parentExp) diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala index 3b79d7c4c..2b37fa36c 100644 --- a/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/src/dotty/tools/dotc/typer/Inferencing.scala @@ -218,7 +218,7 @@ object Inferencing { val qualifies = (tvar: TypeVar) => (tree contains tvar.owningTree) || ownedBy.exists && tvar.owner == ownedBy def interpolate() = Stats.track("interpolateUndetVars") { - val tp = tree.tpe.widen // TODO add `.BetaReduce` ? + val tp = tree.tpe.widen constr.println(s"interpolate undet vars in ${tp.show}, pos = ${tree.pos}, mode = ${ctx.mode}, undets = ${constraint.uninstVars map (tvar => s"${tvar.show}@${tvar.owningTree.pos}")}") constr.println(s"qualifying undet vars: ${constraint.uninstVars filter qualifies map (tvar => s"$tvar / ${tvar.show}")}, constraint: ${constraint.show}") -- cgit v1.2.3 From d1f809f14cad2c14c312767d71361c7f2e7d8244 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 19:59:16 +0200 Subject: Remove old hk scheme - Simplify RefinedType - Drop recursive definition of RefinedThis - this is now taken over by RecType. - Drop RefinedThis. - Simplify typeParams The logic avoiding forcing is no longer needed. - Remove unused code and out of date comments. --- src/dotty/tools/dotc/config/Config.scala | 2 - src/dotty/tools/dotc/core/Definitions.scala | 65 ----- src/dotty/tools/dotc/core/NameOps.scala | 8 - src/dotty/tools/dotc/core/StdNames.scala | 6 - src/dotty/tools/dotc/core/Substituters.scala | 22 -- src/dotty/tools/dotc/core/SymDenotations.scala | 4 - src/dotty/tools/dotc/core/TypeApplications.scala | 307 +++------------------ src/dotty/tools/dotc/core/TypeComparer.scala | 143 +++------- src/dotty/tools/dotc/core/TypeOps.scala | 2 +- src/dotty/tools/dotc/core/Types.scala | 155 ++--------- src/dotty/tools/dotc/core/Uniques.scala | 4 +- src/dotty/tools/dotc/core/tasty/TastyFormat.scala | 3 +- src/dotty/tools/dotc/core/tasty/TreePickler.scala | 5 - .../tools/dotc/core/tasty/TreeUnpickler.scala | 11 +- .../dotc/core/unpickleScala2/Scala2Unpickler.scala | 2 - src/dotty/tools/dotc/printing/PlainPrinter.scala | 5 - src/dotty/tools/dotc/printing/RefinedPrinter.scala | 4 +- src/dotty/tools/dotc/sbt/ExtractAPI.scala | 4 +- src/dotty/tools/dotc/sbt/ExtractDependencies.scala | 3 +- src/dotty/tools/dotc/typer/Applications.scala | 4 - src/dotty/tools/dotc/typer/Implicits.scala | 5 +- src/dotty/tools/dotc/typer/Namer.scala | 25 +- src/dotty/tools/dotc/typer/Typer.scala | 3 +- 23 files changed, 104 insertions(+), 688 deletions(-) diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index 1c22329f1..7dfc09b3f 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -8,8 +8,6 @@ object Config { final val cacheMemberNames = true final val cacheImplicitScopes = true - final val newHK = true - final val checkCacheMembersNamed = false /** When updating a constraint bound, check that the constrained parameter diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala index 44d36abc5..5db9a6b0d 100644 --- a/src/dotty/tools/dotc/core/Definitions.scala +++ b/src/dotty/tools/dotc/core/Definitions.scala @@ 
-663,71 +663,6 @@ class Definitions { def functionArity(tp: Type)(implicit ctx: Context) = tp.dealias.argInfos.length - 1 - // ----- LambdaXYZ traits ------------------------------------------ - - private var myLambdaTraits: Set[Symbol] = Set() - - /** The set of HigherKindedXYZ traits encountered so far */ - def lambdaTraitsOBS: Set[Symbol] = myLambdaTraits - - private var LambdaTraitForVariances = mutable.Map[List[Int], ClassSymbol]() - - /** The HigherKinded trait corresponding to symbols `boundSyms` (which are assumed - * to be the type parameters of a higher-kided type). This is a class symbol that - * would be generated by the following schema. - * - * trait LambdaXYZ extends Object with P1 with ... with Pn { - * type v_1 hk$0; ...; type v_N hk$N; - * type +$Apply - * } - * - * Here: - * - * - v_i are the variances of the bound symbols (i.e. +, -, or empty). - * - XYZ is a string of length N with one letter for each variant of a bound symbol, - * using `P` (positive variance), `N` (negative variance), `I` (invariant). - * - for each positive or negative variance v_i there is a parent trait Pj which - * is the same as LambdaXYZ except that it has `I` in i-th position. - */ - def LambdaTraitOBS(vcs: List[Int]): ClassSymbol = { - assert(vcs.nonEmpty) - - def varianceFlags(v: Int) = v match { - case -1 => Contravariant - case 0 => EmptyFlags - case 1 => Covariant - } - - val completer = new LazyType { - def complete(denot: SymDenotation)(implicit ctx: Context): Unit = { - val cls = denot.asClass.classSymbol - val paramDecls = newScope - for (i <- 0 until vcs.length) - newTypeParam(cls, tpnme.hkArg(i), varianceFlags(vcs(i)), paramDecls) - newTypeField(cls, tpnme.hkApplyOBS, Covariant, paramDecls) - val parentTraitRefs = - for (i <- 0 until vcs.length if vcs(i) != 0) - yield LambdaTraitOBS(vcs.updated(i, 0)).typeRef - denot.info = ClassInfo( - ScalaPackageClass.thisType, cls, ObjectClass.typeRef :: parentTraitRefs.toList, paramDecls) - } - } - - val traitName = tpnme.hkLambdaOBS(vcs) - - def createTrait = { - val cls = newClassSymbol( - ScalaPackageClass, - traitName, - PureInterfaceCreationFlags | Synthetic, - completer) - myLambdaTraits += cls - cls - } - - LambdaTraitForVariances.getOrElseUpdate(vcs, createTrait) - } - // ----- primitive value class machinery ------------------------------------------ /** This class would also be obviated by the implicit function type design */ diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala index 1f221b5c8..b5704480a 100644 --- a/src/dotty/tools/dotc/core/NameOps.scala +++ b/src/dotty/tools/dotc/core/NameOps.scala @@ -116,14 +116,6 @@ object NameOps { def hkArgIndex: Int = name.drop(tpnme.hkArgPrefixLength).toString.toInt - def isLambdaTraitNameOBS(implicit ctx: Context): Boolean = - name.isTypeName && name.startsWith(tpnme.hkLambdaPrefixOBS) - - def lambdaTraitVariancesOBS(implicit ctx: Context): List[Int] = { - val vs = name.drop(tpnme.hkLambdaPrefixOBS.length) - vs.toList.map(c => tpnme.varianceSuffixesOBS.indexOf(c) - 1) - } - /** If the name ends with $nn where nn are * all digits, strip the $ and the digits. * Otherwise return the argument. 
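// The surviving $hk argument names encode their parameter index in the name
// itself; a standalone sketch of the hkArg / hkArgIndex pair kept in NameOps
// and StdNames, using plain Strings instead of Names:
object HkArgNameSketch {
  val hkArgPrefix = "$hk"

  def hkArg(n: Int): String = hkArgPrefix + n                              // hkArg(2) == "$hk2"
  def hkArgIndex(name: String): Int = name.drop(hkArgPrefix.length).toInt  // hkArgIndex("$hk2") == 2
}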
diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala index e82260201..c767f4c29 100644 --- a/src/dotty/tools/dotc/core/StdNames.scala +++ b/src/dotty/tools/dotc/core/StdNames.scala @@ -529,9 +529,7 @@ object StdNames { val synthSwitch: N = "$synthSwitch" - val hkApplyOBS: N = "$Apply" val hkArgPrefix: N = "$hk" - val hkLambdaPrefixOBS: N = "Lambda$" val hkArgPrefixHead: Char = hkArgPrefix.head val hkArgPrefixLength: Int = hkArgPrefix.length @@ -744,12 +742,8 @@ object StdNames { def syntheticTypeParamNames(num: Int): List[TypeName] = (0 until num).map(syntheticTypeParamName)(breakOut) - def hkLambdaOBS(vcs: List[Int]): TypeName = hkLambdaPrefixOBS ++ vcs.map(varianceSuffixOBS).mkString def hkArg(n: Int): TypeName = hkArgPrefix ++ n.toString - def varianceSuffixOBS(v: Int): Char = varianceSuffixesOBS.charAt(v + 1) - val varianceSuffixesOBS = "NIP" - final val Conforms = encode("<:<") } diff --git a/src/dotty/tools/dotc/core/Substituters.scala b/src/dotty/tools/dotc/core/Substituters.scala index 4598aaa20..0d1c78e2f 100644 --- a/src/dotty/tools/dotc/core/Substituters.scala +++ b/src/dotty/tools/dotc/core/Substituters.scala @@ -179,24 +179,6 @@ trait Substituters { this: Context => .mapOver(tp) } - final def substRefinedThis(tp: Type, from: Type, to: Type, theMap: SubstRefinedThisMap): Type = - tp match { - case tp @ RefinedThis(binder) => - if (binder eq from) to else tp - case tp: NamedType => - if (tp.currentSymbol.isStatic) tp - else tp.derivedSelect(substRefinedThis(tp.prefix, from, to, theMap)) - case _: ThisType | _: BoundType | NoPrefix => - tp - case tp: RefinedType => - tp.derivedRefinedType(substRefinedThis(tp.parent, from, to, theMap), tp.refinedName, substRefinedThis(tp.refinedInfo, from, to, theMap)) - case tp: TypeAlias => - tp.derivedTypeAlias(substRefinedThis(tp.alias, from, to, theMap)) - case _ => - (if (theMap != null) theMap else new SubstRefinedThisMap(from, to)) - .mapOver(tp) - } - final def substRecThis(tp: Type, from: Type, to: Type, theMap: SubstRecThisMap): Type = tp match { case tp @ RecThis(binder) => @@ -284,10 +266,6 @@ trait Substituters { this: Context => def apply(tp: Type): Type = substThis(tp, from, to, this) } - final class SubstRefinedThisMap(from: Type, to: Type) extends DeepTypeMap { - def apply(tp: Type): Type = substRefinedThis(tp, from, to, this) - } - final class SubstRecThisMap(from: Type, to: Type) extends DeepTypeMap { def apply(tp: Type): Type = substRecThis(tp, from, to, this) } diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index 124199678..e8053a740 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -483,10 +483,6 @@ object SymDenotations { final def isRefinementClass(implicit ctx: Context): Boolean = name.decode == tpnme.REFINE_CLASS - /** is this symbol a trait representing a type lambda? */ - final def isLambdaTraitOBS(implicit ctx: Context): Boolean = - isClass && name.startsWith(tpnme.hkLambdaPrefixOBS) && owner == defn.ScalaPackageClass - /** Is this symbol a package object or its module class? 
*/ def isPackageObject(implicit ctx: Context): Boolean = { val poName = if (isType) nme.PACKAGE_CLS else nme.PACKAGE diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 0edc598dd..33aa060b5 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -81,7 +81,6 @@ object TypeApplications { object TypeLambda { def apply(argBindingFns: List[RecType => TypeBounds], bodyFn: RecType => Type)(implicit ctx: Context): Type = { - assert(Config.newHK) val argNames = argBindingFns.indices.toList.map(tpnme.hkArg) var idx = 0 RecType.closeOver(rt => @@ -92,54 +91,19 @@ object TypeApplications { }) } - def applyOBS(variances: List[Int], - argBoundsFns: List[RefinedType => TypeBounds], - bodyFn: RefinedType => Type)(implicit ctx: Context): Type = { - def argRefinements(parent: Type, i: Int, bs: List[RefinedType => TypeBounds]): Type = bs match { - case b :: bs1 => - argRefinements(RefinedType(parent, tpnme.hkArg(i), b), i + 1, bs1) - case nil => - parent + def unapply(tp: Type)(implicit ctx: Context): Option[( /*List[Int], */ List[TypeBounds], Type)] = { + def decompose(t: Type, acc: List[TypeBounds]): (List[TypeBounds], Type) = t match { + case t @ RefinedType(p, rname, rinfo: TypeBounds) if t.isTypeParam => + decompose(p, rinfo.bounds :: acc) + case t: RecType => + decompose(t.parent, acc) + case _ => + (acc, t) } - assert(variances.nonEmpty) - assert(argBoundsFns.length == variances.length) - RefinedType( - argRefinements(defn.LambdaTraitOBS(variances).typeRef, 0, argBoundsFns), - tpnme.hkApplyOBS, bodyFn(_).bounds.withVariance(1)) - } - - def unapply(tp: Type)(implicit ctx: Context): Option[(/*List[Int], */List[TypeBounds], Type)] = - if (Config.newHK) { - def decompose(t: Type, acc: List[TypeBounds]): (List[TypeBounds], Type) = t match { - case t @ RefinedType(p, rname, rinfo: TypeBounds) if t.isTypeParam => - decompose(p, rinfo.bounds :: acc) - case t: RecType => - decompose(t.parent, acc) - case _ => - (acc, t) - } - decompose(tp, Nil) match { - case (Nil, _) => None - case x => Some(x) -// case (bindings, tp) => Some((Nil, bindings, tp)) - } + decompose(tp, Nil) match { + case (Nil, _) => None + case x => Some(x) } - else tp match { - case app @ RefinedType(parent, tpnme.hkApplyOBS, refinedInfo) => - val cls = parent.typeSymbol - val variances = cls.typeParams.map(_.variance) - def collectBounds(t: Type, acc: List[TypeBounds]): List[TypeBounds] = t match { - case t @ RefinedType(p, rname, rinfo) => - assert(rname.isHkArgName) - collectBounds(p, rinfo.bounds :: acc) - case TypeRef(_, lname) => - assert(lname.isLambdaTraitNameOBS) - acc - } - val argBounds = collectBounds(parent, Nil) - Some((argBounds, refinedInfo.argInfo)) - case _ => - None } } @@ -162,7 +126,7 @@ object TypeApplications { def argsAreForwarders(args: List[Type], n: Int): Boolean = args match { case Nil => n == 0 - case TypeRef(RefinedThis(rt), sel) :: args1 => + case TypeRef(RecThis(rt), sel) :: args1 if false => rt.eq(tp) && sel == tpnme.hkArg(n - 1) && argsAreForwarders(args1, n - 1) case _ => false @@ -179,17 +143,13 @@ object TypeApplications { * * T { type p_1 v_1= U_1; ...; type p_n v_n= U_n } * - * where v_i, p_i are the variances and names of the type parameters of T, - * If `T`'s class symbol is a lambda trait, follow the refined type with a - * projection - * - * T { ... } # $Apply + * where v_i, p_i are the variances and names of the type parameters of T. 
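// A toy model of the "application as a chain of refinements" encoding described
// above: applying a constructor to arguments wraps it in one refinement per
// argument, keyed by the corresponding type-parameter name (variance annotations
// are omitted here). The case classes are stand-ins for the compiler's Type
// hierarchy, not its actual classes.
object AppliedTypeSketch {
  sealed trait Tp
  final case class TyCon(name: String, paramNames: List[String]) extends Tp
  final case class Refine(parent: Tp, refinedName: String, info: Tp) extends Tp

  def appliedTo(tycon: TyCon, args: List[Tp]): Tp =
    tycon.paramNames.zip(args).foldLeft(tycon: Tp) {
      case (parent, (pname, arg)) => Refine(parent, pname, arg)
    }

  // appliedTo(TyCon("Map", List("K", "V")), List(TyCon("Int", Nil), TyCon("String", Nil)))
  //   == Refine(Refine(TyCon("Map", List("K", "V")), "K", TyCon("Int", Nil)), "V", TyCon("String", Nil))
}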
*/ object AppliedType { def apply(tp: Type, args: List[Type])(implicit ctx: Context): Type = tp.appliedTo(args) def unapply(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match { - case tp: RefinedType if Config.newHK => + case tp: RefinedType => var refinements: List[RefinedType] = Nil var tycon = tp.stripTypeVar while (tycon.isInstanceOf[RefinedType]) { @@ -209,38 +169,8 @@ object TypeApplications { None } collectArgs(tycon.typeParams, refinements, new mutable.ListBuffer[Type]) - case TypeRef(prefix, tpnme.hkApplyOBS) if !Config.newHK => - unapp(prefix) case _ => - if (Config.newHK) None - else unapp(tp) match { - case Some((tycon: TypeRef, _)) if tycon.symbol.isLambdaTraitOBS => - // We are seeing part of a lambda abstraction, not an applied type - None - case x => x - } - } - - private def unapp(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match { - case _: RefinedType => - val tparams = tp.classSymbol.typeParams - if (tparams.isEmpty) None - else { - val argBuf = new mutable.ListBuffer[Type] - def stripArgs(tp: Type, n: Int): Type = - if (n == 0) tp - else tp match { - case tp @ RefinedType(parent, pname, rinfo) if pname == tparams(n - 1).name => - val res = stripArgs(parent, n - 1) - if (res.exists) argBuf += rinfo.argInfo - res - case _ => - NoType - } - val res = stripArgs(tp, tparams.length) - if (res.exists) Some((res, argBuf.toList)) else None - } - case _ => None + None } } @@ -256,51 +186,10 @@ object TypeApplications { args.zipWithConserve(tparams)((arg, tparam) => arg.etaExpandIfHK(bounds(tparam))) } - /** The references `.this.$hk0, ..., .this.$hk`. */ - def argRefs(rt: RefinedType, n: Int)(implicit ctx: Context) = - List.range(0, n).map(i => RefinedThis(rt).select(tpnme.hkArg(i))) - /** The references `.this.$hk0, ..., .this.$hk`. */ def argRefs(rt: RecType, n: Int)(implicit ctx: Context) = List.range(0, n).map(i => RecThis(rt).select(tpnme.hkArg(i))) - /** Merge `tp1` and `tp2` under a common lambda, combining them with `op`. - * @param tparams1 The type parameters of `tp1` - * @param tparams2 The type parameters of `tp2` - * @pre tparams1.length == tparams2.length - * Produces the type lambda - * - * [v1 X1 B1, ..., vn Xn Bn] -> op(tp1[X1, ..., Xn], tp2[X1, ..., Xn]) - * - * where - * - * - variances `vi` are the variances of corresponding type parameters for `tp1` - * or `tp2`, or are 0 of the latter disagree. - * - bounds `Bi` are the intersection of the corresponding type parameter bounds - * of `tp1` and `tp2`. 
- */ - def hkCombineOBS(tp1: Type, tp2: Type, - tparams1: List[TypeSymbol], tparams2: List[TypeSymbol], op: (Type, Type) => Type) - (implicit ctx: Context): Type = { - val variances = (tparams1, tparams2).zipped.map { (tparam1, tparam2) => - val v1 = tparam1.variance - val v2 = tparam2.variance - if (v1 == v2) v1 else 0 - } - val bounds: List[RefinedType => TypeBounds] = - (tparams1, tparams2).zipped.map { (tparam1, tparam2) => - val b1: RefinedType => TypeBounds = - tp1.memberInfo(tparam1).bounds.internalizeFrom(tparams1) - val b2: RefinedType => TypeBounds = - tp2.memberInfo(tparam2).bounds.internalizeFrom(tparams2) - (rt: RefinedType) => b1(rt) & b2(rt) - } - val app1: RefinedType => Type = rt => tp1.appliedTo(argRefs(rt, tparams1.length)) - val app2: RefinedType => Type = rt => tp2.appliedTo(argRefs(rt, tparams2.length)) - val body: RefinedType => Type = rt => op(app1(rt), app2(rt)) - TypeLambda.applyOBS(variances, bounds, body) - } - private class InstMap(fullType: Type)(implicit ctx: Context) extends TypeMap { var localRecs: Set[RecType] = Set.empty var keptRefs: Set[Name] = Set.empty @@ -349,33 +238,10 @@ class TypeApplications(val self: Type) extends AnyVal { self.cls.typeParams case self: TypeRef => val tsym = self.symbol - if (tsym.isClass) tsym.typeParams - else tsym.infoOrCompleter match { - case completer: TypeParamsCompleter => - val tparams = completer.completerTypeParams(tsym) - if (Config.newHK) fallbackTypeParams(tparams.map(_.variance)) - else defn.LambdaTraitOBS(tparams.map(_.variance)).typeParams - case _ => - if (!tsym.isCompleting || tsym.isAliasType) tsym.info.typeParams - else - // We are facing a problem when computing the type parameters of an uncompleted - // abstract type. We can't access the bounds of the symbol yet because that - // would cause a cause a cyclic reference. So we return `Nil` instead - // and try to make up for it later. The acrobatics in Scala2Unpicker#readType - // for reading a TypeRef show what's needed. 
- Nil - } + if (tsym.isClass) tsym.typeParams else tsym.info.typeParams case self: RefinedType => - // inlined and optimized version of - // val sym = self.LambdaTrait - // if (sym.exists) return sym.typeParams - if (!Config.newHK && self.refinedName == tpnme.hkApplyOBS) { - val sym = self.parent.classSymbol - if (sym.isLambdaTraitOBS) return sym.typeParams - } val precedingParams = self.parent.typeParams.filterNot(_.memberName == self.refinedName) - if (Config.newHK && self.isTypeParam) precedingParams :+ self - else precedingParams + if (self.isTypeParam) precedingParams :+ self else precedingParams case self: RecType => self.parent.typeParams case self: SingletonType => @@ -389,9 +255,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** If `self` is a higher-kinded type, its type parameters $hk_i, otherwise Nil */ final def hkTypeParams(implicit ctx: Context): List[MemberBinding] = - if (Config.newHK) - if (isHK) typeParams else Nil - else LambdaTraitOBS.typeParams + if (isHK) typeParams else Nil /** If `self` is a generic class, its type parameter symbols, otherwise Nil */ final def typeParamSymbols(implicit ctx: Context): List[TypeSymbol] = typeParams match { @@ -462,19 +326,10 @@ class TypeApplications(val self: Type) extends AnyVal { } } - /** The Lambda trait underlying a type lambda */ - def LambdaTraitOBS(implicit ctx: Context): Symbol = self.stripTypeVar match { - case RefinedType(_, tpnme.hkApplyOBS, _) => - val sym = self.classSymbol - if (sym.isLambdaTraitOBS) sym else NoSymbol - case TypeBounds(lo, hi) => hi.LambdaTraitOBS - case _ => NoSymbol - } - /** Is self type higher-kinded (i.e. of kind != "*")? */ def isHK(implicit ctx: Context): Boolean = self.dealias match { case self: TypeRef => self.info.isHK - case self: RefinedType => self.refinedName == tpnme.hkApplyOBS || self.isTypeParam + case self: RefinedType => self.isTypeParam case self: SingletonType => false case self: TypeVar => self.origin.isHK case self: WildcardType => self.optBounds.isHK @@ -511,8 +366,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** is receiver of the form T#$Apply? */ def isHKApply(implicit ctx: Context): Boolean = self match { - case self @ RefinedType(_, name, _) => Config.newHK && name.isHkArgName && !self.isTypeParam - case TypeRef(_, name) => !Config.newHK && (name == tpnme.hkApplyOBS) + case self @ RefinedType(_, name, _) => name.isHkArgName && !self.isTypeParam case _ => false } @@ -520,18 +374,18 @@ class TypeApplications(val self: Type) extends AnyVal { * of this application exists and is not a lambda trait. * Equivalent to * - * self.classSymbol.exists && !self.classSymbol.isLambdaTrait + * self.classSymbol.exists * * but without forcing anything. 
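// The "no forcing" discipline documented above, in miniature: a symbol's info is
// consulted only if completion has already happened, otherwise the test answers
// conservatively. `Sym` is an illustrative stand-in, not the compiler's symbol
// type; `completedInfo` is Some(aliasIsClassRef) once completed, None while the
// completer is still lazy.
object SafeIsClassRefSketch {
  final case class Sym(isClass: Boolean, completedInfo: Option[Boolean])

  def safeIsClassRef(sym: Sym): Boolean =
    sym.isClass || sym.completedInfo.exists(identity)  // never triggers a lazy completer
}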
*/ - def classNotLambda(implicit ctx: Context): Boolean = self.stripTypeVar match { + def safeIsClassRef(implicit ctx: Context): Boolean = self.stripTypeVar match { case self: RefinedOrRecType => - self.parent.classNotLambda + self.parent.safeIsClassRef case self: TypeRef => self.denot.exists && { val sym = self.symbol - if (sym.isClass) !sym.isLambdaTraitOBS - else sym.isCompleted && self.info.isAlias && self.info.bounds.hi.classNotLambda + sym.isClass || + sym.isCompleted && self.info.isAlias } case _ => false @@ -545,22 +399,6 @@ class TypeApplications(val self: Type) extends AnyVal { self } - /** Dealias type if it can be done without forcing anything */ - def saferDealias(implicit ctx: Context): Type = self match { - case self: TypeRef if self.denot.exists && self.symbol.isAliasType && self.symbol.isCompleted => - self.info.bounds.hi.stripTypeVar.safeDealias - case _ => - self - } - - /** Replace references to type parameters with references to hk arguments `this.$hk_i` - * Care is needed not to cause cyclic reference errors, hence `SafeSubstMap`. - */ - def internalizeFrom[T <: Type](tparams: List[Symbol])(implicit ctx: Context): RefinedType => T = - (rt: RefinedType) => - new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)) - .apply(self).asInstanceOf[T] - /** Replace references to type parameters with references to hk arguments `this.$hk_i` * Care is needed not to cause cyclic reference errors, hence `SafeSubstMap`. */ @@ -571,7 +409,6 @@ class TypeApplications(val self: Type) extends AnyVal { new ctx.SafeSubstMap(tparams.asInstanceOf[List[Symbol]], argRefs(rt, tparams.length)) .apply(self).asInstanceOf[T] case _ => - assert(Config.newHK) def mapRefs(rt: RecType) = new TypeMap { def apply(t: Type): Type = t match { case rthis: RecThis if tparams contains rthis.binder.parent => RecThis(rt) @@ -587,38 +424,20 @@ class TypeApplications(val self: Type) extends AnyVal { * type T[X] >: L <: U becomes type T >: L <: ([X] -> _ <: U) */ def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = { - - /** Replace references to type parameters with references to hk arguments `this.$hk_i` - * Care is needed not to cause cycles, hence `SafeSubstMap`. 
- */ - def internalize[T <: Type](tp: T) = - (rt: RefinedType) => - new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length)) - .apply(tp).asInstanceOf[T] - def expand(tp: Type) = - if (Config.newHK) - TypeLambda( - tparams.map(tparam => - tparam.memberBoundsAsSeenFrom(self) - .withBindingKind(BindingKind.fromVariance(tparam.variance)) - .recursify(tparams)), - tp.recursify(tparams)) - else - TypeLambda.applyOBS( - tparams.map(_.variance), - tparams.map(tparam => internalize(self.memberInfo(tparam).bounds)), - internalize(tp)) + TypeLambda( + tparams.map(tparam => + tparam.memberBoundsAsSeenFrom(self) + .withBindingKind(BindingKind.fromVariance(tparam.variance)) + .recursify(tparams)), + tp.recursify(tparams)) assert(!isHK, self) self match { case self: TypeAlias => self.derivedTypeAlias(expand(self.alias.normalizeHkApply)) case self @ TypeBounds(lo, hi) => - if (Config.newHK) - self.derivedTypeBounds(lo, expand(hi.normalizeHkApply)) - else - self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi.normalizeHkApply))) + self.derivedTypeBounds(lo, expand(hi.normalizeHkApply)) case _ => expand(self) } } @@ -652,7 +471,7 @@ class TypeApplications(val self: Type) extends AnyVal { * - going from a wildcard type to its upper bound */ def normalizeHkApply(implicit ctx: Context): Type = self.strictDealias match { - case self1 @ RefinedType(_, rname, _) if Config.newHK && rname.isHkArgName && self1.typeParams.isEmpty => + case self1 @ RefinedType(_, rname, _) if rname.isHkArgName && self1.typeParams.isEmpty => val inst = new InstMap(self) def instTop(tp: Type): Type = tp.strictDealias match { @@ -709,7 +528,7 @@ class TypeApplications(val self: Type) extends AnyVal { * to eta expand them. */ def isEtaExpandable(implicit ctx: Context) = self match { - case self: TypeRef => self.symbol.isClass && !self.name.isLambdaTraitNameOBS + case self: TypeRef => self.symbol.isClass case _ => false } @@ -782,7 +601,7 @@ class TypeApplications(val self: Type) extends AnyVal { def adaptHkVariances(bound: Type)(implicit ctx: Context): Type = { val hkParams = bound.hkTypeParams if (hkParams.isEmpty) self - else if (Config.newHK) { + else { def adaptArg(arg: Type): Type = arg match { case arg @ TypeLambda(tparamBounds, body) if !arg.typeParams.corresponds(hkParams)(_.memberVariance == _.memberVariance) && @@ -804,25 +623,6 @@ class TypeApplications(val self: Type) extends AnyVal { } adaptArg(self) } - else { - def adaptArg(arg: Type): Type = arg match { - case arg: TypeRef if arg.symbol.isLambdaTraitOBS && - !arg.symbol.typeParams.corresponds(hkParams)(_.variance == _.memberVariance) && - arg.symbol.typeParams.corresponds(hkParams)(varianceConforms) => - arg.prefix.select(bound.LambdaTraitOBS) - case arg: RefinedType => - arg.derivedRefinedType(adaptArg(arg.parent), arg.refinedName, arg.refinedInfo) - case arg: RecType => - arg.derivedRecType(adaptArg(arg.parent)) - case arg @ TypeAlias(alias) => - arg.derivedTypeAlias(adaptArg(alias)) - case arg @ TypeBounds(lo, hi) => - arg.derivedTypeBounds(lo, adaptArg(hi)) - case _ => - arg - } - adaptArg(self) - } } /** Encode @@ -843,8 +643,6 @@ class TypeApplications(val self: Type) extends AnyVal { final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { def substHkArgs = new TypeMap { def apply(tp: Type): Type = tp match { - case TypeRef(RefinedThis(rt), name) if rt.eq(self) && name.isHkArgName => - args(name.hkArgIndex) case TypeRef(RecThis(rt), name) if rt.eq(self) && name.isHkArgName => args(name.hkArgIndex) case _ => @@ 
-886,8 +684,7 @@ class TypeApplications(val self: Type) extends AnyVal { assert(args.nonEmpty) matchParams(self, typParams, args) match { case refined @ RefinedType(_, pname, _) if pname.isHkArgName => - if (Config.newHK) refined.betaReduce - else TypeRef(refined, tpnme.hkApplyOBS) + refined.betaReduce // TODO Move to matchparams case refined => refined } @@ -906,8 +703,7 @@ class TypeApplications(val self: Type) extends AnyVal { case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => // This happens when unpickling e.g. scala$collection$generic$GenMapFactory$$CC ctx.warning(i"encountered F-bounded higher-kinded type parameters for ${self.symbol}; assuming they are invariant") - if (Config.newHK) fallbackTypeParams(args map alwaysZero) - else defn.LambdaTraitOBS(args map alwaysZero).typeParams + fallbackTypeParams(args map alwaysZero) case _ => typeParams } @@ -1070,33 +866,4 @@ class TypeApplications(val self: Type) extends AnyVal { case JavaArrayType(elemtp) => elemtp case _ => firstBaseArgInfo(defn.SeqClass) } - - /** Does this type contain RefinedThis type with `target` as its underling - * refinement type? - */ - def containsRefinedThis(target: Type)(implicit ctx: Context): Boolean = { - def recur(tp: Type): Boolean = tp.stripTypeVar match { - case RefinedThis(tp) => - tp eq target - case tp: NamedType => - if (tp.symbol.isClass) !tp.symbol.isStatic && recur(tp.prefix) - else tp.info match { - case TypeAlias(alias) => recur(alias) - case _ => recur(tp.prefix) - } - case tp: RefinedType => - recur(tp.refinedInfo) || recur(tp.parent) - case tp: RecType => - recur(tp.parent) - case tp: TypeBounds => - recur(tp.lo) || recur(tp.hi) - case tp: AnnotatedType => - recur(tp.underlying) - case tp: AndOrType => - recur(tp.tp1) || recur(tp.tp2) - case _ => - false - } - !Config.newHK && recur(self) - } } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index c1b275b70..cf3086323 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -178,11 +178,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { && !tp1.isInstanceOf[WithFixedSym] && !tp2.isInstanceOf[WithFixedSym] ) || - compareHkApplyOBS(tp1, tp2, inOrder = true) || - compareHkApplyOBS(tp2, tp1, inOrder = false) || thirdTryNamed(tp1, tp2) case _ => - compareHkApplyOBS(tp2, tp1, inOrder = false) || secondTry(tp1, tp2) } } @@ -259,7 +256,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { if (tp1.prefix.isStable) return false case _ => } - compareHkApplyOBS(tp1, tp2, inOrder = true) || thirdTry(tp1, tp2) case tp1: PolyParam => def flagNothingBound = { @@ -381,7 +377,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } else // fast path, in particular for refinements resulting from parameterization. isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) && - isSubType(tp1, skipped2) // TODO swap? + isSubType(tp1, skipped2) } compareRefined case tp2: RecType => @@ -547,9 +543,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * - `B` satisfies predicate `p`. 
*/ private def testLifted(tp1: Type, tp2: Type, tparams: List[MemberBinding], p: Type => Boolean): Boolean = { - val classBounds = - if (Config.newHK) tp2.classSymbols - else tp2.member(tpnme.hkApplyOBS).info.classSymbols + val classBounds = tp2.classSymbols def recur(bcs: List[ClassSymbol]): Boolean = bcs match { case bc :: bcs1 => val baseRef = tp1.baseTypeRef(bc) @@ -564,61 +558,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { recur(tp1.baseClasses) } - /** If `projection` is a hk projection T#$apply with a constrainable poly param - * as type constructor and `other` is not a hk projection, then perform the following - * steps: - * - * (1) If not `inOrder` then perform the next steps until they all succeed - * for each base type of other which - * - derives from a class bound of `projection`, - * - has the same number of type parameters than `projection` - * - has type parameter variances which conform to those of `projection`. - * If `inOrder` then perform the same steps on the original `other` type. - * - * (2) Try to eta expand the constructor of `other`. - * - * (3a) In mode `TypevarsMissConetxt` replace the projection's hk constructor parameter - * by the eta expansion of step (2) reapplied to the projection's arguments. - * (3b) In normal mode, try to unify the projection's hk constructor parameter with - * the eta expansion of step(2) - * - * (4) If `inOrder`, test `projection <: other` else test `other <: projection`. - */ - def compareHkApplyOBS(projection: NamedType, other: Type, inOrder: Boolean): Boolean = { - def tryInfer(tp: Type): Boolean = ctx.traceIndented(i"compareHK($projection, $other, inOrder = $inOrder, constr = $tp)", subtyping) { - tp match { - case tp: TypeVar => tryInfer(tp.underlying) - case param: PolyParam if canConstrain(param) => - - def unifyWith(liftedOther: Type): Boolean = { - subtyping.println(i"unify with $liftedOther") - liftedOther.typeConstructor.widen match { - case tycon: TypeRef if tycon.isEtaExpandable && tycon.typeParams.nonEmpty => - val (ok, projection1) = - if (ctx.mode.is(Mode.TypevarsMissContext)) - (true, EtaExpansion(tycon).appliedTo(projection.argInfos)) - else - (tryInstantiate(param, EtaExpansion(tycon)), projection) - ok && - (if (inOrder) isSubType(projection1, other) else isSubType(other, projection1)) - case _ => - false - } - } - val hkTypeParams = param.typeParams - subtyping.println(i"classBounds = ${projection.prefix.member(tpnme.hkApplyOBS).info.classSymbols}") - subtyping.println(i"base classes = ${other.baseClasses}") - subtyping.println(i"type params = ${hkTypeParams.map(_.memberName)}") - if (inOrder) unifyWith(other) - else testLifted(other, projection.prefix, hkTypeParams, unifyWith) - case _ => - false - } - } - !Config.newHK && projection.name == tpnme.hkApplyOBS && !other.isHKApply && - tryInfer(projection.prefix.typeConstructor.dealias) - } - /** Handle subtype tests * * app <:< other if inOrder = true @@ -695,7 +634,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } } - Config.newHK && app.isHKApply && !other.isHKApply && { + app.isHKApply && !other.isHKApply && { val reduced = if (inOrder) app else app.normalizeHkApply if (reduced ne app) if (inOrder) isSubType(reduced, other) else isSubType(other, reduced) @@ -824,10 +763,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * rebase both itself and the member info of `tp` on a freshly created skolem type. 
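// A toy model of the simplified member test below: `tp1` satisfies the
// refinement { name : R } if some alternative of its member `name` has an info
// conforming to R. Member infos are plain Strings and `isSub` stands in for
// isSubType; none of these names are the compiler's own.
object HasMatchingMemberSketch {
  def hasMatchingMember(members: Map[String, List[String]], name: String, rinfo: String)
                       (isSub: (String, String) => Boolean): Boolean =
    members.getOrElse(name, Nil).exists(alt => isSub(alt, rinfo))
}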
*/ protected def hasMatchingMember(name: Name, tp1: Type, tp2: RefinedType): Boolean = { - val rebindNeeded = tp2.refinementRefersToThis - val base = if (rebindNeeded) ensureStableSingleton(tp1) else tp1 - val rinfo2 = if (rebindNeeded) tp2.refinedInfo.substRefinedThis(tp2, base) else tp2.refinedInfo - val mbr = base.member(name) + val rinfo2 = tp2.refinedInfo + val mbr = tp1.member(name) def qualifies(m: SingleDenotation) = isSubType(m.info, rinfo2) @@ -843,15 +780,13 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { def selfReferentialMatch = tp1.isInstanceOf[SingletonType] && { rinfo2 match { case rinfo2: TypeAlias => - !defn.isBottomType(base.widen) && (base select name) =:= rinfo2.alias + !defn.isBottomType(tp1.widen) && (tp1 select name) =:= rinfo2.alias case _ => false } } - def varianceMatches = true // TODO: fill in - - /*>|>*/ ctx.traceIndented(i"hasMatchingMember($base . $name :? ${tp2.refinedInfo}) ${mbr.info.show} $rinfo2", subtyping) /*<|<*/ { - (memberMatches || selfReferentialMatch) && varianceMatches + /*>|>*/ ctx.traceIndented(i"hasMatchingMember($tp1 . $name :? ${tp2.refinedInfo}) ${mbr.info.show} $rinfo2", subtyping) /*<|<*/ { + memberMatches || selfReferentialMatch } } @@ -870,10 +805,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * @return The parent type of `tp2` after skipping the matching refinements. */ private def skipMatching(tp1: Type, tp2: RefinedType): Type = tp1 match { - case tp1 @ RefinedType(parent1, name1, rinfo1: TypeAlias) - if name1 == tp2.refinedName && - !tp2.refinementRefersToThis && - !tp1.refinementRefersToThis => + case tp1 @ RefinedType(parent1, name1, rinfo1: TypeAlias) if name1 == tp2.refinedName => tp2.parent match { case parent2: RefinedType => skipMatching(parent1, parent2) case parent2 => parent2 @@ -1224,39 +1156,29 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * allowing both interpretations. A possible remedy is to be somehow stricter * in where we allow which interpretation. 
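// A toy version of the higher-kinded lift implemented below: two constructors of
// equal arity are merged under a common lambda by averaging the parameter
// variances (so +1 and -1 give 0, matching the integer division used in the
// patch) and combining the bodies pointwise with `op`. `Lam` and the String
// bodies are stand-ins, not compiler types.
object LiftIfHKSketch {
  final case class Lam(variances: List[Int], body: List[String] => String)

  def liftIfHK(l1: Lam, l2: Lam, op: (String, String) => String): Option[Lam] =
    if (l1.variances.length != l2.variances.length) None  // arity mismatch: merge conflict
    else Some(Lam(
      l1.variances.zip(l2.variances).map { case (v1, v2) => (v1 + v2) / 2 },
      args => op(l1.body(args), l2.body(args))))
}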
*/ - private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type) = - if (Config.newHK) { - val tparams1 = tp1.typeParams - val tparams2 = tp2.typeParams - if (tparams1.isEmpty || tparams2.isEmpty) op(tp1, tp2) - else if (tparams1.length != tparams2.length) mergeConflict(tp1, tp2) - else { - val bindings: List[RecType => TypeBounds] = - (tparams1, tparams2).zipped.map { (tparam1, tparam2) => - val b1: RecType => TypeBounds = - tparam1.memberBoundsAsSeenFrom(tp1).recursify(tparams1) - val b2: RecType => TypeBounds = - tparam2.memberBoundsAsSeenFrom(tp2).recursify(tparams2) - (rt: RecType) => (b1(rt) & b2(rt)) - .withBindingKind( - BindingKind.fromVariance( - (tparam1.memberVariance + tparam2.memberVariance) / 2)) - } - val app1: RecType => Type = rt => tp1.appliedTo(argRefs(rt, tparams1.length)) - val app2: RecType => Type = rt => tp2.appliedTo(argRefs(rt, tparams2.length)) - val body: RecType => Type = rt => op(app1(rt), app2(rt)) - TypeLambda(bindings, body) - } - } + private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type) = { + val tparams1 = tp1.typeParams + val tparams2 = tp2.typeParams + if (tparams1.isEmpty || tparams2.isEmpty) op(tp1, tp2) + else if (tparams1.length != tparams2.length) mergeConflict(tp1, tp2) else { - val tparams1 = tp1.typeParamSymbols - val tparams2 = tp2.typeParamSymbols - def onlyNamed(tparams: List[TypeSymbol]) = tparams.forall(!_.is(ExpandedName)) - if (tparams1.isEmpty || tparams2.isEmpty || - onlyNamed(tparams1) && onlyNamed(tparams2)) op(tp1, tp2) - else if (tparams1.length != tparams2.length) mergeConflict(tp1, tp2) - else hkCombineOBS(tp1, tp2, tparams1, tparams2, op) + val bindings: List[RecType => TypeBounds] = + (tparams1, tparams2).zipped.map { (tparam1, tparam2) => + val b1: RecType => TypeBounds = + tparam1.memberBoundsAsSeenFrom(tp1).recursify(tparams1) + val b2: RecType => TypeBounds = + tparam2.memberBoundsAsSeenFrom(tp2).recursify(tparams2) + (rt: RecType) => (b1(rt) & b2(rt)) + .withBindingKind( + BindingKind.fromVariance( + (tparam1.memberVariance + tparam2.memberVariance) / 2)) + } + val app1: RecType => Type = rt => tp1.appliedTo(argRefs(rt, tparams1.length)) + val app2: RecType => Type = rt => tp2.appliedTo(argRefs(rt, tparams2.length)) + val body: RecType => Type = rt => op(app1(rt), app2(rt)) + TypeLambda(bindings, body) } + } /** Try to distribute `&` inside type, detect and handle conflicts * @pre !(tp1 <: tp2) && !(tp2 <:< tp1) -- these cases were handled before @@ -1268,10 +1190,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp1: RefinedType => tp2 match { case tp2: RefinedType if tp1.refinedName == tp2.refinedName => - tp1.derivedRefinedType( - tp1.parent & tp2.parent, - tp1.refinedName, - tp1.refinedInfo & tp2.refinedInfo.substRefinedThis(tp2, RefinedThis(tp1))) + tp1.derivedRefinedType(tp1.parent & tp2.parent, tp1.refinedName, tp1.refinedInfo & tp2.refinedInfo) case _ => NoType } diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index ca49d3d3c..6b75b574e 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -228,7 +228,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. 
return tp1.derivedRefinedType( approximateUnion(OrType(tp1.parent, tp2.parent)), tp1.refinedName, - homogenizedUnion(tp1.refinedInfo, tp2.refinedInfo).substRefinedThis(tp2, RefinedThis(tp1))) + homogenizedUnion(tp1.refinedInfo, tp2.refinedInfo)) //.ensuring { x => println(i"approx or $tp1 | $tp2 = $x\n constr = ${ctx.typerState.constraint}"); true } // DEBUG case _ => } diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 2120706f6..cd1b5739d 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -51,7 +51,7 @@ object Types { * | | +--- SuperType * | | +--- ConstantType * | | +--- MethodParam - * | | +----RefinedThis + * | | +----RecThis * | | +--- SkolemType * | +- PolyParam * | +- RefinedOrRecType -+-- RefinedType @@ -507,9 +507,7 @@ object Types { } def goRefined(tp: RefinedType) = { val pdenot = go(tp.parent) - val rinfo = - if (tp.refinementRefersToThis) tp.refinedInfo.substRefinedThis(tp, pre) - else tp.refinedInfo + val rinfo = tp.refinedInfo if (name.isTypeName) { // simplified case that runs more efficiently val jointInfo = if (rinfo.isAlias) rinfo @@ -577,6 +575,7 @@ object Types { ctx.pendingMemberSearches = name :: ctx.pendingMemberSearches } + //assert(ctx.findMemberCount < 20) try go(this) catch { case ex: Throwable => @@ -964,62 +963,14 @@ object Types { * * P { type T = String, type R = P{...}.T } # R --> String * - * (2) The refinement is a fully instantiated type lambda, and the projected name is "$apply". - * In this case the rhs of the apply is returned with all references to lambda argument types - * substituted by their definitions. - * * (*) normalizes means: follow instantiated typevars and aliases. */ def lookupRefined(name: Name)(implicit ctx: Context): Type = { def loop(pre: Type): Type = pre.stripTypeVar match { case pre: RefinedType => - object instantiate extends TypeMap { - var isSafe = true - def apply(tp: Type): Type = - if (!isSafe) tp - else tp match { - case TypeRef(RefinedThis(`pre`), name) if name.isHkArgName => - member(name).info match { - case TypeAlias(alias) => alias - case _ => isSafe = false; tp - } - case tp: TypeVar if !tp.inst.exists => - isSafe = false - tp - case _ => - mapOver(tp) - } - } - def instArg(tp: Type): Type = tp match { - case tp @ TypeAlias(TypeRef(RefinedThis(`pre`), name)) if name.isHkArgName => - member(name).info match { - case TypeAlias(alias) => tp.derivedTypeAlias(alias) // needed to keep variance - case bounds => bounds - } - case _ => - instantiate(tp) - } - def instTop(tp: Type): Type = tp.stripTypeVar match { - case tp: RefinedType => - tp.derivedRefinedType(instTop(tp.parent), tp.refinedName, instArg(tp.refinedInfo)) - case _ => - instantiate(tp) - } - /** Reduce rhs of $hkApply to make it stand alone */ - def betaReduce(tp: Type) = { - val reduced = instTop(tp) - if (instantiate.isSafe) reduced else NoType - } pre.refinedInfo match { case TypeAlias(alias) => - if (pre.refinedName ne name) loop(pre.parent) - else alias match { - case TypeRef(RefinedThis(`pre`), aliasName) => lookupRefined(aliasName) // (1) - case _ => - if (!pre.refinementRefersToThis) alias - else if (name == tpnme.hkApplyOBS) betaReduce(alias) - else NoType - } + if (pre.refinedName ne name) loop(pre.parent) else alias case _ => loop(pre.parent) } case pre: RecType => @@ -1029,8 +980,6 @@ object Types { candidate } else NoType - case RefinedThis(binder) => - binder.lookupRefined(name) case SkolemType(tp) => tp.lookupRefined(name) case pre: WildcardType => @@ 
-1219,10 +1168,6 @@ object Types { final def substThisUnlessStatic(cls: ClassSymbol, tp: Type)(implicit ctx: Context): Type = if (cls.isStaticOwner) this else ctx.substThis(this, cls, tp, null) - /** Substitute all occurrences of `SkolemType(binder)` by `tp` */ - final def substRefinedThis(binder: Type, tp: Type)(implicit ctx: Context): Type = - ctx.substRefinedThis(this, binder, tp, null) - /** Substitute all occurrences of `RecThis(binder)` by `tp` */ final def substRecThis(binder: RecType, tp: Type)(implicit ctx: Context): Type = ctx.substRecThis(this, binder, tp, null) @@ -1643,7 +1588,7 @@ object Types { * to an (unbounded) wildcard type. * * (2) Reduce a type-ref `T { X = U; ... } # X` to `U` - * provided `U` does not refer with a RefinedThis to the + * provided `U` does not refer with a RecThis to the * refinement type `T { X = U; ... }` */ def reduceProjection(implicit ctx: Context): Type = { @@ -1715,13 +1660,6 @@ object Types { else if (isType) { val res = prefix.lookupRefined(name) if (res.exists) res - else if (name == tpnme.hkApplyOBS && prefix.classNotLambda) { - // After substitution we might end up with a type like - // `C { type hk$0 = T0; ...; type hk$n = Tn } # $Apply` - // where C is a class. In that case we eta expand `C`. - if (defn.isBottomType(prefix)) prefix.classSymbol.typeRef - else derivedSelect(prefix.EtaExpandCore) - } else if (Config.splitProjections) prefix match { case prefix: AndType => @@ -1999,9 +1937,7 @@ object Types { } object TypeRef { - def checkProjection(prefix: Type, name: TypeName)(implicit ctx: Context) = - if (name == tpnme.hkApplyOBS && prefix.classNotLambda) - assert(false, s"bad type : $prefix.$name does not allow $$Apply projection") + def checkProjection(prefix: Type, name: TypeName)(implicit ctx: Context) = () /** Create type ref with given prefix and name */ def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef = { @@ -2128,34 +2064,15 @@ object Types { * @param infoFn: A function that produces the info of the refinement declaration, * given the refined type itself. 
*/ - abstract case class RefinedType(private var myParent: Type, refinedName: Name, private var myRefinedInfo: Type) + abstract case class RefinedType(parent: Type, refinedName: Name, refinedInfo: Type) extends RefinedOrRecType with BindingType with MemberBinding { - final def parent = myParent - final def refinedInfo = myRefinedInfo - - private var refinementRefersToThisCache: Boolean = _ - private var refinementRefersToThisKnown: Boolean = false - - def refinementRefersToThis(implicit ctx: Context): Boolean = { - if (!refinementRefersToThisKnown) { - refinementRefersToThisCache = refinedInfo.containsRefinedThis(this) - refinementRefersToThisKnown = true - } - refinementRefersToThisCache - } - override def underlying(implicit ctx: Context) = parent private def badInst = throw new AssertionError(s"bad instantiation: $this") def checkInst(implicit ctx: Context): this.type = { - if (refinedName == tpnme.hkApplyOBS) - parent.stripTypeVar match { - case RefinedType(_, name, _) if name.isHkArgName => // ok - case _ => badInst - } this } @@ -2197,9 +2114,15 @@ object Types { def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): Type = if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this - else - RefinedType(parent, refinedName, rt => refinedInfo.substRefinedThis(this, RefinedThis(rt))) - .betaReduce + else { + // `normalizedRefinedInfo` is `refinedInfo` reduced everywhere via `reduceProjection`. + // (this is achieved as a secondary effect of substRecThis). + // It turns out this normalization is now needed; without it there's + // A Y-check error (incompatible types involving hk lambdas) for dotty itself. + // TODO: investigate and, if possible, drop after revision. + val normalizedRefinedInfo = refinedInfo.substRecThis(dummyRec, dummyRec) + RefinedType(parent, refinedName, normalizedRefinedInfo).betaReduce + } /** Add this refinement to `parent`, provided If `refinedName` is a member of `parent`. 
*/ def wrapIfMember(parent: Type)(implicit ctx: Context): Type = @@ -2232,36 +2155,16 @@ object Types { override def toString = s"RefinedType($parent, $refinedName, $refinedInfo)" } - class CachedRefinedType(refinedName: Name) extends RefinedType(NoType, refinedName, NoType) - - class PreHashedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type, hc: Int) + class CachedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type, hc: Int) extends RefinedType(parent, refinedName, refinedInfo) { myHash = hc override def computeHash = unsupported("computeHash") } object RefinedType { - def make(parent: Type, names: List[Name], infoFns: List[RefinedType => Type])(implicit ctx: Context): Type = + def make(parent: Type, names: List[Name], infos: List[Type])(implicit ctx: Context): Type = if (names.isEmpty) parent - else make(RefinedType(parent, names.head, infoFns.head), names.tail, infoFns.tail) - - def recursive(parentFn: RefinedType => Type, names: List[Name], infoFns: List[RefinedType => Type])(implicit ctx: Context): RefinedType = { - val refinements: List[RefinedType] = names.map(new CachedRefinedType(_)) - val last = refinements.last - (refinements, infoFns).zipped.foreach((rt, infoFn) => rt.myRefinedInfo = infoFn(last)) - (parentFn(last) /: refinements) { (parent, rt) => - rt.myParent = parent - ctx.base.uniqueRefinedTypes.enterIfNew(rt).checkInst - }.asInstanceOf[RefinedType] - } - - def apply(parent: Type, name: Name, infoFn: RefinedType => Type)(implicit ctx: Context): RefinedType = { - assert(!ctx.erasedTypes || ctx.mode.is(Mode.Printing)) - val res: RefinedType = new CachedRefinedType(name) - res.myParent = parent - res.myRefinedInfo = infoFn(res) - ctx.base.uniqueRefinedTypes.enterIfNew(res).checkInst - } + else make(RefinedType(parent, names.head, infos.head), names.tail, infos.tail) def apply(parent: Type, name: Name, info: Type)(implicit ctx: Context): RefinedType = { assert(!ctx.erasedTypes) @@ -2722,7 +2625,7 @@ object Types { } } - // ----- Bound types: MethodParam, PolyParam, RefinedThis -------------------------- + // ----- Bound types: MethodParam, PolyParam -------------------------- abstract class BoundType extends CachedProxyType with ValueType { type BT <: Type @@ -2806,22 +2709,6 @@ object Types { } } - /** a this-reference to an enclosing refined type `binder`. */ - case class RefinedThis(binder: RefinedType) extends BoundType with SingletonType { - type BT = RefinedType - override def underlying(implicit ctx: Context) = binder - def copyBoundType(bt: BT) = RefinedThis(bt) - - // need to customize hashCode and equals to prevent infinite recursion for - // refinements that refer to the refinement type via this - override def computeHash = addDelta(binder.identityHash, 41) - override def equals(that: Any) = that match { - case that: RefinedThis => this.binder eq that.binder - case _ => false - } - override def toString = s"RefinedThis(${binder.hashCode})" - } - /** a self-reference to an enclosing recursive type. 
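// The knot-tying behind RecType and the RecThis self-reference defined below, in
// miniature: the body is produced by a function that receives the enclosing
// binder itself, so it can refer back to the binder while it is being built.
// `Rec` here is an illustrative stand-in, not the compiler's RecType.
object RecSketch {
  final class Rec(parentExp: Rec => String) {
    val parent: String = parentExp(this)  // parentExp can close over `this`, like RecThis over its RecType
  }
}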
*/ case class RecThis(binder: RecType) extends BoundType with SingletonType { type BT = RecType @@ -3892,6 +3779,8 @@ object Types { class MergeError(msg: String, val tp1: Type, val tp2: Type) extends TypeError(msg) + @sharable val dummyRec = new RecType(rt => NoType) + // ----- Debug --------------------------------------------------------- @sharable var debugTrace = false diff --git a/src/dotty/tools/dotc/core/Uniques.scala b/src/dotty/tools/dotc/core/Uniques.scala index b00508d60..cb9670c69 100644 --- a/src/dotty/tools/dotc/core/Uniques.scala +++ b/src/dotty/tools/dotc/core/Uniques.scala @@ -107,8 +107,8 @@ object Uniques { def enterIfNew(parent: Type, refinedName: Name, refinedInfo: Type): RefinedType = { val h = doHash(refinedName, refinedInfo, parent) - def newType = new PreHashedRefinedType(parent, refinedName, refinedInfo, h) - if (monitored) recordCaching(h, classOf[PreHashedRefinedType]) + def newType = new CachedRefinedType(parent, refinedName, refinedInfo, h) + if (monitored) recordCaching(h, classOf[CachedRefinedType]) if (h == NotCached) newType else { val r = findPrevious(h, parent, refinedName, refinedInfo) diff --git a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index e9708961a..38d55e0e8 100644 --- a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -259,8 +259,7 @@ object TastyFormat { final val TYPEREFdirect = 66 final val TERMREFpkg = 67 final val TYPEREFpkg = 68 - final val REFINEDthis = 69 - final val RECthis = REFINEDthis // !!! + final val RECthis = 69 final val BYTEconst = 70 final val SHORTconst = 71 final val CHARconst = 72 diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 9be5c8bcf..d6e6c4d6b 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -211,11 +211,6 @@ class TreePickler(pickler: TastyPickler) { case tpe: SuperType => writeByte(SUPERtype) withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)} - case tpe: RefinedThis => - writeByte(REFINEDthis) - val binderAddr = pickledTypes.get(tpe.binder) - assert(binderAddr != null, tpe.binder) - writeRef(binderAddr.asInstanceOf[Addr]) case tpe: RecThis => writeByte(RECthis) val binderAddr = pickledTypes.get(tpe.binder) diff --git a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index d9a062263..2d230c630 100644 --- a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -260,7 +260,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) { val parent = readType() val ttag = nextUnsharedTag if (ttag == TYPEBOUNDS || ttag == TYPEALIAS) name = name.toTypeName - RefinedType(parent, name, rt => registeringType(rt, readType())) + RefinedType(parent, name, readType()) // Note that the lambda "rt => ..." is not equivalent to a wildcard closure! // Eta expansion of the latter puts readType() out of the expression. 
case APPLIEDtype => @@ -325,8 +325,6 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) { readPackageRef().termRef case TYPEREF => val name = readName().toTypeName - if (name.isLambdaTraitNameOBS) // Make sure corresponding lambda trait exists - defn.LambdaTraitOBS(name.lambdaTraitVariancesOBS) TypeRef(readType(), name) case TERMREF => readNameSplitSig() match { @@ -337,11 +335,8 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) { ThisType.raw(readType().asInstanceOf[TypeRef]) case RECtype => RecType(rt => registeringType(rt, readType())) - case REFINEDthis => - readTypeRef() match { - case t: RefinedType => RefinedThis(t) - case t: RecType => RecThis(t) - } + case RECthis => + RecThis(readTypeRef().asInstanceOf[RecType]) case SHARED => val ref = readAddr() typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType()) diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 557a9df74..18a4e83b6 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -632,8 +632,6 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas case info => tp.derivedRefinedType(parent1, name, info) } - case tp @ TypeRef(pre, tpnme.hkApplyOBS) => - tp.derivedSelect(elim(pre)) case _ => tp } diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index a9f5b771a..a8888fd3c 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -50,9 +50,6 @@ class PlainPrinter(_ctx: Context) extends Printer { homogenize(tp1) & homogenize(tp2) case OrType(tp1, tp2) => homogenize(tp1) | homogenize(tp2) - case tp @ TypeRef(_, tpnme.hkApplyOBS) => - val tp1 = tp.reduceProjection - if (tp1 eq tp) tp else homogenize(tp1) case tp: RefinedType => tp.normalizeHkApply case tp: SkolemType => @@ -250,8 +247,6 @@ class PlainPrinter(_ctx: Context) extends Printer { val idx = openRecs.reverse.indexOf(tp.binder) if (idx >= 0) selfRecName(idx + 1) else "{...}.this" // TODO move underlying type to an addendum, e.g. ... z3 ... where z3: ... 
- case tp: RefinedThis => - s"${nameString(tp.binder.typeSymbol)}{...}.this" case tp: SkolemType => if (homogenizedView) toText(tp.info) else tp.repr } diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 61e29982b..3da977b31 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -118,9 +118,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (defn.isTupleClass(cls)) return toTextTuple(args) return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close case tp @ TypeLambda(argBoundss, body) => - val variances = - if (Config.newHK) argBoundss.map(b => BindingKind.toVariance(b.bindingKind)) - else tp.classSymbol.typeParams.map(_.variance) + val variances = argBoundss.map(b => BindingKind.toVariance(b.bindingKind)) val prefix = ((('X' - 'A') + lambdaNestingLevel) % 26 + 'A').toChar val paramNames = argBoundss.indices.toList.map(prefix.toString + _) val instantiate = new TypeMap { diff --git a/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/src/dotty/tools/dotc/sbt/ExtractAPI.scala index c0a3c3dfe..d4b38c66e 100644 --- a/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -403,8 +403,8 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder apiType(tpe) case tp: ThisType => apiThis(tp.cls) - case RefinedThis(binder) => - apiThis(binder.typeSymbol) + case RecThis(binder) => + apiThis(binder.typeSymbol) // !!! this is almost certainly wrong !!! case tp: ParamType => new api.ParameterRef(tp.paramName.toString) case tp: LazyRef => diff --git a/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 1f19a1058..026a518ce 100644 --- a/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -163,8 +163,7 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp sym.eq(NoSymbol) || sym.isEffectiveRoot || sym.isAnonymousFunction || - sym.isAnonymousClass || - sym.isLambdaTraitOBS + sym.isAnonymousClass private def addInheritanceDependency(sym: Symbol): Unit = _topLevelInheritanceDependencies += sym.topLevelClass diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index cdbf692cd..caae422d3 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -634,7 +634,6 @@ trait Applications extends Compatibility { self: Typer => typedFn.tpe.widen match { case pt: PolyType => if (typedArgs.length <= pt.paramBounds.length && !isNamed) - typedArgs = typedArgs.zipWithConserve(pt.paramBounds)(adaptTypeArg) if (typedFn.symbol == defn.Predef_classOf && typedArgs.nonEmpty) { val arg = typedArgs.head checkClassType(arg.tpe, arg.pos, traitReq = false, stablePrefixReq = false) @@ -644,9 +643,6 @@ trait Applications extends Compatibility { self: Typer => assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs) } - def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree = - if (Config.newHK) tree else tree.withType(tree.tpe.etaExpandIfHK(bound)) - /** Rewrite `new Array[T](....)` if T is an unbounded generic to calls to newGenericArray. * It is performed during typer as creation of generic arrays needs a classTag. * we rely on implicit search to find one. 
diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala index 1b02f7e70..a5246cf6b 100644 --- a/src/dotty/tools/dotc/typer/Implicits.scala +++ b/src/dotty/tools/dotc/typer/Implicits.scala @@ -284,13 +284,10 @@ trait ImplicitRunInfo { self: RunInfo => override implicit protected val ctx: Context = liftingCtx override def stopAtStatic = true def apply(tp: Type) = tp match { - case tp: TypeRef if tp.symbol.isLambdaTraitOBS => - defn.AnyType case tp: TypeRef if tp.symbol.isAbstractOrAliasType => val pre = tp.prefix def joinClass(tp: Type, cls: ClassSymbol) = - if (cls.isLambdaTraitOBS) tp - else AndType.make(tp, cls.typeRef.asSeenFrom(pre, cls.owner)) + AndType.make(tp, cls.typeRef.asSeenFrom(pre, cls.owner)) val lead = if (tp.prefix eq NoPrefix) defn.AnyType else apply(tp.prefix) (lead /: tp.classSymbols)(joinClass) case tp: TypeVar => diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index bc8f8e281..bf36942e0 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -973,29 +973,6 @@ class Namer { typer: Typer => } ensureUpToDate(sym.typeRef, dummyInfo) ensureUpToDate(sym.typeRef.appliedTo(tparamSyms.map(_.typeRef)), TypeBounds.empty) - - if (Config.newHK) sym.info - else etaExpandArgsOBS.apply(sym.info) - } - - /** Eta expand all class types C appearing as arguments to a higher-kinded - * type parameter to type lambdas, e.g. [HK0] => C[HK0]. This is necessary - * because in `typedAppliedTypeTree` we might have missed some eta expansions - * of arguments in F-bounds, because the recursive type was initialized with - * TypeBounds.empty. - */ - def etaExpandArgsOBS(implicit ctx: Context) = new TypeMap { - def apply(tp: Type): Type = tp match { - case tp: RefinedType => - val args = tp.argInfos.mapconserve(this) - if (args.nonEmpty) { - val tycon = tp.withoutArgs(args) - val tycon1 = this(tycon) - val tparams = tycon.typeParams - val args1 = if (args.length == tparams.length) etaExpandIfHK(tparams, args) else args - if ((tycon1 eq tycon) && (args1 eq args)) tp else tycon1.appliedTo(args1) - } else mapOver(tp) - case _ => mapOver(tp) - } + sym.info } } diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 96bc2ab35..2ab06bf70 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -949,8 +949,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.memberBounds) else (arg, WildcardType) - val arg1 = typed(desugaredArg, argPt) - adaptTypeArg(arg1, tparam.memberBounds) + typed(desugaredArg, argPt) } args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]] } -- cgit v1.2.3 From 60d81f81ddfc85719fd303e8d15d3891adbf4dfd Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:00:59 +0200 Subject: Start new, direct HK scheme - Re-introduce newHK option. Label some things that will be removed with OLD. 
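To illustrate the direction (a simplified sketch, not code from this patch; it
leaves out contexts, caching and bounds handling, and `Rep`/`F` are made-up
names), the new scheme models a parameterized alias and an application of an
abstract type constructor directly, instead of encoding them as refinements
over a RecType:

    // `type Rep[X] = List[X]` becomes a TypeLambda whose body refers back
    // to the lambda parameter through PolyParam(tl, 0):
    TypeLambda(paramNames = List("X0".toTypeName), variances = List(0))(
      paramBoundsExp = tl => List(TypeBounds.empty),
      resultTypeExp = tl => defn.ListClass.typeRef.appliedTo(PolyParam(tl, 0)))

    // An application F[Int] of an abstract type constructor F is kept as an
    // explicit application node instead of a chain of $hk refinements:
    HKApply(F, defn.IntType :: Nil)

Both representations are gated behind Config.newHK for now, so the existing
refinement-based encoding (the members labelled OLD below) keeps working while
the new scheme is brought up.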
--- src/dotty/tools/dotc/config/Config.scala | 2 + src/dotty/tools/dotc/core/ConstraintHandling.scala | 4 +- src/dotty/tools/dotc/core/MemberBinding.scala | 1 + src/dotty/tools/dotc/core/NameOps.scala | 12 +- src/dotty/tools/dotc/core/StdNames.scala | 14 +- src/dotty/tools/dotc/core/SymDenotations.scala | 1 + src/dotty/tools/dotc/core/TypeApplications.scala | 223 +++++++++++++-------- src/dotty/tools/dotc/core/TypeComparer.scala | 128 +++++++++--- src/dotty/tools/dotc/core/TypeErasure.scala | 2 + src/dotty/tools/dotc/core/TypeOps.scala | 4 +- src/dotty/tools/dotc/core/Types.scala | 155 ++++++++++++-- src/dotty/tools/dotc/core/tasty/TastyFormat.scala | 8 +- src/dotty/tools/dotc/core/tasty/TreePickler.scala | 5 + .../tools/dotc/core/tasty/TreeUnpickler.scala | 9 + src/dotty/tools/dotc/printing/PlainPrinter.scala | 35 +++- src/dotty/tools/dotc/printing/RefinedPrinter.scala | 29 +-- src/dotty/tools/dotc/typer/TypeAssigner.scala | 21 +- src/dotty/tools/dotc/typer/Variances.scala | 11 + 18 files changed, 478 insertions(+), 186 deletions(-) diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index 7dfc09b3f..bf6f8493b 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -10,6 +10,8 @@ object Config { final val checkCacheMembersNamed = false + final val newHK = false + /** When updating a constraint bound, check that the constrained parameter * does not appear at the top-level of either of its bounds. */ diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index e7b05af43..00b8a5d25 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -287,8 +287,8 @@ trait ConstraintHandling { if (!addParamBound(bound)) NoType else if (fromBelow) defn.NothingType else defn.AnyType - case bound: RefinedType => - bound.normalizeHkApply + case bound: RefinedType if !Config.newHK => + bound.normalizeHkApplyOLD case _ => bound } diff --git a/src/dotty/tools/dotc/core/MemberBinding.scala b/src/dotty/tools/dotc/core/MemberBinding.scala index 6f081c542..bff8b30a0 100644 --- a/src/dotty/tools/dotc/core/MemberBinding.scala +++ b/src/dotty/tools/dotc/core/MemberBinding.scala @@ -7,6 +7,7 @@ import Types.{Type, TypeBounds} /** A common super trait of Symbol and Refinement. * Used to capture the attributes of type parameters * which can be implemented as either symbols or refinements. + * TODO: Rename (TypeParamInfo?) */ trait MemberBinding { diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala index b5704480a..120540dc7 100644 --- a/src/dotty/tools/dotc/core/NameOps.scala +++ b/src/dotty/tools/dotc/core/NameOps.scala @@ -102,19 +102,19 @@ object NameOps { } /** Is this the name of a higher-kinded type parameter of a Lambda? */ - def isHkArgName = + def isHkArgNameOLD = name.length > 0 && - name.head == tpnme.hkArgPrefixHead && - name.startsWith(tpnme.hkArgPrefix) && { - val digits = name.drop(tpnme.hkArgPrefixLength) + name.head == tpnme.hkArgPrefixHeadOLD && + name.startsWith(tpnme.hkArgPrefixOLD) && { + val digits = name.drop(tpnme.hkArgPrefixLengthOLD) digits.length <= 4 && digits.forall(_.isDigit) } /** The index of the higher-kinded type parameter with this name. * Pre: isLambdaArgName. 
*/ - def hkArgIndex: Int = - name.drop(tpnme.hkArgPrefixLength).toString.toInt + def hkArgIndexOLD: Int = + name.drop(tpnme.hkArgPrefixLengthOLD).toString.toInt /** If the name ends with $nn where nn are * all digits, strip the $ and the digits. diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala index c767f4c29..0adf80d8f 100644 --- a/src/dotty/tools/dotc/core/StdNames.scala +++ b/src/dotty/tools/dotc/core/StdNames.scala @@ -529,9 +529,9 @@ object StdNames { val synthSwitch: N = "$synthSwitch" - val hkArgPrefix: N = "$hk" - val hkArgPrefixHead: Char = hkArgPrefix.head - val hkArgPrefixLength: Int = hkArgPrefix.length + val hkArgPrefixOLD: N = "$hk" + val hkArgPrefixHeadOLD: Char = hkArgPrefixOLD.head + val hkArgPrefixLengthOLD: Int = hkArgPrefixOLD.length // unencoded operators object raw { @@ -737,12 +737,16 @@ object StdNames { class ScalaTypeNames extends ScalaNames[TypeName] { protected implicit def fromString(s: String): TypeName = typeName(s) - @switch def syntheticTypeParamName(i: Int): TypeName = "T" + i + def syntheticTypeParamName(i: Int): TypeName = "T" + i + def syntheticLambdaParamName(i: Int): TypeName = "X" + i def syntheticTypeParamNames(num: Int): List[TypeName] = (0 until num).map(syntheticTypeParamName)(breakOut) - def hkArg(n: Int): TypeName = hkArgPrefix ++ n.toString + def syntheticLambdaParamNames(num: Int): List[TypeName] = + (0 until num).map(syntheticLambdaParamName)(breakOut) + + def hkArgOLD(n: Int): TypeName = hkArgPrefixOLD ++ n.toString final val Conforms = encode("<:<") } diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index e8053a740..2692f57a2 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1130,6 +1130,7 @@ object SymDenotations { case tp: PolyType => tp.paramBounds.exists(hasSkolems) || hasSkolems(tp.resType) case tp: MethodType => tp.paramTypes.exists(hasSkolems) || hasSkolems(tp.resType) case tp: ExprType => hasSkolems(tp.resType) + case tp: HKApply => hasSkolems(tp.tycon) || tp.args.exists(hasSkolems) case tp: AndOrType => hasSkolems(tp.tp1) || hasSkolems(tp.tp2) case tp: TypeBounds => hasSkolems(tp.lo) || hasSkolems(tp.hi) case tp: AnnotatedType => hasSkolems(tp.tpe) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 33aa060b5..12b42642d 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -56,13 +56,13 @@ object TypeApplications { def variancesConform(tparams1: List[MemberBinding], tparams2: List[MemberBinding])(implicit ctx: Context): Boolean = tparams1.corresponds(tparams2)(varianceConforms) - def fallbackTypeParams(variances: List[Int])(implicit ctx: Context): List[MemberBinding] = { + def fallbackTypeParamsOLD(variances: List[Int])(implicit ctx: Context): List[MemberBinding] = { def memberBindings(vs: List[Int]): Type = vs match { case Nil => NoType case v :: vs1 => RefinedType( memberBindings(vs1), - tpnme.hkArg(vs1.length), + tpnme.hkArgOLD(vs1.length), TypeBounds.empty.withBindingKind(BindingKind.fromVariance(v))) } def decompose(t: Type, acc: List[MemberBinding]): List[MemberBinding] = t match { @@ -78,14 +78,14 @@ object TypeApplications { * ==> * ([X_i := this.$hk_i] T) { type v_i $hk_i: (new)B_i } */ - object TypeLambda { + object TypeLambdaOLD { def apply(argBindingFns: List[RecType => TypeBounds], bodyFn: RecType => Type)(implicit 
ctx: Context): Type = { - val argNames = argBindingFns.indices.toList.map(tpnme.hkArg) + val argNames = argBindingFns.indices.toList.map(tpnme.hkArgOLD) var idx = 0 RecType.closeOver(rt => (bodyFn(rt) /: argBindingFns) { (parent, argBindingFn) => - val res = RefinedType(parent, tpnme.hkArg(idx), argBindingFn(rt)) + val res = RefinedType(parent, tpnme.hkArgOLD(idx), argBindingFn(rt)) idx += 1 res }) @@ -117,26 +117,33 @@ object TypeApplications { * @param tycon C */ object EtaExpansion { - def apply(tycon: TypeRef)(implicit ctx: Context) = { - assert(tycon.isEtaExpandable) + def apply(tycon: Type)(implicit ctx: Context) = { + if (!Config.newHK) assert(tycon.isEtaExpandableOLD) tycon.EtaExpand(tycon.typeParamSymbols) } - def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = { - def argsAreForwarders(args: List[Type], n: Int): Boolean = args match { - case Nil => - n == 0 - case TypeRef(RecThis(rt), sel) :: args1 if false => - rt.eq(tp) && sel == tpnme.hkArg(n - 1) && argsAreForwarders(args1, n - 1) - case _ => - false - } - tp match { - case TypeLambda(argBounds, AppliedType(fn: TypeRef, args)) - if argsAreForwarders(args, tp.typeParams.length) => Some(fn) - case _ => None + def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = + if (Config.newHK) + tp match { + case tp @ TypeLambda(tparams, AppliedType(fn: TypeRef, args)) + if (args == tparams.map(_.toArg)) => Some(fn) + case _ => None + } + else { + def argsAreForwarders(args: List[Type], n: Int): Boolean = args match { + case Nil => + n == 0 + case TypeRef(RecThis(rt), sel) :: args1 if false => + rt.eq(tp) && sel == tpnme.hkArgOLD(n - 1) && argsAreForwarders(args1, n - 1) + case _ => + false + } + tp match { + case TypeLambdaOLD(argBounds, AppliedType(fn: TypeRef, args)) + if argsAreForwarders(args, tp.typeParams.length) => Some(fn) + case _ => None + } } - } } /** Extractor for type application T[U_1, ..., U_n]. This is the refined type @@ -169,6 +176,8 @@ object TypeApplications { None } collectArgs(tycon.typeParams, refinements, new mutable.ListBuffer[Type]) + case HKApply(tycon, args) => + Some((tycon, args)) case _ => None } @@ -187,15 +196,15 @@ object TypeApplications { } /** The references `.this.$hk0, ..., .this.$hk`. 
*/ - def argRefs(rt: RecType, n: Int)(implicit ctx: Context) = - List.range(0, n).map(i => RecThis(rt).select(tpnme.hkArg(i))) + def argRefsOLD(rt: RecType, n: Int)(implicit ctx: Context) = + List.range(0, n).map(i => RecThis(rt).select(tpnme.hkArgOLD(i))) - private class InstMap(fullType: Type)(implicit ctx: Context) extends TypeMap { + private class InstMapOLD(fullType: Type)(implicit ctx: Context) extends TypeMap { var localRecs: Set[RecType] = Set.empty var keptRefs: Set[Name] = Set.empty var tyconIsHK: Boolean = true def apply(tp: Type): Type = tp match { - case tp @ TypeRef(RecThis(rt), sel) if sel.isHkArgName && localRecs.contains(rt) => + case tp @ TypeRef(RecThis(rt), sel) if sel.isHkArgNameOLD && localRecs.contains(rt) => fullType.member(sel).info match { case TypeAlias(alias) => apply(alias) case _ => keptRefs += sel; tp @@ -203,7 +212,7 @@ object TypeApplications { case tp: TypeVar if !tp.inst.exists => val bounds = tp.instanceOpt.orElse(ctx.typeComparer.bounds(tp.origin)) bounds.foreachPart { - case TypeRef(RecThis(rt), sel) if sel.isHkArgName && localRecs.contains(rt) => + case TypeRef(RecThis(rt), sel) if sel.isHkArgNameOLD && localRecs.contains(rt) => keptRefs += sel case _ => } @@ -329,7 +338,9 @@ class TypeApplications(val self: Type) extends AnyVal { /** Is self type higher-kinded (i.e. of kind != "*")? */ def isHK(implicit ctx: Context): Boolean = self.dealias match { case self: TypeRef => self.info.isHK - case self: RefinedType => self.isTypeParam + case self: RefinedType => !Config.newHK && self.isTypeParam + case self: TypeLambda => true + case self: HKApply => false case self: SingletonType => false case self: TypeVar => self.origin.isHK case self: WildcardType => self.optBounds.isHK @@ -354,7 +365,9 @@ class TypeApplications(val self: Type) extends AnyVal { else 0 } case self: RefinedType => - if (self.isTypeParam) 1 else -1 + if (!Config.newHK && self.isTypeParam) 1 else -1 + case self: TypeLambda => 1 + case self: HKApply => -1 case self: SingletonType => -1 case self: TypeVar => self.origin.knownHK case self: WildcardType => self.optBounds.knownHK @@ -364,15 +377,14 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => -1 } - /** is receiver of the form T#$Apply? */ - def isHKApply(implicit ctx: Context): Boolean = self match { - case self @ RefinedType(_, name, _) => name.isHkArgName && !self.isTypeParam + /** is receiver a higher-kinded application? */ + def isHKApplyOLD(implicit ctx: Context): Boolean = self match { + case self @ RefinedType(_, name, _) => name.isHkArgNameOLD && !self.isTypeParam case _ => false } /** True if it can be determined without forcing that the class symbol - * of this application exists and is not a lambda trait. - * Equivalent to + * of this application exists. Equivalent to * * self.classSymbol.exists * @@ -402,11 +414,11 @@ class TypeApplications(val self: Type) extends AnyVal { /** Replace references to type parameters with references to hk arguments `this.$hk_i` * Care is needed not to cause cyclic reference errors, hence `SafeSubstMap`. 
*/ - def recursify[T <: Type](tparams: List[MemberBinding])(implicit ctx: Context): RecType => T = + def recursifyOLD[T <: Type](tparams: List[MemberBinding])(implicit ctx: Context): RecType => T = tparams match { case (_: Symbol) :: _ => (rt: RecType) => - new ctx.SafeSubstMap(tparams.asInstanceOf[List[Symbol]], argRefs(rt, tparams.length)) + new ctx.SafeSubstMap(tparams.asInstanceOf[List[Symbol]], argRefsOLD(rt, tparams.length)) .apply(self).asInstanceOf[T] case _ => def mapRefs(rt: RecType) = new TypeMap { @@ -421,23 +433,34 @@ class TypeApplications(val self: Type) extends AnyVal { /** Lambda abstract `self` with given type parameters. Examples: * * type T[X] = U becomes type T = [X] -> U - * type T[X] >: L <: U becomes type T >: L <: ([X] -> _ <: U) + * type T[X] >: L <: U becomes type T >: L <: ([X] -> U) + * + * TODO: Handle parameterized lower bounds */ def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = { def expand(tp: Type) = - TypeLambda( - tparams.map(tparam => - tparam.memberBoundsAsSeenFrom(self) - .withBindingKind(BindingKind.fromVariance(tparam.variance)) - .recursify(tparams)), - tp.recursify(tparams)) + if (Config.newHK) TypeLambda.fromSymbols(tparams, tp) + else + TypeLambdaOLD( + tparams.map(tparam => + tparam.memberBoundsAsSeenFrom(self) + .withBindingKind(BindingKind.fromVariance(tparam.variance)) + .recursifyOLD(tparams)), + tp.recursifyOLD(tparams)) assert(!isHK, self) - self match { + if (Config.newHK) self match { case self: TypeAlias => - self.derivedTypeAlias(expand(self.alias.normalizeHkApply)) + self.derivedTypeAlias(expand(self.alias)) case self @ TypeBounds(lo, hi) => - self.derivedTypeBounds(lo, expand(hi.normalizeHkApply)) + self.derivedTypeBounds(lo, expand(hi)) + case _ => expand(self) + } + else self match { + case self: TypeAlias => + self.derivedTypeAlias(expand(self.alias.normalizeHkApplyOLD)) + case self @ TypeBounds(lo, hi) => + self.derivedTypeBounds(lo, expand(hi.normalizeHkApplyOLD)) case _ => expand(self) } } @@ -470,9 +493,9 @@ class TypeApplications(val self: Type) extends AnyVal { * - dropping refinements and rec-types * - going from a wildcard type to its upper bound */ - def normalizeHkApply(implicit ctx: Context): Type = self.strictDealias match { - case self1 @ RefinedType(_, rname, _) if rname.isHkArgName && self1.typeParams.isEmpty => - val inst = new InstMap(self) + def normalizeHkApplyOLD(implicit ctx: Context): Type = self.strictDealias match { + case self1 @ RefinedType(_, rname, _) if rname.isHkArgNameOLD && self1.typeParams.isEmpty => + val inst = new InstMapOLD(self) def instTop(tp: Type): Type = tp.strictDealias match { case tp: RecType => @@ -480,12 +503,12 @@ class TypeApplications(val self: Type) extends AnyVal { tp.rebind(instTop(tp.parent)) case tp @ RefinedType(parent, rname, rinfo) => rinfo match { - case TypeAlias(TypeRef(RecThis(rt), sel)) if sel.isHkArgName && inst.localRecs.contains(rt) => + case TypeAlias(TypeRef(RecThis(rt), sel)) if sel.isHkArgNameOLD && inst.localRecs.contains(rt) => val bounds @ TypeBounds(_, _) = self.member(sel).info instTop(tp.derivedRefinedType(parent, rname, bounds.withBindingKind(NoBinding))) case _ => val parent1 = instTop(parent) - if (rname.isHkArgName && + if (rname.isHkArgNameOLD && !inst.tyconIsHK && !inst.keptRefs.contains(rname)) parent1 else tp.derivedRefinedType(parent1, rname, inst(rinfo)) @@ -527,7 +550,7 @@ class TypeApplications(val self: Type) extends AnyVal { * In that case we can look for parameterized base types of the type * to eta expand them. 
*/ - def isEtaExpandable(implicit ctx: Context) = self match { + def isEtaExpandableOLD(implicit ctx: Context) = self match { case self: TypeRef => self.symbol.isClass case _ => false } @@ -603,7 +626,14 @@ class TypeApplications(val self: Type) extends AnyVal { if (hkParams.isEmpty) self else { def adaptArg(arg: Type): Type = arg match { - case arg @ TypeLambda(tparamBounds, body) if + case arg @ TypeLambda(tparams, body) if + !tparams.corresponds(hkParams)(_.memberVariance == _.memberVariance) && + tparams.corresponds(hkParams)(varianceConforms) => + TypeLambda(tparams.map(_.memberName), hkParams.map(_.memberVariance))( + tl => arg.paramBounds.map(_.subst(arg, tl).bounds), + tl => arg.resultType.subst(arg, tl) + ) + case arg @ TypeLambdaOLD(tparamBounds, body) if !arg.typeParams.corresponds(hkParams)(_.memberVariance == _.memberVariance) && arg.typeParams.corresponds(hkParams)(varianceConforms) => def adjustVariance(bounds: TypeBounds, tparam: MemberBinding): TypeBounds = @@ -613,7 +643,7 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => (x => tp) } val adjusted = (tparamBounds, hkParams).zipped.map(adjustVariance) - TypeLambda(adjusted.map(lift), lift(body)) + TypeLambdaOLD(adjusted.map(lift), lift(body)) case arg @ TypeAlias(alias) => arg.derivedTypeAlias(adaptArg(alias)) case arg @ TypeBounds(lo, hi) => @@ -641,22 +671,22 @@ class TypeApplications(val self: Type) extends AnyVal { * 3. If `T` is a polytype, instantiate it to `U1,...,Un`. */ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { - def substHkArgs = new TypeMap { - def apply(tp: Type): Type = tp match { - case TypeRef(RecThis(rt), name) if rt.eq(self) && name.isHkArgName => - args(name.hkArgIndex) - case _ => - mapOver(tp) - } - } if (args.isEmpty || ctx.erasedTypes) self - else self.stripTypeVar match { + else self.stripTypeVar match { // TODO investigate why we can't do safeDealias here + case self: PolyType if !args.exists(_.isInstanceOf[TypeBounds]) => + self.instantiate(args) case EtaExpansion(self1) => self1.appliedTo(args) - case TypeLambda(_, body) if !args.exists(_.isInstanceOf[TypeBounds]) => + case TypeLambdaOLD(_, body) if !args.exists(_.isInstanceOf[TypeBounds]) => + def substHkArgs = new TypeMap { + def apply(tp: Type): Type = tp match { + case TypeRef(RecThis(rt), name) if rt.eq(self) && name.isHkArgNameOLD => + args(name.hkArgIndexOLD) + case _ => + mapOver(tp) + } + } substHkArgs(body) - case self: PolyType => - self.instantiate(args) case self1 => self1.safeDealias.appliedTo(args, typeParams) } @@ -682,9 +712,10 @@ class TypeApplications(val self: Type) extends AnyVal { case nil => t } assert(args.nonEmpty) - matchParams(self, typParams, args) match { - case refined @ RefinedType(_, pname, _) if pname.isHkArgName => - refined.betaReduce // TODO Move to matchparams + if (Config.newHK && self.isHK) AppliedType(self, args) + else matchParams(self, typParams, args) match { + case refined @ RefinedType(_, pname, _) if !Config.newHK && pname.isHkArgNameOLD => + refined.betaReduceOLD case refined => refined } @@ -698,17 +729,25 @@ class TypeApplications(val self: Type) extends AnyVal { * up hk type parameters matching the arguments. This is needed when unpickling * Scala2 files such as `scala.collection.generic.Mapfactory`. 
*/ - final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = { - val safeTypeParams = self match { - case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => - // This happens when unpickling e.g. scala$collection$generic$GenMapFactory$$CC - ctx.warning(i"encountered F-bounded higher-kinded type parameters for ${self.symbol}; assuming they are invariant") - fallbackTypeParams(args map alwaysZero) - case _ => - typeParams + final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = + if (Config.newHK) + self match { + case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => + AppliedType(self, args) + case _ => + appliedTo(args, typeParams) + } + else { + val safeTypeParams = self match { + case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => + // This happens when unpickling e.g. scala$collection$generic$GenMapFactory$$CC + ctx.warning(i"encountered F-bounded higher-kinded type parameters for ${self.symbol}; assuming they are invariant") + fallbackTypeParamsOLD(args map alwaysZero) + case _ => + typeParams + } + appliedTo(args, safeTypeParams) } - appliedTo(args, safeTypeParams) - } /** Turn this type, which is used as an argument for * type parameter `tparam`, into a TypeBounds RHS @@ -731,7 +770,10 @@ class TypeApplications(val self: Type) extends AnyVal { */ final def baseArgInfos(base: Symbol)(implicit ctx: Context): List[Type] = if (self derivesFrom base) - base.typeParams map (param => self.member(param.name).info.argInfo) + self match { + case self: HKApply => self.upperBound.baseArgInfos(base) + case _ => base.typeParams.map(param => self.member(param.name).info.argInfo) + } else Nil @@ -756,7 +798,10 @@ class TypeApplications(val self: Type) extends AnyVal { /** The first type argument of the base type instance wrt `base` of this type */ final def firstBaseArgInfo(base: Symbol)(implicit ctx: Context): Type = base.typeParams match { case param :: _ if self derivesFrom base => - self.member(param.name).info.argInfo + self match { + case self: HKApply => self.upperBound.firstBaseArgInfo(base) + case _ => self.member(param.name).info.argInfo + } case _ => NoType } @@ -778,6 +823,8 @@ class TypeApplications(val self: Type) extends AnyVal { tp.wrapIfMember(parent.baseTypeWithArgs(base)) case tp: TermRef => tp.underlying.baseTypeWithArgs(base) + case tp: HKApply => + tp.upperBound.baseTypeWithArgs(base) case AndType(tp1, tp2) => tp1.baseTypeWithArgs(base) & tp2.baseTypeWithArgs(base) case OrType(tp1, tp2) => @@ -832,12 +879,16 @@ class TypeApplications(val self: Type) extends AnyVal { /** The core type without any type arguments. * @param `typeArgs` must be the type arguments of this type. 
*/ - final def withoutArgs(typeArgs: List[Type]): Type = typeArgs match { - case _ :: typeArgs1 => - val RefinedType(tycon, _, _) = self - tycon.withoutArgs(typeArgs1) - case nil => - self + final def withoutArgs(typeArgs: List[Type]): Type = self match { + case HKApply(tycon, args) => tycon + case _ => + typeArgs match { + case _ :: typeArgs1 => + val RefinedType(tycon, _, _) = self + tycon.withoutArgs(typeArgs1) + case nil => + self + } } final def typeConstructor(implicit ctx: Context): Type = self.stripTypeVar match { diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index cf3086323..566865eb4 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -365,11 +365,11 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { fourthTry(tp1, tp2) || compareRefinedSlow case _ => if (tp2.isTypeParam) { - compareHkLambda(tp1, tp2) || + compareHkLambdaOLD(tp1, tp2) || fourthTry(tp1, tp2) } else { - compareHkApply(tp2, tp1, inOrder = false) || + compareHkApplyOLD(tp2, tp1, inOrder = false) || compareRefinedSlow || fourthTry(tp1, tp2) || compareAliasedRefined(tp2, tp1, inOrder = false) @@ -389,6 +389,53 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { val tp1stable = ensureStableSingleton(tp1) isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable)) } + case tp2 @ HKApply(tycon2, args2) => + def compareHkApply(tycon2: Type): Boolean = tycon2 match { + case tycon2: TypeVar => compareHkApply(tycon2.underlying) + case param2: PolyParam if canConstrain(param2) => + val tparams2 = tycon2.typeParams + + def tyconOK(tycon1a: Type) = + variancesConform(tycon1a.typeParams, tparams2) && { + if (ctx.mode.is(Mode.TypevarsMissContext)) isSubType(tp1, tycon1a.appliedTo(args2)) + else tryInstantiate(param2, tycon1a) && isSubType(tp1, tp2) + } + + tp1 match { + case tp1 @ HKApply(tycon1, _) => + tyconOK(tycon1) || isSubType(tp1.upperBound, tp2) + case _ if tp1.widenDealias.typeSymbol.isClass => + val classBounds = tp2.classSymbols + def liftToBase(bcs: List[ClassSymbol]): Boolean = bcs match { + case bc :: bcs1 => + classBounds.exists(bc.derivesFrom) && tyconOK(tp1.baseTypeRef(bc)) || + liftToBase(bcs1) + case _ => + false + } + liftToBase(tp1.baseClasses) + case tp1: TypeProxy => + isSubType(tp1.underlying, tp2) + case _ => + false + } + case _ => + // TODO handle lower bounds of hk params here + false + } + compareHkApply(tycon2) || fourthTry(tp1, tp2) + case tp2 @ TypeLambda(tparams2, body2) => + def compareHkLambda = tp1.stripTypeVar match { + case tp1 @ TypeLambda(tparams1, body1) => + val boundsConform = + tparams1.corresponds(tparams2)((tparam1, tparam2) => + isSubType(tparam2.memberBounds.subst(tp2, tp1), tparam1.memberBounds)) + val bodiesConform = isSubType(body1, body2.subst(tp2, tp1)) + variancesConform(tparams1, tparams2) && boundsConform && bodiesConform + case _ => + fourthTry(tp1, tp2) + } + compareHkLambda case OrType(tp21, tp22) => // Rewrite T1 <: (T211 & T212) | T22 to T1 <: (T211 | T22) and T1 <: (T212 | T22) // and analogously for T1 <: T21 | (T221 & T222) @@ -502,11 +549,22 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths case tp1: RefinedType => - compareHkApply(tp1, tp2, inOrder = true) || + compareHkApplyOLD(tp1, tp2, inOrder = true) || isNewSubType(tp1.parent, tp2) || 
compareAliasedRefined(tp1, tp2, inOrder = true) case tp1: RecType => isNewSubType(tp1.parent, tp2) + case HKApply(tycon1, args1) => + tp2 match { + case AppliedType(tycon2, args2) => + assert(!tycon2.isHK) // this should have been handled by thirdTry + isSubType(tycon1, EtaExpansion(tycon2)) && + isSubArgs(args1, args2, tycon2.typeParams) + case _ => + false + } + case EtaExpansion(tycon1) => + isSubType(tycon1, tp2) case AndType(tp11, tp12) => // Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 | T12) <: T2 // and analogously for T11 & (T121 | T122) & T12 <: T2 @@ -537,6 +595,14 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } + def isSubArgs(args1: List[Type], args2: List[Type], tparams: List[MemberBinding]): Boolean = + if (args1.isEmpty) args2.isEmpty + else args2.nonEmpty && { + val v = tparams.head.memberVariance + (v > 0 || isSubType(args2.head, args1.head)) && + (v < 0 || isSubType(args1.head, args2.head)) + } + /** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where * - `B` derives from one of the class symbols of `tp2`, * - the type parameters of `B` match one-by-one the variances of `tparams`, @@ -588,7 +654,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * * (4) If `inOrder`, test `app <: other` else test `other <: app`. */ - def compareHkApply(app: RefinedType, other: Type, inOrder: Boolean): Boolean = { + def compareHkApplyOLD(app: RefinedType, other: Type, inOrder: Boolean): Boolean = { def tryInfer(tp: Type): Boolean = ctx.traceIndented(i"compareHK($app, $other, inOrder = $inOrder, constr = $tp)", subtyping) { tp match { case tp: TypeVar => tryInfer(tp.underlying) @@ -597,7 +663,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { def unifyWith(liftedOther: Type): Boolean = { subtyping.println(i"unify with $liftedOther") liftedOther.typeConstructor.widen match { - case tycon: TypeRef if tycon.isEtaExpandable && tycon.typeParams.nonEmpty => + case tycon: TypeRef if tycon.isEtaExpandableOLD && tycon.typeParams.nonEmpty => val (ok, app1) = if (ctx.mode.is(Mode.TypevarsMissContext)) (true, EtaExpansion(tycon).appliedTo(app.argInfos)) @@ -634,8 +700,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } } - app.isHKApply && !other.isHKApply && { - val reduced = if (inOrder) app else app.normalizeHkApply + app.isHKApplyOLD && !other.isHKApplyOLD && { + val reduced = if (inOrder) app else app.normalizeHkApplyOLD if (reduced ne app) if (inOrder) isSubType(reduced, other) else isSubType(other, reduced) else tryInfer(app.typeConstructor.dealias) @@ -643,11 +709,11 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } /** Compare type lambda with non-lambda type. 
*/ - def compareHkLambda(tp1: Type, tp2: RefinedType): Boolean = tp1.stripTypeVar match { - case TypeLambda(args1, body1) => + def compareHkLambdaOLD(tp1: Type, tp2: RefinedType): Boolean = tp1.stripTypeVar match { + case TypeLambdaOLD(args1, body1) => //println(i"comparing $tp1 <:< $tp2") tp2 match { - case TypeLambda(args2, body2) => + case TypeLambdaOLD(args2, body2) => args1.corresponds(args2)((arg1, arg2) => varianceConforms(BindingKind.toVariance(arg1.bindingKind), BindingKind.toVariance(arg2.bindingKind))) && @@ -656,7 +722,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case _ => false } case RefinedType(parent1, _, _) => - compareHkLambda(parent1, tp2) + compareHkLambdaOLD(parent1, tp2) case _ => false } @@ -1161,22 +1227,36 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { val tparams2 = tp2.typeParams if (tparams1.isEmpty || tparams2.isEmpty) op(tp1, tp2) else if (tparams1.length != tparams2.length) mergeConflict(tp1, tp2) + else if (Config.newHK) { + val numArgs = tparams1.length + def argRefs(tl: PolyType) = List.range(0, numArgs).map(PolyParam(tl, _)) + TypeLambda( + paramNames = tpnme.syntheticLambdaParamNames(numArgs), + variances = (tparams1, tparams2).zipped.map((tparam1, tparam2) => + (tparam1.memberVariance + tparam2.memberVariance) / 2))( + paramBoundsExp = tl => (tparams1, tparams2).zipped.map((tparam1, tparam2) => + tl.lifted(tparams1, tparam1.memberBoundsAsSeenFrom(tp1)).bounds & + tl.lifted(tparams2, tparam2.memberBoundsAsSeenFrom(tp2)).bounds), + resultTypeExp = tl => + op(tl.lifted(tparams1, tp1).appliedTo(argRefs(tl)), + tl.lifted(tparams2, tp2).appliedTo(argRefs(tl)))) + } else { val bindings: List[RecType => TypeBounds] = (tparams1, tparams2).zipped.map { (tparam1, tparam2) => val b1: RecType => TypeBounds = - tparam1.memberBoundsAsSeenFrom(tp1).recursify(tparams1) + tparam1.memberBoundsAsSeenFrom(tp1).recursifyOLD(tparams1) val b2: RecType => TypeBounds = - tparam2.memberBoundsAsSeenFrom(tp2).recursify(tparams2) + tparam2.memberBoundsAsSeenFrom(tp2).recursifyOLD(tparams2) (rt: RecType) => (b1(rt) & b2(rt)) .withBindingKind( BindingKind.fromVariance( (tparam1.memberVariance + tparam2.memberVariance) / 2)) } - val app1: RecType => Type = rt => tp1.appliedTo(argRefs(rt, tparams1.length)) - val app2: RecType => Type = rt => tp2.appliedTo(argRefs(rt, tparams2.length)) + val app1: RecType => Type = rt => tp1.appliedTo(argRefsOLD(rt, tparams1.length)) + val app2: RecType => Type = rt => tp2.appliedTo(argRefsOLD(rt, tparams2.length)) val body: RecType => Type = rt => op(app1(rt), app2(rt)) - TypeLambda(bindings, body) + TypeLambdaOLD(bindings, body) } } @@ -1459,19 +1539,19 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx) - override def compareHkApply(app: RefinedType, other: Type, inOrder: Boolean) = - if (app.isHKApply) - traceIndented(i"compareHkApply $app, $other, $inOrder, ${app.normalizeHkApply}") { - super.compareHkApply(app, other, inOrder) + override def compareHkApplyOLD(app: RefinedType, other: Type, inOrder: Boolean) = + if (app.isHKApplyOLD) + traceIndented(i"compareHkApply $app, $other, $inOrder, ${app.normalizeHkApplyOLD}") { + super.compareHkApplyOLD(app, other, inOrder) } - else super.compareHkApply(app, other, inOrder) + else super.compareHkApplyOLD(app, other, inOrder) - override def compareHkLambda(tp1: Type, tp2: RefinedType): Boolean = + override def compareHkLambdaOLD(tp1: 
Type, tp2: RefinedType): Boolean = if (tp2.isTypeParam) traceIndented(i"compareHkLambda $tp1, $tp2") { - super.compareHkLambda(tp1, tp2) + super.compareHkLambdaOLD(tp1, tp2) } - else super.compareHkLambda(tp1, tp2) + else super.compareHkLambdaOLD(tp1, tp2) override def toString = "Subtype trace:" + { try b.toString finally b.clear() } } diff --git a/src/dotty/tools/dotc/core/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala index a5aabe9c4..c71726a3e 100644 --- a/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/src/dotty/tools/dotc/core/TypeErasure.scala @@ -356,6 +356,8 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean case rt => tp.derivedMethodType(tp.paramNames, formals, rt) } + case tp: TypeLambda => + this(tp.resultType) case tp: PolyType => this(tp.resultType) match { case rt: MethodType => rt diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index 6b75b574e..9019df6b7 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -158,7 +158,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. tp case tp: RefinedType => tp.derivedRefinedType(simplify(tp.parent, theMap), tp.refinedName, simplify(tp.refinedInfo, theMap)) - .normalizeHkApply + .normalizeHkApplyOLD case tp: TypeAlias => tp.derivedTypeAlias(simplify(tp.alias, theMap)) case AndType(l, r) => @@ -384,7 +384,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. var formals: SimpleMap[TypeName, Symbol] = SimpleMap.Empty // A map of all formal parent parameter // Strip all refinements from parent type, populating `refinements` and `formals` maps. - def normalizeToRef(tp: Type): TypeRef = tp.dealias.normalizeHkApply match { + def normalizeToRef(tp: Type): TypeRef = tp.dealias.normalizeHkApplyOLD match { case tp: TypeRef => tp case tp @ RefinedType(tp1, name: TypeName, rinfo) => diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index cd1b5739d..8d152a616 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -56,6 +56,7 @@ object Types { * | +- PolyParam * | +- RefinedOrRecType -+-- RefinedType * | | -+-- RecType + * | +- HKApply * | +- TypeBounds * | +- ExprType * | +- AnnotatedType @@ -65,8 +66,8 @@ object Types { * +- OrType * +- MethodType -----+- ImplicitMethodType * | +- JavaMethodType - * +- PolyType * +- ClassInfo + * +- PolyType --------- TypeLambda * | * +- NoType * +- NoPrefix @@ -476,7 +477,8 @@ object Types { // twice during findMember which risks picking the wrong prefix in the `substRecThis(rt, pre)` // call below. To avoid this problem we do a defensive copy of the recursive // type first. But if we do this always we risk being inefficient and we run into - // stackoverflows when compiling pos/hk.scala. So we only do a copy if the type + // stackoverflows when compiling pos/hk.scala under the refinement encoding + // of hk-types. So we only do a copy if the type // is visited again in a recursive call to `findMember`, as tracked by `tp.opened`. // Furthermore, if this happens we mark the original recursive type with `openedTwice` // which means that we always defensively copy the type in the future. 
This second @@ -893,7 +895,7 @@ object Types { case _ => this } - /** If this is a TypeAlias type, its alias otherwise this type itself */ + /** If this is a TypeAlias type, its alias, otherwise this type itself */ final def followTypeAlias(implicit ctx: Context): Type = this match { case TypeAlias(alias) => alias case _ => this @@ -1326,8 +1328,14 @@ object Types { /** A marker trait for types that apply only to type symbols */ trait TypeType extends Type - /** A marker trait for types that apply only to term symbols */ - trait TermType extends Type + /** A marker trait for types that apply only to term symbols or that + * represent higher-kinded types. + */ + trait TermOrHkType extends Type + + /** A marker trait for types that apply only to term symbols. + */ + trait TermType extends TermOrHkType /** A marker trait for types that can be types of values or prototypes of value types */ trait ValueTypeOrProto extends TermType @@ -1568,6 +1576,7 @@ object Types { // we might now get cycles over members that are in a refinement but that lack // a symbol. Without the following precaution i974.scala stackoverflows when compiled // with new hk scheme. + // TODO: Do we still need the complications here? val savedDenot = lastDenotation val savedSymbol = lastSymbol if (prefix.isInstanceOf[RecThis] && name.isTypeName) { @@ -1765,7 +1774,7 @@ object Types { override def underlying(implicit ctx: Context): Type = { val res = info - assert(res != this, this) + assert(res != this, this) // TODO drop res } } @@ -2076,8 +2085,8 @@ object Types { this } - def betaReduce(implicit ctx: Context): Type = refinedInfo match { - case TypeAlias(alias) if refinedName.isHkArgName => + def betaReduceOLD(implicit ctx: Context): Type = refinedInfo match { + case TypeAlias(alias) if refinedName.isHkArgNameOLD => def instantiate(rt: RecType) = new TypeMap { def apply(t: Type) = t match { case TypeRef(RecThis(`rt`), `refinedName`) => alias @@ -2121,7 +2130,7 @@ object Types { // A Y-check error (incompatible types involving hk lambdas) for dotty itself. // TODO: investigate and, if possible, drop after revision. val normalizedRefinedInfo = refinedInfo.substRecThis(dummyRec, dummyRec) - RefinedType(parent, refinedName, normalizedRefinedInfo).betaReduce + RefinedType(parent, refinedName, normalizedRefinedInfo).betaReduceOLD } /** Add this refinement to `parent`, provided If `refinedName` is a member of `parent`. */ @@ -2130,6 +2139,7 @@ object Types { else parent // MemberBinding methods + // TODO: Needed? 
def isTypeParam(implicit ctx: Context) = refinedInfo match { case tp: TypeBounds => tp.isBinding case _ => false @@ -2563,7 +2573,7 @@ object Types { } abstract case class PolyType(paramNames: List[TypeName])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type) - extends CachedGroundType with BindingType with TermType with MethodOrPoly { + extends CachedGroundType with BindingType with TermOrHkType with MethodOrPoly { val paramBounds = paramBoundsExp(this) val resType = resultTypeExp(this) @@ -2572,6 +2582,9 @@ object Types { override def resultType(implicit ctx: Context) = resType + /** If this is a type lambda, the variances of its parameters, otherwise Nil.*/ + def variances: List[Int] = Nil + protected def computeSignature(implicit ctx: Context) = resultSignature def isPolymorphicMethodType: Boolean = resType match { @@ -2590,17 +2603,33 @@ object Types { else duplicate(paramNames, paramBounds, resType) def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context) = - PolyType(paramNames)( + if (this.variances.isEmpty) + PolyType(paramNames)( x => paramBounds mapConserve (_.subst(this, x).bounds), x => resType.subst(this, x)) + else + TypeLambda(paramNames, variances)( + x => paramBounds mapConserve (_.subst(this, x).bounds), + x => resType.subst(this, x)) + + def lifted(tparams: List[MemberBinding], t: Type)(implicit ctx: Context): Type = + tparams match { + case LambdaParam(poly, _) :: _ => + t.subst(poly, this) + case tparams: List[Symbol] => + t.subst(tparams, tparams.indices.toList.map(PolyParam(this, _))) + } override def equals(other: Any) = other match { case other: PolyType => - other.paramNames == this.paramNames && other.paramBounds == this.paramBounds && other.resType == this.resType + other.paramNames == this.paramNames && + other.paramBounds == this.paramBounds && + other.resType == this.resType && + other.variances == this.variances case _ => false } override def computeHash = { - doHash(paramNames, resType, paramBounds) + doHash(variances ::: paramNames, resType, paramBounds) } override def toString = s"PolyType($paramNames, $paramBounds, $resType)" @@ -2616,13 +2645,75 @@ object Types { def fromSymbols(tparams: List[Symbol], resultType: Type)(implicit ctx: Context) = if (tparams.isEmpty) resultType - else { - def transform(pt: PolyType, tp: Type) = - tp.subst(tparams, (0 until tparams.length).toList map (PolyParam(pt, _))) - apply(tparams map (_.name.asTypeName))( - pt => tparams map (tparam => transform(pt, tparam.info).bounds), - pt => transform(pt, resultType)) + else apply(tparams map (_.name.asTypeName))( + pt => tparams.map(tparam => pt.lifted(tparams, tparam.info).bounds), + pt => pt.lifted(tparams, resultType)) + } + + // ----- HK types: TypeLambda, LambdaParam, HKApply --------------------- + + /** A type lambda of the form `[v_0 X_0, ..., v_n X_n] => T` */ + class TypeLambda(paramNames: List[TypeName], variances: List[Int])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type) + extends PolyType(paramNames)(paramBoundsExp, resultTypeExp) { + + lazy val typeParams: List[LambdaParam] = + paramNames.indices.toList.map(new LambdaParam(this, _)) + + override def toString = s"TypeLambda($variances, $paramNames, $paramBounds, $resType)" + + } + + /** The parameter of a type lambda */ + case class LambdaParam(tl: TypeLambda, n: Int) extends MemberBinding { + def isTypeParam(implicit ctx: Context) = true + def memberName(implicit 
ctx: Context): TypeName = tl.paramNames(n) + def memberBounds(implicit ctx: Context): TypeBounds = tl.paramBounds(n) + def memberBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds = memberBounds + def memberVariance(implicit ctx: Context): Int = tl.variances(n) + def toArg: Type = PolyParam(tl, n) + } + + object TypeLambda { + def apply(paramNames: List[TypeName], variances: List[Int])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)(implicit ctx: Context): PolyType = { + unique(new TypeLambda(paramNames, variances)(paramBoundsExp, resultTypeExp)) + } + def fromSymbols(tparams: List[Symbol], resultType: Type)(implicit ctx: Context) = + if (tparams.isEmpty) resultType + else apply(tparams map (_.name.asTypeName), tparams.map(_.variance))( + pt => tparams.map(tparam => pt.lifted(tparams, tparam.info).bounds), + pt => pt.lifted(tparams, resultType)) + def unapply(tl: TypeLambda): Some[(List[LambdaParam], Type)] = + Some((tl.typeParams, tl.resType)) + } + + /** A higher kinded type application `C[T_1, ..., T_n]` */ + abstract case class HKApply(tycon: Type, args: List[Type]) + extends CachedProxyType with TermOrHkType { + override def underlying(implicit ctx: Context): Type = tycon + def derivedAppliedType(tycon: Type, args: List[Type])(implicit ctx: Context): Type = + if ((tycon eq this.tycon) && (args eq this.args)) this + else tycon.appliedTo(args) + override def computeHash = doHash(tycon, args) + + def upperBound(implicit ctx: Context): Type = tycon.stripTypeVar match { + case tp: TypeProxy => tp.underlying.appliedTo(args) + case _ => tycon + } + + protected def checkInst(implicit ctx: Context): this.type = { + tycon.stripTypeVar match { + case _: TypeRef | _: PolyParam | _: WildcardType | ErrorType => + case _ => assert(false, s"illegal type constructor in $this") } + this + } + } + + final class CachedHKApply(tycon: Type, args: List[Type]) extends HKApply(tycon, args) + + object HKApply { + def apply(tycon: Type, args: List[Type])(implicit ctx: Context) = + unique(new CachedHKApply(tycon, args)).checkInst } // ----- Bound types: MethodParam, PolyParam -------------------------- @@ -3021,8 +3112,8 @@ object Types { */ abstract case class TypeBounds(lo: Type, hi: Type)(val bindingKind: BindingKind) extends CachedProxyType with TypeType { - assert(lo.isInstanceOf[TermType]) - assert(hi.isInstanceOf[TermType]) + assert(lo.isInstanceOf[TermOrHkType]) + assert(hi.isInstanceOf[TermOrHkType]) def variance: Int = 0 def isBinding = bindingKind != NoBinding @@ -3151,6 +3242,7 @@ object Types { /** A value class defining the interpretation of a TypeBounds * as either a regular type bounds or a binding (i.e. introduction) of a * higher-kinded type parameter. 
+ * TODO: drop */ class BindingKind(val n: Byte) extends AnyVal { def join(that: BindingKind) = @@ -3225,7 +3317,7 @@ object Types { object ErrorType extends ErrorType /** Wildcard type, possibly with bounds */ - abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermType { + abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermOrHkType { def derivedWildcardType(optBounds: Type)(implicit ctx: Context) = if (optBounds eq this.optBounds) this else if (!optBounds.exists) WildcardType @@ -3323,6 +3415,8 @@ object Types { tp.derivedTypeBounds(lo, hi) protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type): Type = tp.derivedSuperType(thistp, supertp) + protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type = + tp.derivedAppliedType(tycon, args) protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type): Type = tp.derivedAndOrType(tp1, tp2) protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type = @@ -3405,6 +3499,17 @@ object Types { val inst = tp.instanceOpt if (inst.exists) apply(inst) else tp + case tp: HKApply => + def mapArg(arg: Type, tparam: MemberBinding): Type = { + val saved = variance + if (tparam.memberVariance < 0) variance = -variance + else if (tparam.memberVariance == 0) variance = 0 + try this(arg) + finally variance = saved + } + derivedAppliedType(tp, this(tp.tycon), + tp.args.zipWithConserve(tp.tycon.typeParams)(mapArg)) + case tp: AndOrType => derivedAndOrType(tp, this(tp.tp1), this(tp.tp2)) @@ -3508,6 +3613,9 @@ object Types { override protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type) = if (thistp.exists && supertp.exists) tp.derivedSuperType(thistp, supertp) else NoType + override protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type = + if (tycon.exists && args.forall(_.exists)) tp.derivedAppliedType(tycon, args) + else approx() // This is rather coarse, but to do better is a bit complicated override protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type) = if (tp1.exists && tp2.exists) tp.derivedAndOrType(tp1, tp2) else if (tp.isAnd) approx(hi = tp1 & tp2) // if one of tp1d, tp2d exists, it is the result of tp1d & tp2d @@ -3600,6 +3708,9 @@ object Types { case tp @ ClassInfo(prefix, _, _, _, _) => this(x, prefix) + case tp @ HKApply(tycon, args) => + foldOver(this(x, tycon), args) + case tp: AndOrType => this(this(x, tp.tp1), tp.tp2) diff --git a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index 38d55e0e8..b23ee5aba 100644 --- a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -138,6 +138,7 @@ Standard-Section: "ASTs" TopLevelStat* BIND Length boundName_NameRef bounds_Type // for type-variables defined in a type pattern BYNAMEtype underlying_Type + LAMBDAtype Length result_Type NamesTypes // variance encoded in front of name: +/-/= POLYtype Length result_Type NamesTypes // needed for refinements METHODtype Length result_Type NamesTypes // needed for refinements PARAMtype Length binder_ASTref paramNum_Nat // needed for refinements @@ -326,7 +327,8 @@ object TastyFormat { final val ORtype = 172 final val METHODtype = 174 final val POLYtype = 175 - final val PARAMtype = 176 + final val LAMBDAtype = 176 + final val PARAMtype = 177 final val ANNOTATION = 178 final val firstSimpleTreeTag = UNITconst @@ -500,4 +502,8 @@ object TastyFormat { 
case TYPEBOUNDS => -2 case _ => 0 } + + /** Map between variances and name prefixes */ + val varianceToPrefix = Map(-1 -> '-', 0 -> '=', 1 -> '+') + val prefixToVariance = Map('-' -> -1, '=' -> 0, '+' -> 1) } diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index d6e6c4d6b..f604bff62 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -254,6 +254,11 @@ class TreePickler(pickler: TastyPickler) { case tpe: ExprType => writeByte(BYNAMEtype) pickleType(tpe.underlying) + case tpe: TypeLambda => + writeByte(LAMBDAtype) + val paramNames = tpe.typeParams.map(tparam => + varianceToPrefix(tparam.memberVariance) +: tparam.memberName) + pickleMethodic(tpe.resultType, paramNames, tpe.paramBounds) case tpe: MethodType if richTypes => writeByte(METHODtype) pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramTypes) diff --git a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 2d230c630..6f0596ac0 100644 --- a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -287,6 +287,15 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) { val sym = ctx.newSymbol(ctx.owner, readName().toTypeName, BindDefinedType, readType()) registerSym(start, sym) TypeRef.withFixedSym(NoPrefix, sym.name, sym) + case LAMBDAtype => + val (rawNames, paramReader) = readNamesSkipParams[TypeName] + val (variances, paramNames) = rawNames + .map(name => (prefixToVariance(name.head), name.tail.asTypeName)).unzip + val result = TypeLambda(paramNames, variances)( + pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)), + pt => readType()) + goto(end) + result case POLYtype => val (names, paramReader) = readNamesSkipParams[TypeName] val result = PolyType(names)( diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index a8888fd3c..07819ef77 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -6,6 +6,7 @@ import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Constants._, De import Contexts.Context, Scopes.Scope, Denotations.Denotation, Annotations.Annotation import StdNames.{nme, tpnme} import ast.Trees._, ast._ +import config.Config import java.lang.Integer.toOctalString import config.Config.summarizeDepth import scala.annotation.switch @@ -50,8 +51,8 @@ class PlainPrinter(_ctx: Context) extends Printer { homogenize(tp1) & homogenize(tp2) case OrType(tp1, tp2) => homogenize(tp1) | homogenize(tp2) - case tp: RefinedType => - tp.normalizeHkApply + case tp: RefinedType if !Config.newHK => + tp.normalizeHkApplyOLD case tp: SkolemType => homogenize(tp.info) case tp: LazyRef => @@ -110,6 +111,30 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def toTextRefinement(rt: RefinedType) = (refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close + protected def argText(arg: Type): Text = arg match { + case arg: TypeBounds => "_" ~ toTextGlobal(arg) + case _ => toTextGlobal(arg) + } + + /** The text for a TypeLambda + * + * [v_1 p_1: B_1, ..., v_n p_n: B_n] -> T + * + * where + * @param paramNames = p_1, ..., p_n + * @param variances = v_1, ..., v_n + * @param argBoundss = B_1, ..., B_n + * @param body = T + */ + protected def typeLambdaText(paramNames: List[String], variances: List[Int], argBoundss: 
List[TypeBounds], body: Type): Text = { + def lambdaParamText(variance: Int, name: String, bounds: TypeBounds): Text = + varianceString(variance) ~ name ~ toText(bounds) + changePrec(GlobalPrec) { + "[" ~ Text((variances, paramNames, argBoundss).zipped.map(lambdaParamText), ", ") ~ + "] -> " ~ toTextGlobal(body) + } + } + /** The longest sequence of refinement types, starting at given type * and following parents. */ @@ -174,6 +199,10 @@ class PlainPrinter(_ctx: Context) extends Printer { toText(polyParamName(pt.paramNames(n))) ~ polyHash(pt) case AnnotatedType(tpe, annot) => toTextLocal(tpe) ~ " " ~ toText(annot) + case tp: TypeLambda => + typeLambdaText(tp.paramNames.map(_.toString), tp.variances, tp.paramBounds, tp.resultType) + case HKApply(tycon, args) => + toTextLocal(tycon) ~ "[!" ~ Text(args.map(argText), ", ") ~ "]" case tp: TypeVar => if (tp.isInstantiated) toTextLocal(tp.instanceOpt) ~ "'" // debug for now, so that we can see where the TypeVars are. @@ -186,7 +215,7 @@ class PlainPrinter(_ctx: Context) extends Printer { else toText(tp.origin) } case tp: LazyRef => - "LazyRef(" ~ toTextGlobal(tp.ref) ~ ")" + "LazyRef(" ~ toTextGlobal(tp.ref) ~ ")" // TODO: only print this during debug mode? case _ => tp.fallbackToText(this) } diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 3da977b31..91f896da2 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -95,10 +95,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } override def toText(tp: Type): Text = controlled { - def argText(arg: Type): Text = arg match { - case arg: TypeBounds => "_" ~ toTextGlobal(arg) - case _ => toTextGlobal(arg) - } def toTextTuple(args: List[Type]): Text = "(" ~ toTextGlobal(args, ", ") ~ ")" def toTextFunction(args: List[Type]): Text = @@ -117,7 +113,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (defn.isFunctionClass(cls)) return toTextFunction(args) if (defn.isTupleClass(cls)) return toTextTuple(args) return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close - case tp @ TypeLambda(argBoundss, body) => + case tp @ TypeLambdaOLD(argBoundss, body) => val variances = argBoundss.map(b => BindingKind.toVariance(b.bindingKind)) val prefix = ((('X' - 'A') + lambdaNestingLevel) % 26 + 'A').toChar val paramNames = argBoundss.indices.toList.map(prefix.toString + _) @@ -130,13 +126,13 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } def apply(t: Type): Type = t match { - case TypeRef(RecThis(rt), name) if name.isHkArgName && contains(tp, rt) => + case TypeRef(RecThis(rt), name) if name.isHkArgNameOLD && contains(tp, rt) => // Make up a name that prints as "Xi". Need to be careful we do not // accidentally unique-hash to something else. That's why we can't // use prefix = NoPrefix or a WithFixedSym instance. 
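The text produced by typeLambdaText has the shape [v_1 p_1: B_1, ..., v_n p_n: B_n] -> T. A minimal standalone sketch of that rendering, using plain strings in place of the printer's Text values (all names below are made up):

    def varianceStr(v: Int): String = if (v > 0) "+" else if (v < 0) "-" else ""

    def typeLambdaStr(names: List[String], variances: List[Int],
                      bounds: List[String], body: String): String =
      (variances, names, bounds).zipped
        .map((v, n, b) => s"${varianceStr(v)}$n$b")    // e.g. "+X0", "-X1 <: AnyRef"
        .mkString("[", ", ", s"] -> $body")

    // typeLambdaStr(List("X0", "X1"), List(1, -1), List("", " <: AnyRef"), "Map[X1, X0]")
    //   == "[+X0, -X1 <: AnyRef] -> Map[X1, X0]"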
TypeRef.withSymAndName( defn.EmptyPackageClass.thisType, defn.AnyClass, - paramNames(name.hkArgIndex).toTypeName) + paramNames(name.hkArgIndexOLD).toTypeName) case _ => mapOver(t) } @@ -186,25 +182,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def blockText[T >: Untyped](trees: List[Tree[T]]): Text = "{" ~ toText(trees, "\n") ~ "}" - /** The text for a TypeLambda - * - * [v_1 p_1: B_1, ..., v_n p_n: B_n] -> T - * - * where - * @param paramNames = p_1, ..., p_n - * @param variances = v_1, ..., v_n - * @param argBoundss = B_1, ..., B_n - * @param body = T - */ - def typeLambdaText(paramNames: List[String], variances: List[Int], argBoundss: List[TypeBounds], body: Type): Text = { - def lambdaParamText(variance: Int, name: String, bounds: TypeBounds): Text = - varianceString(variance) ~ name ~ toText(bounds) - changePrec(GlobalPrec) { - "[" ~ Text((variances, paramNames, argBoundss).zipped.map(lambdaParamText), ", ") ~ - "] -> " ~ toTextGlobal(body) - } - } - override def toText[T >: Untyped](tree: Tree[T]): Text = controlled { import untpd.{modsDeco => _, _} diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala index 3f3108ac2..27cc0e6f5 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -4,9 +4,10 @@ package typer import core._ import ast._ +import config.Config.newHK import Scopes._, Contexts._, Constants._, Types._, Symbols._, Names._, Flags._, Decorators._ import ErrorReporting._, Annotations._, Denotations._, SymDenotations._, StdNames._, TypeErasure._ -import TypeApplications.AppliedType +import TypeApplications.{AppliedType, TypeLambdaOLD} import util.Positions._ import config.Printers._ import ast.Trees._ @@ -426,14 +427,16 @@ trait TypeAssigner { tree.withType(ownType) } - def assignType(tree: untpd.TypeLambdaTree, tparamDefs: List[TypeDef], body: Tree)(implicit ctx: Context) = { - val tparams = tparamDefs.map(_.symbol) - val argBindingFns = tparams.map(tparam => - tparam.info.bounds - .withBindingKind(BindingKind.fromVariance(tparam.variance)) - .recursify(tparams)) - val bodyFn = body.tpe.recursify(tparams) - tree.withType(TypeApplications.TypeLambda(argBindingFns, bodyFn)) + def assignType(tree: untpd.TypeLambdaTree, tparamDefs: List[TypeDef], body: Tree)(implicit ctx: Context) = + if (newHK) tree.withType(TypeLambda.fromSymbols(tparamDefs.map(_.symbol), body.tpe)) + else { + val tparams = tparamDefs.map(_.symbol) + val argBindingFns = tparams.map(tparam => + tparam.info.bounds + .withBindingKind(BindingKind.fromVariance(tparam.variance)) + .recursifyOLD(tparams)) + val bodyFn = body.tpe.recursifyOLD(tparams) + tree.withType(TypeLambdaOLD(argBindingFns, bodyFn)) } def assignType(tree: untpd.ByNameTypeTree, result: Tree)(implicit ctx: Context) = diff --git a/src/dotty/tools/dotc/typer/Variances.scala b/src/dotty/tools/dotc/typer/Variances.scala index e88423f98..9af11a0f4 100644 --- a/src/dotty/tools/dotc/typer/Variances.scala +++ b/src/dotty/tools/dotc/typer/Variances.scala @@ -83,6 +83,17 @@ object Variances { flip(varianceInTypes(paramTypes)(tparam)) & varianceInType(tp.resultType)(tparam) case ExprType(restpe) => varianceInType(restpe)(tparam) + case tp @ HKApply(tycon, args) => + def varianceInArgs(v: Variance, args: List[Type], tparams: List[MemberBinding]): Variance = + args match { + case arg :: args1 => + varianceInArgs( + v & compose(varianceInType(arg)(tparam), tparams.head.memberVariance), + args1, tparams.tail) + case nil => + v + } 
+ varianceInArgs(varianceInType(tycon)(tparam), args, tycon.typeParams) case tp @ PolyType(_) => flip(varianceInTypes(tp.paramBounds)(tparam)) & varianceInType(tp.resultType)(tparam) case AnnotatedType(tp, annot) => -- cgit v1.2.3 From 3490e018e8b11a9d30629e8d415cbae5efd4abf4 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 14 Jun 2016 15:46:06 +0200 Subject: Make Constraint#bounds work for aliases Bounds did not do the right thing if a constrain parameter was aliased. --- src/dotty/tools/dotc/core/ConstraintHandling.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index 00b8a5d25..66767d58a 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -193,9 +193,9 @@ trait ConstraintHandling { } /** The current bounds of type parameter `param` */ - final def bounds(param: PolyParam): TypeBounds = constraint.entry(param) match { - case bounds: TypeBounds => bounds - case _ => param.binder.paramBounds(param.paramNum) + final def bounds(param: PolyParam): TypeBounds = { + val e = constraint.entry(param) + if (e.exists) e.bounds else param.binder.paramBounds(param.paramNum) } /** Add polytype `pt`, possibly with type variables `tvars`, to current constraint -- cgit v1.2.3 From f6efd99e09843d54150f4c5e0f723087ba92007e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:03:38 +0200 Subject: Fix appliedTo and typeParams, and higher kinded subtyping tests Add existential type elimination for HKApply --- src/dotty/tools/dotc/core/TypeApplications.scala | 55 ++++-- src/dotty/tools/dotc/core/TypeComparer.scala | 198 +++++++++++++++------ .../dotc/core/unpickleScala2/Scala2Unpickler.scala | 8 + 3 files changed, 196 insertions(+), 65 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 12b42642d..60d01c125 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -245,16 +245,22 @@ class TypeApplications(val self: Type) extends AnyVal { self match { case self: ClassInfo => self.cls.typeParams + case self: TypeLambda => + self.typeParams case self: TypeRef => val tsym = self.symbol - if (tsym.isClass) tsym.typeParams else tsym.info.typeParams + if (tsym.isClass) tsym.typeParams + else if (!tsym.isCompleting) tsym.info.typeParams + else Nil case self: RefinedType => val precedingParams = self.parent.typeParams.filterNot(_.memberName == self.refinedName) if (self.isTypeParam) precedingParams :+ self else precedingParams case self: RecType => self.parent.typeParams - case self: SingletonType => + case _: HKApply | _: SingletonType => Nil + case self: WildcardType => + self.optBounds.typeParams case self: TypeProxy => self.underlying.typeParams case _ => @@ -342,7 +348,7 @@ class TypeApplications(val self: Type) extends AnyVal { case self: TypeLambda => true case self: HKApply => false case self: SingletonType => false - case self: TypeVar => self.origin.isHK + case self: TypeVar => self.origin.isHK // discrepancy with typeParams, why? 
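The intent of the Constraint#bounds fix above: an entry for a constrained parameter may be a proper bounds pair, or the parameter may already be aliased/instantiated to a type, in which case that type should be read as equal lower and upper bounds; only when there is no entry at all should the declared parameter bounds be used. A simplified standalone model of that decision (these case classes are made up and are not dotty's Constraint or Type classes):

    sealed trait Entry
    case class Bnds(lo: String, hi: String) extends Entry   // ordinary bounds entry
    case class Alias(tp: String)            extends Entry   // parameter aliased/instantiated to tp
    case object NoEntry                     extends Entry

    def boundsOf(entry: Entry, declared: Bnds): Bnds = entry match {
      case b: Bnds   => b
      case Alias(tp) => Bnds(tp, tp)   // an alias is read as >: tp <: tp
      case NoEntry   => declared       // only now fall back to the declared bounds
    }

    // boundsOf(Alias("Int"), declared = Bnds("Nothing", "Any")) == Bnds("Int", "Int")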
case self: WildcardType => self.optBounds.isHK case self: TypeProxy => self.underlying.isHK case _ => false @@ -439,7 +445,11 @@ class TypeApplications(val self: Type) extends AnyVal { */ def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = { def expand(tp: Type) = - if (Config.newHK) TypeLambda.fromSymbols(tparams, tp) + if (Config.newHK) + TypeLambda( + tpnme.syntheticLambdaParamNames(tparams.length), tparams.map(_.variance))( + tl => tparams.map(tparam => tl.lifted(tparams, tparam.info).bounds), + tl => tl.lifted(tparams, tp)) else TypeLambdaOLD( tparams.map(tparam => @@ -579,6 +589,10 @@ class TypeApplications(val self: Type) extends AnyVal { self.EtaExpand(self.typeParamSymbols) } + /** If self is not higher-kinded, eta expand it. */ + def ensureHK(implicit ctx: Context): Type = + if (isHK) self else EtaExpansion(self) + /** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */ def etaExpandIfHK(bound: Type)(implicit ctx: Context): Type = { val hkParams = bound.hkTypeParams @@ -687,8 +701,10 @@ class TypeApplications(val self: Type) extends AnyVal { } } substHkArgs(body) - case self1 => - self1.safeDealias.appliedTo(args, typeParams) + case self1: WildcardType => + self1 + case _ => + self.safeDealias.appliedTo(args, typeParams) } } @@ -712,18 +728,31 @@ class TypeApplications(val self: Type) extends AnyVal { case nil => t } assert(args.nonEmpty) - if (Config.newHK && self.isHK) AppliedType(self, args) - else matchParams(self, typParams, args) match { - case refined @ RefinedType(_, pname, _) if !Config.newHK && pname.isHkArgNameOLD => - refined.betaReduceOLD - case refined => - refined + self.stripTypeVar match { + case self: TypeLambda if !args.exists(_.isInstanceOf[TypeBounds]) => + self.instantiate(args) + case self: AndOrType => + self.derivedAndOrType(self.tp1.appliedTo(args), self.tp2.appliedTo(args)) + case self: LazyRef => + LazyRef(() => self.ref.appliedTo(args, typParams)) + case _ if typParams.isEmpty || typParams.head.isInstanceOf[LambdaParam] => + HKApply(self, args) + case _ => + matchParams(self, typParams, args) match { + case refined @ RefinedType(_, pname, _) if !Config.newHK && pname.isHkArgNameOLD => + refined.betaReduceOLD + case refined => + refined + } } } final def appliedTo(arg: Type)(implicit ctx: Context): Type = appliedTo(arg :: Nil) final def appliedTo(arg1: Type, arg2: Type)(implicit ctx: Context): Type = appliedTo(arg1 :: arg2 :: Nil) + final def applyIfParameterized(args: List[Type])(implicit ctx: Context): Type = + if (typeParams.nonEmpty) appliedTo(args) else self + /** A cycle-safe version of `appliedTo` where computing type parameters do not force * the typeconstructor. Instead, if the type constructor is completing, we make * up hk type parameters matching the arguments. 
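The appliedTo dispatch above either beta-reduces the application (when the constructor is a concrete type lambda and no argument is a wildcard bound) or records it as an HKApply node (roughly, when the constructor is abstract or not yet known). A minimal standalone model of that choice, with made-up case classes rather than dotty's Type hierarchy:

    sealed trait Tpe
    case class Ref(name: String)                       extends Tpe  // class or abstract constructor
    case class Param(name: String)                     extends Tpe  // a lambda-bound parameter
    case class Lambda(params: List[String], body: Tpe) extends Tpe  // [X1, ..., Xn] -> body
    case class Apply(tycon: Tpe, args: List[Tpe])      extends Tpe  // kept when tycon cannot be reduced

    def subst(tp: Tpe, env: Map[String, Tpe]): Tpe = tp match {
      case Param(n)         => env.getOrElse(n, tp)
      case Apply(tc, args)  => Apply(subst(tc, env), args.map(subst(_, env)))
      case Lambda(ps, body) => Lambda(ps, subst(body, env -- ps))   // inner params shadow outer ones
      case _                => tp
    }

    def appliedTo(tycon: Tpe, args: List[Tpe]): Tpe = tycon match {
      case Lambda(ps, body) if ps.length == args.length =>
        subst(body, ps.zip(args).toMap)      // beta-reduce: [X] -> List[X] applied to Int gives List[Int]
      case _ =>
        Apply(tycon, args)                   // otherwise keep the application node
    }

    // appliedTo(Lambda(List("X"), Apply(Ref("List"), List(Param("X")))), List(Ref("Int")))
    //   == Apply(Ref("List"), List(Ref("Int")))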
This is needed when unpickling @@ -733,7 +762,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (Config.newHK) self match { case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => - AppliedType(self, args) + HKApply(self, args) case _ => appliedTo(args, typeParams) } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 566865eb4..9449787c1 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -390,40 +390,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable)) } case tp2 @ HKApply(tycon2, args2) => - def compareHkApply(tycon2: Type): Boolean = tycon2 match { - case tycon2: TypeVar => compareHkApply(tycon2.underlying) - case param2: PolyParam if canConstrain(param2) => - val tparams2 = tycon2.typeParams - - def tyconOK(tycon1a: Type) = - variancesConform(tycon1a.typeParams, tparams2) && { - if (ctx.mode.is(Mode.TypevarsMissContext)) isSubType(tp1, tycon1a.appliedTo(args2)) - else tryInstantiate(param2, tycon1a) && isSubType(tp1, tp2) - } - - tp1 match { - case tp1 @ HKApply(tycon1, _) => - tyconOK(tycon1) || isSubType(tp1.upperBound, tp2) - case _ if tp1.widenDealias.typeSymbol.isClass => - val classBounds = tp2.classSymbols - def liftToBase(bcs: List[ClassSymbol]): Boolean = bcs match { - case bc :: bcs1 => - classBounds.exists(bc.derivesFrom) && tyconOK(tp1.baseTypeRef(bc)) || - liftToBase(bcs1) - case _ => - false - } - liftToBase(tp1.baseClasses) - case tp1: TypeProxy => - isSubType(tp1.underlying, tp2) - case _ => - false - } - case _ => - // TODO handle lower bounds of hk params here - false - } - compareHkApply(tycon2) || fourthTry(tp1, tp2) + compareHkApply2(tp1, tp2, tycon2, args2) case tp2 @ TypeLambda(tparams2, body2) => def compareHkLambda = tp1.stripTypeVar match { case tp1 @ TypeLambda(tparams1, body1) => @@ -452,7 +419,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { return isSubType(tp1, OrType(tp21, tp221)) && isSubType(tp1, OrType(tp21, tp222)) case _ => } - eitherIsSubType(tp1, tp21, tp1, tp22) || fourthTry(tp1, tp2) + either(isSubType(tp1, tp21), isSubType(tp1, tp22)) || fourthTry(tp1, tp2) case tp2 @ MethodType(_, formals2) => def compareMethod = tp1 match { case tp1 @ MethodType(_, formals1) => @@ -555,14 +522,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp1: RecType => isNewSubType(tp1.parent, tp2) case HKApply(tycon1, args1) => - tp2 match { - case AppliedType(tycon2, args2) => - assert(!tycon2.isHK) // this should have been handled by thirdTry - isSubType(tycon1, EtaExpansion(tycon2)) && - isSubArgs(args1, args2, tycon2.typeParams) - case _ => - false - } + compareHkApply1(tp1, tycon1, args1, tp2) case EtaExpansion(tycon1) => isSubType(tycon1, tp2) case AndType(tp11, tp12) => @@ -581,7 +541,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { return isSubType(AndType(tp11, tp121), tp2) && isSubType(AndType(tp11, tp122), tp2) case _ => } - eitherIsSubType(tp11, tp2, tp12, tp2) + either(isSubType(tp11, tp2), isSubType(tp12, tp2)) case JavaArrayType(elem1) => def compareJavaArray = tp2 match { case JavaArrayType(elem2) => isSubType(elem1, elem2) @@ -595,6 +555,128 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } + /** Subtype test for the hk application 
`tp2 = tycon2[args2]`. + */ + def compareHkApply2(tp1: Type, tp2: Type, tycon2: Type, args2: List[Type]): Boolean = { + val tparams = tycon2.typeParams + + def isMatchingApply(tp1: Type): Boolean = tp1 match { + case HKApply(tycon1, args1) => + tycon1 match { + case tycon1: PolyParam => + (tycon1 == tycon2 || + canConstrain(tycon1) && tryInstantiate(tycon1, tycon2)) && + isSubArgs(args1, args2, tparams) + case tycon1: TypeRef => + tycon2 match { + case tycon2: TypeRef if tycon1.symbol == tycon2.symbol => + isSubType(tycon1.prefix, tycon2.prefix) && + isSubArgs(args1, args2, tparams) + case _ => + false + } + case tycon1: TypeVar => + isMatchingApply(tycon1.underlying) + case tycon1: AnnotatedType => + isMatchingApply(tycon1.underlying) + case _ => + false + } + case _ => + false + } + + /** `param2` can be instantiated to the type constructor of the LHS + * or to the type constructor of one of the LHS base class instances + * and the resulting type application is a supertype of `tp1`, + * or fallback to fourthTry. + */ + def canInstantiate(param2: PolyParam): Boolean = { + + /** `param2` can be instantiated to `tycon1a`. + * and the resulting type application is a supertype of `tp1`. + */ + def tyconOK(tycon1a: Type) = + variancesConform(tycon1a.typeParams, tparams) && { + (ctx.mode.is(Mode.TypevarsMissContext) || + tryInstantiate(param2, tycon1a.ensureHK)) && + isSubType(tp1, tycon1a.appliedTo(args2)) + } + + tp1.widen match { + case tp1w @ HKApply(tycon1, _) => + tyconOK(tycon1) + case tp1w => + tp1w.typeSymbol.isClass && { + val classBounds = tycon2.classSymbols + def liftToBase(bcs: List[ClassSymbol]): Boolean = bcs match { + case bc :: bcs1 => + classBounds.exists(bc.derivesFrom) && tyconOK(tp1w.baseTypeRef(bc)) || + liftToBase(bcs1) + case _ => + false + } + liftToBase(tp1w.baseClasses) + } || + fourthTry(tp1, tp2) + } + } + + /** Let `tycon2bounds` be the bounds of the RHS type constructor `tycon2`. + * Let `app2 = tp2` where the type constructor of `tp2` is replaced by + * `tycon2bounds.lo`. + * If both bounds are the same, continue with `tp1 <:< app2`. + * otherwise continue with either + * + * tp1 <:< tp2 using fourthTry (this might instantiate params in tp1) + * tp1 <:< app2 using isSubType (this might instantiate params in tp2) + */ + def compareLower(tycon2bounds: TypeBounds): Boolean = { + val app2 = tycon2bounds.lo.applyIfParameterized(args2) + if (tycon2bounds.lo eq tycon2bounds.hi) isSubType(tp1, app2) + else either(fourthTry(tp1, tp2), isSubType(tp1, app2)) + } + + tycon2 match { + case param2: PolyParam => + isMatchingApply(tp1) || { + if (canConstrain(param2)) canInstantiate(param2) + else compareLower(bounds(param2)) + } + case tycon2: TypeRef => + isMatchingApply(tp1) || + compareLower(tycon2.info.bounds) + case tycon2: TypeVar => + isSubType(tp1, tycon2.underlying.appliedTo(args2)) + case tycon2: AnnotatedType => + compareHkApply2(tp1, tp2, tycon2.underlying, args2) + case _ => + false + } + } + + /** Subtype test for the hk application `tp1 = tycon1[args1]`. 
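One user-visible consequence of the canInstantiate case above: when the right-hand constructor is a constrainable type parameter, it can be instantiated to the type constructor of the left-hand side (or of one of its base types). A small source-level example of the situation being solved (the trait, method and value names are made up):

    object HkInstantiationDemo {
      trait Functor[F[_]] { def map[A, B](fa: F[A])(f: A => B): F[B] }

      implicit val listFunctor: Functor[List] = new Functor[List] {
        def map[A, B](fa: List[A])(f: A => B): List[B] = fa.map(f)
      }

      def rewrap[F[_], A](fa: F[A])(implicit F: Functor[F]): F[A] = F.map(fa)(identity)

      // Typing rewrap(List(1, 2)) needs List[Int] <: F[A] for constrainable F and A;
      // the comparer instantiates F := List and A := Int, so the call has type List[Int].
      val xs: List[Int] = rewrap(List(1, 2))
    }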
+ */ + def compareHkApply1(tp1: Type, tycon1: Type, args1: List[Type], tp2: Type): Boolean = + tycon1 match { + case param1: PolyParam => + def canInstantiate = tp2 match { + case AppliedType(tycon2, args2) => + tryInstantiate(param1, tycon2.ensureHK) && isSubArgs(args1, args2, tycon2.typeParams) + case _ => + false + } + canConstrain(param1) && canInstantiate || + isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2) + case tycon1: TypeProxy => + isSubType(tycon1.underlying.applyIfParameterized(args1), tp2) + case _ => + false + } + + /** Subtype test for corresponding arguments in `args1`, `args2` according to + * variances in type parameters `tparams`. + */ def isSubArgs(args1: List[Type], args2: List[Type], tparams: List[MemberBinding]): Boolean = if (args1.isEmpty) args2.isEmpty else args2.nonEmpty && { @@ -678,7 +760,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { val hkTypeParams = param.typeParams subtyping.println(i"classBounds = ${app.classSymbols}") subtyping.println(i"base classes = ${other.baseClasses}") - subtyping.println(i"type params = $hkTypeParams") + subtyping.println(i"type params = $hkTypeParams, ${app.classSymbol}") if (inOrder) unifyWith(other) else testLifted(other, app, hkTypeParams, unifyWith) case _ => @@ -775,8 +857,9 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { /** The symbol referred to in the refinement of `rt` */ private def refinedSymbol(rt: RefinedType) = rt.parent.member(rt.refinedName).symbol - /** Returns true iff either `tp11 <:< tp21` or `tp12 <:< tp22`, trying at the same time - * to keep the constraint as wide as possible. Specifically, if + /** Returns true iff the result of evaluating either `op1` or `op2` is true, + * trying at the same time to keep the constraint as wide as possible. + * E.g, if * * tp11 <:< tp12 = true with post-constraint c1 * tp12 <:< tp22 = true with post-constraint c2 @@ -803,15 +886,15 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * Here, each precondition leads to a different constraint, and neither of * the two post-constraints subsumes the other. 
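isSubArgs compares corresponding arguments according to the variances of the constructor's parameters. Seen from the source level this is just the usual rule for applied types (the class names below are made up):

    object SubArgsDemo {
      class Box[+A]; class Sink[-A]; class Cell[A]

      implicitly[Box[Int] <:< Box[Any]]    // covariant parameter: argument compared left-to-right
      implicitly[Sink[Any] <:< Sink[Int]]  // contravariant parameter: argument compared right-to-left
      implicitly[Cell[Int] <:< Cell[Int]]  // invariant parameter: arguments must match in both directions
    }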
*/ - private def eitherIsSubType(tp11: Type, tp21: Type, tp12: Type, tp22: Type) = { + private def either(op1: => Boolean, op2: => Boolean): Boolean = { val preConstraint = constraint - isSubType(tp11, tp21) && { + op1 && { val leftConstraint = constraint constraint = preConstraint - if (!(isSubType(tp12, tp22) && subsumes(leftConstraint, constraint, preConstraint))) + if (!(op2 && subsumes(leftConstraint, constraint, preConstraint))) constraint = leftConstraint true - } || isSubType(tp12, tp22) + } || op2 } /** Like tp1 <:< tp2, but returns false immediately if we know that @@ -1533,12 +1616,23 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { } override def addConstraint(param: PolyParam, bound: Type, fromBelow: Boolean): Boolean = - traceIndented(s"add constraint $param ${if (fromBelow) ">:" else "<:"} $bound $frozenConstraint") { + traceIndented(i"add constraint $param ${if (fromBelow) ">:" else "<:"} $bound $frozenConstraint") { super.addConstraint(param, bound, fromBelow) } override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx) + override def compareHkApply2(tp1: Type, tp2: Type, tycon2: Type, args2: List[Type]): Boolean = { + def addendum = tycon2 match { + case param2: PolyParam => + i": it's a polyparam with entry ${ctx.typerState.constraint.entry(param2)}" + case _ => + } + traceIndented(i"compareHkApply $tp1, $tp2, $addendum") { + super.compareHkApply2(tp1, tp2, tycon2, args2) + } + } + override def compareHkApplyOLD(app: RefinedType, other: Type, inOrder: Boolean) = if (app.isHKApplyOLD) traceIndented(i"compareHkApply $app, $other, $inOrder, ${app.normalizeHkApplyOLD}") { diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 18a4e83b6..aa660f73e 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -632,6 +632,14 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas case info => tp.derivedRefinedType(parent1, name, info) } + case tp @ HKApply(tycon, args) => + val tycon1 = tycon.safeDealias + def mapArg(arg: Type) = arg match { + case arg: TypeRef if isBound(arg) => arg.symbol.info + case _ => arg + } + if (tycon1 ne tycon) elim(tycon1.appliedTo(args)) + else tp.derivedAppliedType(tycon, args.map(mapArg)) case _ => tp } -- cgit v1.2.3 From 97e84e66d44e1a8a82a6bea95ff883cc2aec718c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 18 Jun 2016 11:32:06 +0200 Subject: Fix printing of type lambda trees and types --- src/dotty/tools/dotc/printing/PlainPrinter.scala | 4 ++-- src/dotty/tools/dotc/printing/RefinedPrinter.scala | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index 07819ef77..e86137c47 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -187,6 +187,8 @@ class PlainPrinter(_ctx: Context) extends Printer { } case tp: ExprType => changePrec(GlobalPrec) { "=> " ~ toText(tp.resultType) } + case tp: TypeLambda => + typeLambdaText(tp.paramNames.map(_.toString), tp.variances, tp.paramBounds, tp.resultType) case tp: PolyType => def paramText(name: TypeName, bounds: TypeBounds) = toText(polyParamName(name)) ~ polyHash(tp) ~ toText(bounds) @@ -199,8 +201,6 @@ class PlainPrinter(_ctx: Context) extends Printer { 
toText(polyParamName(pt.paramNames(n))) ~ polyHash(pt) case AnnotatedType(tpe, annot) => toTextLocal(tpe) ~ " " ~ toText(annot) - case tp: TypeLambda => - typeLambdaText(tp.paramNames.map(_.toString), tp.variances, tp.paramBounds, tp.resultType) case HKApply(tycon, args) => toTextLocal(tycon) ~ "[!" ~ Text(args.map(argText), ", ") ~ "]" case tp: TypeVar => diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 91f896da2..d6ce67cef 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -383,7 +383,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case AppliedTypeTree(tpt, args) => toTextLocal(tpt) ~ "[" ~ Text(args map argText, ", ") ~ "]" case TypeLambdaTree(tparams, body) => - tparamsText(tparams) ~ " -> " ~ toText(body) + changePrec(GlobalPrec) { + tparamsText(tparams) ~ " -> " ~ toText(body) + } case ByNameTypeTree(tpt) => "=> " ~ toTextLocal(tpt) case TypeBoundsTree(lo, hi) => -- cgit v1.2.3 From 73dd03944cdfbc2588e9e41f407e0ad3a48abe96 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:04:49 +0200 Subject: Various hk related fixes in types - Handle hk applications in normalizeToRefs - Handle type lambdas in classSymbol(s) - Fix variances computation in type lambdas - Provide type parameters for uncompleted type lambdas - Revert TermOrHK type characterization --- src/dotty/tools/dotc/core/TypeOps.scala | 51 ++++++++++++++----------- src/dotty/tools/dotc/core/Types.scala | 66 +++++++++++++++++++++------------ src/dotty/tools/dotc/typer/Typer.scala | 1 - 3 files changed, 73 insertions(+), 45 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index 9019df6b7..0d02de1da 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -384,28 +384,37 @@ trait TypeOps { this: Context => // TODO: Make standalone object. var formals: SimpleMap[TypeName, Symbol] = SimpleMap.Empty // A map of all formal parent parameter // Strip all refinements from parent type, populating `refinements` and `formals` maps. - def normalizeToRef(tp: Type): TypeRef = tp.dealias.normalizeHkApplyOLD match { - case tp: TypeRef => - tp - case tp @ RefinedType(tp1, name: TypeName, rinfo) => - rinfo match { - case TypeAlias(TypeRef(pre, name1)) if name1 == name && (pre =:= cls.thisType) => - // Don't record refinements of the form X = this.X (These can arise using named parameters). - typr.println(s"dropping refinement $tp") - case _ => - val prevInfo = refinements(name) - refinements = refinements.updated(name, - if (prevInfo == null) tp.refinedInfo else prevInfo & tp.refinedInfo) - formals = formals.updated(name, tp1.typeParamNamed(name)) - } - normalizeToRef(tp1) - case ErrorType => - defn.AnyType - case AnnotatedType(tpe, _) => - normalizeToRef(tpe) - case _ => - throw new TypeError(s"unexpected parent type: $tp") + def normalizeToRef(tp: Type): TypeRef = { + def fail = throw new TypeError(s"unexpected parent type: $tp") + tp.dealias.normalizeHkApplyOLD match { + case tp: TypeRef => + tp + case tp @ RefinedType(tp1, name: TypeName, rinfo) => + rinfo match { + case TypeAlias(TypeRef(pre, name1)) if name1 == name && (pre =:= cls.thisType) => + // Don't record refinements of the form X = this.X (These can arise using named parameters). 
+ typr.println(s"dropping refinement $tp") + case _ => + val prevInfo = refinements(name) + refinements = refinements.updated(name, + if (prevInfo == null) tp.refinedInfo else prevInfo & tp.refinedInfo) + formals = formals.updated(name, tp1.typeParamNamed(name)) + } + normalizeToRef(tp1) + case ErrorType => + defn.AnyType + case AnnotatedType(tpe, _) => + normalizeToRef(tpe) + case HKApply(tycon: TypeRef, args) => + tycon.info match { + case TypeAlias(alias) => normalizeToRef(alias.appliedTo(args)) + case _ => fail + } + case _ => + fail + } } + val parentRefs = parents map normalizeToRef // Enter all refinements into current scope. diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 8d152a616..ca60f08a6 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -290,8 +290,8 @@ object Types { case _ => NoSymbol } - /** The least class or trait of which this type is a subtype, or - * NoSymbol if none exists (either because this type is not a + /** The least class or trait of which this type is a subtype or parameterized + * instance, or NoSymbol if none exists (either because this type is not a * value type, or because superclasses are ambiguous). */ final def classSymbol(implicit ctx: Context): Symbol = this match { @@ -302,6 +302,8 @@ object Types { if (sym.isClass) sym else tp.underlying.classSymbol case tp: ClassInfo => tp.cls + case tp: TypeLambda => + tp.resType.classSymbol case tp: SingletonType => NoSymbol case tp: TypeProxy => @@ -330,6 +332,8 @@ object Types { case tp: TypeRef => val sym = tp.symbol if (sym.isClass) sym.asClass :: Nil else tp.underlying.classSymbols + case tp: TypeLambda => + tp.resType.classSymbols case tp: TypeProxy => tp.underlying.classSymbols case AndType(l, r) => @@ -878,7 +882,7 @@ object Types { if (tp1.exists) tp1.dealias else tp case tp: AnnotatedType => tp.derivedAnnotatedType(tp.tpe.dealias, tp.annot) - case tp => tp + case _ => this } /** Perform successive widenings and dealiasings until none can be applied anymore */ @@ -1191,8 +1195,8 @@ object Types { /** Turn type into a function type. * @pre this is a non-dependent method type. - * @param drop The number of trailing parameters that should be dropped - * when forming the function type. + * @param dropLast The number of trailing parameters that should be dropped + * when forming the function type. */ def toFunctionType(dropLast: Int = 0)(implicit ctx: Context): Type = this match { case mt @ MethodType(_, formals) if !mt.isDependent || ctx.mode.is(Mode.AllowDependentFunctions) => @@ -1331,16 +1335,12 @@ object Types { /** A marker trait for types that apply only to term symbols or that * represent higher-kinded types. */ - trait TermOrHkType extends Type - - /** A marker trait for types that apply only to term symbols. 
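The new HKApply case in normalizeToRef handles a parent written through a parameterized type alias: the alias is expanded, the application reduced, and normalization continues until a plain class reference remains. A source-level shape that can exercise this path (hypothetical names; whether the alias survives until normalizeToRef depends on earlier dealiasing):

    object ParentAliasDemo {
      class Base[T]
      type Alias[T] = Base[T]
      class Sub extends Alias[Int]   // the parent normalizes to the class reference Base[Int]
    }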
- */ - trait TermType extends TermOrHkType + trait TermType extends Type /** A marker trait for types that can be types of values or prototypes of value types */ trait ValueTypeOrProto extends TermType - /** A marker trait for types that can be types of values */ + /** A marker trait for types that can be types of values or that are higher-kinded */ trait ValueType extends ValueTypeOrProto /** A marker trait for types that are guaranteed to contain only a @@ -2573,7 +2573,7 @@ object Types { } abstract case class PolyType(paramNames: List[TypeName])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type) - extends CachedGroundType with BindingType with TermOrHkType with MethodOrPoly { + extends CachedGroundType with BindingType with TermType with MethodOrPoly { val paramBounds = paramBoundsExp(this) val resType = resultTypeExp(this) @@ -2592,7 +2592,7 @@ object Types { case _ => false } - def instantiate(argTypes: List[Type])(implicit ctx: Context): Type = + final def instantiate(argTypes: List[Type])(implicit ctx: Context): Type = resultType.substParams(this, argTypes) def instantiateBounds(argTypes: List[Type])(implicit ctx: Context): List[TypeBounds] = @@ -2653,8 +2653,10 @@ object Types { // ----- HK types: TypeLambda, LambdaParam, HKApply --------------------- /** A type lambda of the form `[v_0 X_0, ..., v_n X_n] => T` */ - class TypeLambda(paramNames: List[TypeName], variances: List[Int])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type) - extends PolyType(paramNames)(paramBoundsExp, resultTypeExp) { + class TypeLambda(paramNames: List[TypeName], override val variances: List[Int])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type) + extends PolyType(paramNames)(paramBoundsExp, resultTypeExp) with ValueType { + + assert(resType.isValueType, this) lazy val typeParams: List[LambdaParam] = paramNames.indices.toList.map(new LambdaParam(this, _)) @@ -2684,27 +2686,45 @@ object Types { pt => pt.lifted(tparams, resultType)) def unapply(tl: TypeLambda): Some[(List[LambdaParam], Type)] = Some((tl.typeParams, tl.resType)) + + def any(n: Int)(implicit ctx: Context) = + apply(tpnme.syntheticLambdaParamNames(n), List.fill(n)(0))( + pt => List.fill(n)(TypeBounds.empty), pt => defn.AnyType) } /** A higher kinded type application `C[T_1, ..., T_n]` */ abstract case class HKApply(tycon: Type, args: List[Type]) - extends CachedProxyType with TermOrHkType { + extends CachedProxyType with ValueType { override def underlying(implicit ctx: Context): Type = tycon def derivedAppliedType(tycon: Type, args: List[Type])(implicit ctx: Context): Type = if ((tycon eq this.tycon) && (args eq this.args)) this else tycon.appliedTo(args) + override def computeHash = doHash(tycon, args) def upperBound(implicit ctx: Context): Type = tycon.stripTypeVar match { case tp: TypeProxy => tp.underlying.appliedTo(args) - case _ => tycon + case _ => defn.AnyType } + def typeParams(implicit ctx: Context): List[MemberBinding] = { + val tparams = tycon.typeParams + if (tparams.isEmpty) TypeLambda.any(args.length).typeParams else tparams + } +/* + def lowerBound(implicit ctx: Context): Type = tycon.stripTypeVar match { + case tp: TypeRef => + val lb = tp.info.bounds.lo.typeParams.length == args.lengt + case _ => defn.NothingType + } +*/ protected def checkInst(implicit ctx: Context): this.type = { - tycon.stripTypeVar match { - case _: TypeRef | _: PolyParam | _: WildcardType | ErrorType => + def check(tycon: Type): Unit = tycon.stripTypeVar match { + 
case _: TypeRef | _: PolyParam | ErrorType => + case tycon: AnnotatedType => check(tycon.underlying) case _ => assert(false, s"illegal type constructor in $this") } + check(tycon) this } } @@ -3112,8 +3132,8 @@ object Types { */ abstract case class TypeBounds(lo: Type, hi: Type)(val bindingKind: BindingKind) extends CachedProxyType with TypeType { - assert(lo.isInstanceOf[TermOrHkType]) - assert(hi.isInstanceOf[TermOrHkType]) + assert(lo.isInstanceOf[TermType]) + assert(hi.isInstanceOf[TermType]) def variance: Int = 0 def isBinding = bindingKind != NoBinding @@ -3317,7 +3337,7 @@ object Types { object ErrorType extends ErrorType /** Wildcard type, possibly with bounds */ - abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermOrHkType { + abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermType { def derivedWildcardType(optBounds: Type)(implicit ctx: Context) = if (optBounds eq this.optBounds) this else if (!optBounds.exists) WildcardType @@ -3508,7 +3528,7 @@ object Types { finally variance = saved } derivedAppliedType(tp, this(tp.tycon), - tp.args.zipWithConserve(tp.tycon.typeParams)(mapArg)) + tp.args.zipWithConserve(tp.typeParams)(mapArg)) case tp: AndOrType => derivedAndOrType(tp, this(tp.tp1), this(tp.tp2)) diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 2ab06bf70..49d69f04e 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -17,7 +17,6 @@ import SymDenotations._ import Annotations._ import Names._ import NameOps._ -import Applications._ import Flags._ import Decorators._ import ErrorReporting._ -- cgit v1.2.3 From 02ce995f44c2252f7f7c0f07aa2a86f045b51ac2 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:07:51 +0200 Subject: Refactoring of PolyType and TypeLambda Make them each inherit from common BaseType GenericType. That way we avoid inheriting accidentally stuff from PolyType in TypeLambda. Also, Fix adaptation of type lambdas. Don't confuse them with PolyTypes. --- src/dotty/tools/dotc/core/Constraint.scala | 10 +- src/dotty/tools/dotc/core/Definitions.scala | 6 +- src/dotty/tools/dotc/core/OrderingConstraint.scala | 48 ++++----- src/dotty/tools/dotc/core/SymDenotations.scala | 4 +- src/dotty/tools/dotc/core/TypeApplications.scala | 2 +- src/dotty/tools/dotc/core/TypeComparer.scala | 19 ++-- src/dotty/tools/dotc/core/TyperState.scala | 2 +- src/dotty/tools/dotc/core/Types.scala | 107 ++++++++++++--------- .../tools/dotc/core/tasty/TreeUnpickler.scala | 2 +- .../dotc/core/unpickleScala2/Scala2Unpickler.scala | 4 +- src/dotty/tools/dotc/printing/PlainPrinter.scala | 2 +- .../dotc/transform/FullParameterization.scala | 10 +- src/dotty/tools/dotc/transform/PostTyper.scala | 4 +- src/dotty/tools/dotc/typer/Checking.scala | 2 +- src/dotty/tools/dotc/typer/ProtoTypes.scala | 2 +- src/dotty/tools/dotc/typer/Variances.scala | 2 +- 16 files changed, 124 insertions(+), 102 deletions(-) diff --git a/src/dotty/tools/dotc/core/Constraint.scala b/src/dotty/tools/dotc/core/Constraint.scala index 19f93ce47..38f714131 100644 --- a/src/dotty/tools/dotc/core/Constraint.scala +++ b/src/dotty/tools/dotc/core/Constraint.scala @@ -23,7 +23,7 @@ abstract class Constraint extends Showable { type This <: Constraint /** Does the constraint's domain contain the type parameters of `pt`? 
*/ - def contains(pt: PolyType): Boolean + def contains(pt: GenericType): Boolean /** Does the constraint's domain contain the type parameter `param`? */ def contains(param: PolyParam): Boolean @@ -79,7 +79,7 @@ abstract class Constraint extends Showable { * satisfiability but will solved to give instances of * type variables. */ - def add(poly: PolyType, tvars: List[TypeVar])(implicit ctx: Context): This + def add(poly: GenericType, tvars: List[TypeVar])(implicit ctx: Context): This /** A new constraint which is derived from this constraint by updating * the entry for parameter `param` to `tp`. @@ -122,13 +122,13 @@ abstract class Constraint extends Showable { * entry array, but is going to be removed at the same step, * or -1 if no such parameter exists. */ - def isRemovable(pt: PolyType, removedParam: Int = -1): Boolean + def isRemovable(pt: GenericType, removedParam: Int = -1): Boolean /** A new constraint with all entries coming from `pt` removed. */ - def remove(pt: PolyType)(implicit ctx: Context): This + def remove(pt: GenericType)(implicit ctx: Context): This /** The polytypes constrained by this constraint */ - def domainPolys: List[PolyType] + def domainPolys: List[GenericType] /** The polytype parameters constrained by this constraint */ def domainParams: List[PolyParam] diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala index 5db9a6b0d..8d020a428 100644 --- a/src/dotty/tools/dotc/core/Definitions.scala +++ b/src/dotty/tools/dotc/core/Definitions.scala @@ -86,17 +86,17 @@ class Definitions { } private def newPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int, - resultTypeFn: PolyType => Type, flags: FlagSet = EmptyFlags) = { + resultTypeFn: GenericType => Type, flags: FlagSet = EmptyFlags) = { val tparamNames = tpnme.syntheticTypeParamNames(typeParamCount) val tparamBounds = tparamNames map (_ => TypeBounds.empty) val ptype = PolyType(tparamNames)(_ => tparamBounds, resultTypeFn) newMethod(cls, name, ptype, flags) } - private def newT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) = + private def newT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: GenericType => Type, flags: FlagSet) = newPolyMethod(cls, name, 1, resultTypeFn, flags) - private def newT1EmptyParamsMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) = + private def newT1EmptyParamsMethod(cls: ClassSymbol, name: TermName, resultTypeFn: GenericType => Type, flags: FlagSet) = newPolyMethod(cls, name, 1, pt => MethodType(Nil, resultTypeFn(pt)), flags) private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef] = { diff --git a/src/dotty/tools/dotc/core/OrderingConstraint.scala b/src/dotty/tools/dotc/core/OrderingConstraint.scala index e818862cb..d9f6f5721 100644 --- a/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -15,10 +15,10 @@ import reflect.ClassTag object OrderingConstraint { /** The type of `OrderingConstraint#boundsMap` */ - type ParamBounds = SimpleMap[PolyType, Array[Type]] + type ParamBounds = SimpleMap[GenericType, Array[Type]] /** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */ - type ParamOrdering = SimpleMap[PolyType, Array[List[PolyParam]]] + type ParamOrdering = SimpleMap[GenericType, Array[List[PolyParam]]] /** A new constraint with given maps */ private def newConstraint(boundsMap: ParamBounds, lowerMap: 
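The Constraint operations above now range over GenericType, the new common binder of PolyType and TypeLambda introduced in this commit. A toy standalone model of the resulting shape (simplified; these are not the real classes or signatures):

    // One common binder carrying names, bounds and a result; two concrete forms.
    sealed abstract class GenericTpe {
      def paramNames: List[String]
      def paramBounds: List[(String, String)]   // (lo, hi) per parameter
      def resType: String
    }
    // Polymorphic method types: no variances, result is a method or expression type.
    final case class PolyTpe(paramNames: List[String],
                             paramBounds: List[(String, String)],
                             resType: String) extends GenericTpe
    // Type lambdas: per-parameter variances, result is a value type.
    final case class LambdaTpe(paramNames: List[String],
                               variances: List[Int],
                               paramBounds: List[(String, String)],
                               resType: String) extends GenericTpe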
ParamOrdering, upperMap: ParamOrdering)(implicit ctx: Context) : OrderingConstraint = { @@ -30,11 +30,11 @@ object OrderingConstraint { /** A lens for updating a single entry array in one of the three constraint maps */ abstract class ConstraintLens[T <: AnyRef: ClassTag] { - def entries(c: OrderingConstraint, poly: PolyType): Array[T] - def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[T])(implicit ctx: Context): OrderingConstraint + def entries(c: OrderingConstraint, poly: GenericType): Array[T] + def updateEntries(c: OrderingConstraint, poly: GenericType, entries: Array[T])(implicit ctx: Context): OrderingConstraint def initial: T - def apply(c: OrderingConstraint, poly: PolyType, idx: Int) = { + def apply(c: OrderingConstraint, poly: GenericType, idx: Int) = { val es = entries(c, poly) if (es == null) initial else es(idx) } @@ -45,7 +45,7 @@ object OrderingConstraint { * parts of `current` which are not shared by `prev`. */ def update(prev: OrderingConstraint, current: OrderingConstraint, - poly: PolyType, idx: Int, entry: T)(implicit ctx: Context): OrderingConstraint = { + poly: GenericType, idx: Int, entry: T)(implicit ctx: Context): OrderingConstraint = { var es = entries(current, poly) if (es != null && (es(idx) eq entry)) current else { @@ -70,7 +70,7 @@ object OrderingConstraint { update(prev, current, param.binder, param.paramNum, entry) def map(prev: OrderingConstraint, current: OrderingConstraint, - poly: PolyType, idx: Int, f: T => T)(implicit ctx: Context): OrderingConstraint = + poly: GenericType, idx: Int, f: T => T)(implicit ctx: Context): OrderingConstraint = update(prev, current, poly, idx, f(apply(current, poly, idx))) def map(prev: OrderingConstraint, current: OrderingConstraint, @@ -79,25 +79,25 @@ object OrderingConstraint { } val boundsLens = new ConstraintLens[Type] { - def entries(c: OrderingConstraint, poly: PolyType): Array[Type] = + def entries(c: OrderingConstraint, poly: GenericType): Array[Type] = c.boundsMap(poly) - def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[Type])(implicit ctx: Context): OrderingConstraint = + def updateEntries(c: OrderingConstraint, poly: GenericType, entries: Array[Type])(implicit ctx: Context): OrderingConstraint = newConstraint(c.boundsMap.updated(poly, entries), c.lowerMap, c.upperMap) def initial = NoType } val lowerLens = new ConstraintLens[List[PolyParam]] { - def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] = + def entries(c: OrderingConstraint, poly: GenericType): Array[List[PolyParam]] = c.lowerMap(poly) - def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint = + def updateEntries(c: OrderingConstraint, poly: GenericType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint = newConstraint(c.boundsMap, c.lowerMap.updated(poly, entries), c.upperMap) def initial = Nil } val upperLens = new ConstraintLens[List[PolyParam]] { - def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] = + def entries(c: OrderingConstraint, poly: GenericType): Array[List[PolyParam]] = c.upperMap(poly) - def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint = + def updateEntries(c: OrderingConstraint, poly: GenericType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint = newConstraint(c.boundsMap, c.lowerMap, c.upperMap.updated(poly, entries)) def initial 
= Nil } @@ -147,7 +147,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, // ----------- Contains tests -------------------------------------------------- - def contains(pt: PolyType): Boolean = boundsMap(pt) != null + def contains(pt: GenericType): Boolean = boundsMap(pt) != null def contains(param: PolyParam): Boolean = { val entries = boundsMap(param.binder) @@ -278,7 +278,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, stripParams(tp, paramBuf, isUpper) .orElse(if (isUpper) defn.AnyType else defn.NothingType) - def add(poly: PolyType, tvars: List[TypeVar])(implicit ctx: Context): This = { + def add(poly: GenericType, tvars: List[TypeVar])(implicit ctx: Context): This = { assert(!contains(poly)) val nparams = poly.paramNames.length val entries1 = new Array[Type](nparams * 2) @@ -291,7 +291,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, * Update all bounds to be normalized and update ordering to account for * dependent parameters. */ - private def init(poly: PolyType)(implicit ctx: Context): This = { + private def init(poly: GenericType)(implicit ctx: Context): This = { var current = this val loBuf, hiBuf = new mutable.ListBuffer[PolyParam] var i = 0 @@ -398,7 +398,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def removeParam(ps: List[PolyParam]) = ps.filterNot(p => p.binder.eq(poly) && p.paramNum == idx) - def replaceParam(tp: Type, atPoly: PolyType, atIdx: Int): Type = tp match { + def replaceParam(tp: Type, atPoly: GenericType, atIdx: Int): Type = tp match { case bounds @ TypeBounds(lo, hi) => def recombine(andor: AndOrType, op: (Type, Boolean) => Type, isUpper: Boolean): Type = { @@ -438,9 +438,9 @@ class OrderingConstraint(private val boundsMap: ParamBounds, } } - def remove(pt: PolyType)(implicit ctx: Context): This = { + def remove(pt: GenericType)(implicit ctx: Context): This = { def removeFromOrdering(po: ParamOrdering) = { - def removeFromBoundss(key: PolyType, bndss: Array[List[PolyParam]]): Array[List[PolyParam]] = { + def removeFromBoundss(key: GenericType, bndss: Array[List[PolyParam]]): Array[List[PolyParam]] = { val bndss1 = bndss.map(_.filterConserve(_.binder ne pt)) if (bndss.corresponds(bndss1)(_ eq _)) bndss else bndss1 } @@ -449,7 +449,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap)) } - def isRemovable(pt: PolyType, removedParam: Int = -1): Boolean = { + def isRemovable(pt: GenericType, removedParam: Int = -1): Boolean = { val entries = boundsMap(pt) var noneLeft = true var i = paramCount(entries) @@ -467,13 +467,13 @@ class OrderingConstraint(private val boundsMap: ParamBounds, // ---------- Exploration -------------------------------------------------------- - def domainPolys: List[PolyType] = boundsMap.keys + def domainPolys: List[GenericType] = boundsMap.keys def domainParams: List[PolyParam] = for { (poly, entries) <- boundsMap.toList n <- 0 until paramCount(entries) - if isBounds(entries(n)) + if true || isBounds(entries(n)) } yield PolyParam(poly, n) def forallParams(p: PolyParam => Boolean): Boolean = { @@ -484,7 +484,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, true } - def foreachParam(p: (PolyType, Int) => Unit): Unit = + def foreachParam(p: (GenericType, Int) => Unit): Unit = boundsMap.foreachBinding { (poly, entries) => 0.until(poly.paramNames.length).foreach(p(poly, _)) } @@ -501,7 +501,7 @@ class OrderingConstraint(private val boundsMap: 
ParamBounds, override def checkClosed()(implicit ctx: Context): Unit = { def isFreePolyParam(tp: Type) = tp match { - case PolyParam(binder, _) => !contains(binder) + case PolyParam(binder: GenericType, _) => !contains(binder) case _ => false } def checkClosedType(tp: Type, where: String) = diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index 2692f57a2..46d93b753 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1127,7 +1127,7 @@ object SymDenotations { case tp: NamedType => hasSkolems(tp.prefix) case tp: RefinedType => hasSkolems(tp.parent) || hasSkolems(tp.refinedInfo) case tp: RecType => hasSkolems(tp.parent) - case tp: PolyType => tp.paramBounds.exists(hasSkolems) || hasSkolems(tp.resType) + case tp: GenericType => tp.paramBounds.exists(hasSkolems) || hasSkolems(tp.resType) case tp: MethodType => tp.paramTypes.exists(hasSkolems) || hasSkolems(tp.resType) case tp: ExprType => hasSkolems(tp.resType) case tp: HKApply => hasSkolems(tp.tycon) || tp.args.exists(hasSkolems) @@ -1658,6 +1658,8 @@ object SymDenotations { case _ => baseTypeRefOf(tp.underlying) } + case tp: HKApply => + baseTypeRefOf(tp.upperBound) // TODO drop? case tp: TypeProxy => baseTypeRefOf(tp.underlying) case AndType(tp1, tp2) => diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 60d01c125..d7c73c63f 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -687,7 +687,7 @@ class TypeApplications(val self: Type) extends AnyVal { final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { if (args.isEmpty || ctx.erasedTypes) self else self.stripTypeVar match { // TODO investigate why we can't do safeDealias here - case self: PolyType if !args.exists(_.isInstanceOf[TypeBounds]) => + case self: GenericType if !args.exists(_.isInstanceOf[TypeBounds]) => self.instantiate(args) case EtaExpansion(self1) => self1.appliedTo(args) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 9449787c1..68a263dfc 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -559,6 +559,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { */ def compareHkApply2(tp1: Type, tp2: Type, tycon2: Type, args2: List[Type]): Boolean = { val tparams = tycon2.typeParams + assert(tparams.nonEmpty) def isMatchingApply(tp1: Type): Boolean = tp1 match { case HKApply(tycon1, args1) => @@ -1087,10 +1088,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { formals2.isEmpty } - /** Do poly types `poly1` and `poly2` have type parameters that + /** Do generic types `poly1` and `poly2` have type parameters that * have the same bounds (after renaming one set to the other)? 
*/ - private def matchingTypeParams(poly1: PolyType, poly2: PolyType): Boolean = + private def matchingTypeParams(poly1: GenericType, poly2: GenericType): Boolean = (poly1.paramBounds corresponds poly2.paramBounds)((b1, b2) => isSameType(b1, b2.subst(poly2, poly1))) @@ -1312,7 +1313,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { else if (tparams1.length != tparams2.length) mergeConflict(tp1, tp2) else if (Config.newHK) { val numArgs = tparams1.length - def argRefs(tl: PolyType) = List.range(0, numArgs).map(PolyParam(tl, _)) + def argRefs(tl: GenericType) = List.range(0, numArgs).map(PolyParam(tl, _)) TypeLambda( paramNames = tpnme.syntheticLambdaParamNames(numArgs), variances = (tparams1, tparams2).zipped.map((tparam1, tparam2) => @@ -1382,10 +1383,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case _ => mergeConflict(tp1, tp2) } - case tp1: PolyType => + case tp1: GenericType => tp2 match { - case tp2: PolyType if matchingTypeParams(tp1, tp2) => - tp1.derivedPolyType( + case tp2: GenericType if matchingTypeParams(tp1, tp2) => + tp1.derivedGenericType( mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName), tp1.paramBounds, tp1.resultType & tp2.resultType.subst(tp2, tp1)) case _ => @@ -1438,10 +1439,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case _ => mergeConflict(tp1, tp2) } - case tp1: PolyType => + case tp1: GenericType => tp2 match { - case tp2: PolyType if matchingTypeParams(tp1, tp2) => - tp1.derivedPolyType( + case tp2: GenericType if matchingTypeParams(tp1, tp2) => + tp1.derivedGenericType( mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName), tp1.paramBounds, tp1.resultType | tp2.resultType.subst(tp2, tp1)) case _ => diff --git a/src/dotty/tools/dotc/core/TyperState.scala b/src/dotty/tools/dotc/core/TyperState.scala index 36f026107..e64335218 100644 --- a/src/dotty/tools/dotc/core/TyperState.scala +++ b/src/dotty/tools/dotc/core/TyperState.scala @@ -127,7 +127,7 @@ extends TyperState(r) { } override def gc()(implicit ctx: Context): Unit = { - val toCollect = new mutable.ListBuffer[PolyType] + val toCollect = new mutable.ListBuffer[GenericType] constraint foreachTypeVar { tvar => if (!tvar.inst.exists) { val inst = instType(tvar) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index ca60f08a6..ac0048e16 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -67,7 +67,8 @@ object Types { * +- MethodType -----+- ImplicitMethodType * | +- JavaMethodType * +- ClassInfo - * +- PolyType --------- TypeLambda + * +- GenericType ----+- PolyType + * | +- TypeLambda * | * +- NoType * +- NoPrefix @@ -1082,10 +1083,10 @@ object Types { } - /** The parameter types in the first parameter section of a PolyType or MethodType, Empty list for others */ + /** The parameter types in the first parameter section of a generic type or MethodType, Empty list for others */ final def firstParamTypes(implicit ctx: Context): List[Type] = this match { case mt: MethodType => mt.paramTypes - case pt: PolyType => pt.resultType.firstParamTypes + case pt: GenericType => pt.resultType.firstParamTypes case _ => Nil } @@ -2572,8 +2573,9 @@ object Types { } } - abstract case class PolyType(paramNames: List[TypeName])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type) - extends CachedGroundType with BindingType with TermType with MethodOrPoly { + /** A 
common superclass of PolyType and TypeLambda */ + abstract class GenericType(val paramNames: List[TypeName])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type) + extends CachedGroundType with BindingType with TermType { val paramBounds = paramBoundsExp(this) val resType = resultTypeExp(this) @@ -2585,32 +2587,17 @@ object Types { /** If this is a type lambda, the variances of its parameters, otherwise Nil.*/ def variances: List[Int] = Nil - protected def computeSignature(implicit ctx: Context) = resultSignature - - def isPolymorphicMethodType: Boolean = resType match { - case _: MethodType => true - case _ => false - } - final def instantiate(argTypes: List[Type])(implicit ctx: Context): Type = resultType.substParams(this, argTypes) def instantiateBounds(argTypes: List[Type])(implicit ctx: Context): List[TypeBounds] = paramBounds.mapConserve(_.substParams(this, argTypes).bounds) - def derivedPolyType(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context) = + def derivedGenericType(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context) = if ((paramNames eq this.paramNames) && (paramBounds eq this.paramBounds) && (resType eq this.resType)) this else duplicate(paramNames, paramBounds, resType) - def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context) = - if (this.variances.isEmpty) - PolyType(paramNames)( - x => paramBounds mapConserve (_.subst(this, x).bounds), - x => resType.subst(this, x)) - else - TypeLambda(paramNames, variances)( - x => paramBounds mapConserve (_.subst(this, x).bounds), - x => resType.subst(this, x)) + def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): GenericType def lifted(tparams: List[MemberBinding], t: Type)(implicit ctx: Context): Type = tparams match { @@ -2621,26 +2608,44 @@ object Types { } override def equals(other: Any) = other match { - case other: PolyType => + case other: GenericType => other.paramNames == this.paramNames && other.paramBounds == this.paramBounds && other.resType == this.resType && other.variances == this.variances case _ => false } + override def computeHash = { doHash(variances ::: paramNames, resType, paramBounds) } + } + + /** A type for polymorphic methods */ + class PolyType(paramNames: List[TypeName])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type) + extends GenericType(paramNames)(paramBoundsExp, resultTypeExp) with MethodOrPoly { + + protected def computeSignature(implicit ctx: Context) = resultSignature + + def isPolymorphicMethodType: Boolean = resType match { + case _: MethodType => true + case _ => false + } + + def derivedPolyType(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context) = + derivedGenericType(paramNames, paramBounds, resType) + + def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): PolyType = + PolyType(paramNames)( + x => paramBounds mapConserve (_.subst(this, x).bounds), + x => resType.subst(this, x)) override def toString = s"PolyType($paramNames, $paramBounds, $resType)" } - class CachedPolyType(paramNames: List[TypeName])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type) - extends 
PolyType(paramNames)(paramBoundsExp, resultTypeExp) - object PolyType { - def apply(paramNames: List[TypeName])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)(implicit ctx: Context): PolyType = { - unique(new CachedPolyType(paramNames)(paramBoundsExp, resultTypeExp)) + def apply(paramNames: List[TypeName])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type)(implicit ctx: Context): PolyType = { + unique(new PolyType(paramNames)(paramBoundsExp, resultTypeExp)) } def fromSymbols(tparams: List[Symbol], resultType: Type)(implicit ctx: Context) = @@ -2653,16 +2658,24 @@ object Types { // ----- HK types: TypeLambda, LambdaParam, HKApply --------------------- /** A type lambda of the form `[v_0 X_0, ..., v_n X_n] => T` */ - class TypeLambda(paramNames: List[TypeName], override val variances: List[Int])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type) - extends PolyType(paramNames)(paramBoundsExp, resultTypeExp) with ValueType { + class TypeLambda(paramNames: List[TypeName], override val variances: List[Int])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type) + extends GenericType(paramNames)(paramBoundsExp, resultTypeExp) with ValueType { assert(resType.isValueType, this) + assert(paramNames.nonEmpty) lazy val typeParams: List[LambdaParam] = paramNames.indices.toList.map(new LambdaParam(this, _)) - override def toString = s"TypeLambda($variances, $paramNames, $paramBounds, $resType)" + def derivedTypeLambda(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context) = + derivedGenericType(paramNames, paramBounds, resType) + def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): TypeLambda = + TypeLambda(paramNames, variances)( + x => paramBounds mapConserve (_.subst(this, x).bounds), + x => resType.subst(this, x)) + + override def toString = s"TypeLambda($variances, $paramNames, $paramBounds, $resType)" } /** The parameter of a type lambda */ @@ -2676,7 +2689,7 @@ object Types { } object TypeLambda { - def apply(paramNames: List[TypeName], variances: List[Int])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)(implicit ctx: Context): PolyType = { + def apply(paramNames: List[TypeName], variances: List[Int])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type)(implicit ctx: Context): TypeLambda = { unique(new TypeLambda(paramNames, variances)(paramBoundsExp, resultTypeExp)) } def fromSymbols(tparams: List[Symbol], resultType: Type)(implicit ctx: Context) = @@ -2720,9 +2733,14 @@ object Types { */ protected def checkInst(implicit ctx: Context): this.type = { def check(tycon: Type): Unit = tycon.stripTypeVar match { - case _: TypeRef | _: PolyParam | ErrorType => - case tycon: AnnotatedType => check(tycon.underlying) - case _ => assert(false, s"illegal type constructor in $this") + case tycon: TypeRef if !tycon.symbol.isClass => + case _: PolyParam | ErrorType => + case _: TypeLambda => + assert(args.exists(_.isInstanceOf[TypeBounds]), s"unreduced type apply: $this") + case tycon: AnnotatedType => + check(tycon.underlying) + case _ => + assert(false, s"illegal type constructor in $this") } check(tycon) this @@ -2783,8 +2801,8 @@ object Types { } /** TODO Some docs would be nice here! 
*/ - case class PolyParam(binder: PolyType, paramNum: Int) extends ParamType { - type BT = PolyType + case class PolyParam(binder: GenericType, paramNum: Int) extends ParamType { + type BT = GenericType def copyBoundType(bt: BT) = PolyParam(bt, paramNum) /** Looking only at the structure of `bound`, is one of the following true? @@ -2804,7 +2822,7 @@ object Types { override def underlying(implicit ctx: Context): Type = { val bounds = binder.paramBounds - if (bounds == null) NoType // this can happen if the PolyType is not initialized yet + if (bounds == null) NoType // this can happen if the references generic type is not initialized yet else bounds(paramNum) } // no customized hashCode/equals needed because cycle is broken in PolyType @@ -3436,7 +3454,8 @@ object Types { protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type): Type = tp.derivedSuperType(thistp, supertp) protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type = - tp.derivedAppliedType(tycon, args) + if (tycon.typeParams.isEmpty) tycon + else tp.derivedAppliedType(tycon, args) protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type): Type = tp.derivedAndOrType(tp1, tp2) protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type = @@ -3451,8 +3470,8 @@ object Types { tp.derivedMethodType(tp.paramNames, formals, restpe) protected def derivedExprType(tp: ExprType, restpe: Type): Type = tp.derivedExprType(restpe) - protected def derivedPolyType(tp: PolyType, pbounds: List[TypeBounds], restpe: Type): Type = - tp.derivedPolyType(tp.paramNames, pbounds, restpe) + protected def derivedGenericType(tp: GenericType, pbounds: List[TypeBounds], restpe: Type): Type = + tp.derivedGenericType(tp.paramNames, pbounds, restpe) /** Map this function over given type */ def mapOver(tp: Type): Type = { @@ -3494,12 +3513,12 @@ object Types { case tp: ExprType => derivedExprType(tp, this(tp.resultType)) - case tp: PolyType => + case tp: GenericType => def mapOverPoly = { variance = -variance val bounds1 = tp.paramBounds.mapConserve(this).asInstanceOf[List[TypeBounds]] variance = -variance - derivedPolyType(tp, bounds1, this(tp.resultType)) + derivedGenericType(tp, bounds1, this(tp.resultType)) } mapOverPoly @@ -3713,7 +3732,7 @@ object Types { case ExprType(restpe) => this(x, restpe) - case tp @ PolyType(pnames) => + case tp: GenericType => variance = -variance val y = foldOver(x, tp.paramBounds) variance = -variance diff --git a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 6f0596ac0..c428ac8c0 100644 --- a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -311,7 +311,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) { result case PARAMtype => readTypeRef() match { - case binder: PolyType => PolyParam(binder, readNat()) + case binder: GenericType => PolyParam(binder, readNat()) case binder: MethodType => MethodParam(binder, readNat()) } case CLASSconst => diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index aa660f73e..239dd4124 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -82,8 +82,8 @@ object Scala2Unpickler { paramNames, paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp), tp.resultType) - case tp @ 
PolyType(paramNames) => - tp.derivedPolyType(paramNames, tp.paramBounds, arrayToRepeated(tp.resultType)) + case tp: PolyType => + tp.derivedPolyType(tp.paramNames, tp.paramBounds, arrayToRepeated(tp.resultType)) } def ensureConstructor(cls: ClassSymbol, scope: Scope)(implicit ctx: Context) = diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index e86137c47..d1a0560f2 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -229,7 +229,7 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def simpleNameString(sym: Symbol): String = nameString(sym.name) /** If -uniqid is set, the hashcode of the polytype, after a # */ - protected def polyHash(pt: PolyType): Text = + protected def polyHash(pt: GenericType): Text = "#" + pt.hashCode provided ctx.settings.uniqid.value /** If -uniqid is set, the unique id of symbol, after a # */ diff --git a/src/dotty/tools/dotc/transform/FullParameterization.scala b/src/dotty/tools/dotc/transform/FullParameterization.scala index be64df384..d2052d8cb 100644 --- a/src/dotty/tools/dotc/transform/FullParameterization.scala +++ b/src/dotty/tools/dotc/transform/FullParameterization.scala @@ -95,7 +95,7 @@ trait FullParameterization { */ def fullyParameterizedType(info: Type, clazz: ClassSymbol, abstractOverClass: Boolean = true, liftThisType: Boolean = false)(implicit ctx: Context): Type = { val (mtparamCount, origResult) = info match { - case info @ PolyType(mtnames) => (mtnames.length, info.resultType) + case info: PolyType => (info.paramNames.length, info.resultType) case info: ExprType => (0, info.resultType) case _ => (0, info) } @@ -111,18 +111,18 @@ trait FullParameterization { } /** Replace class type parameters by the added type parameters of the polytype `pt` */ - def mapClassParams(tp: Type, pt: PolyType): Type = { + def mapClassParams(tp: Type, pt: GenericType): Type = { val classParamsRange = (mtparamCount until mtparamCount + ctparams.length).toList tp.substDealias(ctparams, classParamsRange map (PolyParam(pt, _))) } /** The bounds for the added type parameters of the polytype `pt` */ - def mappedClassBounds(pt: PolyType): List[TypeBounds] = + def mappedClassBounds(pt: GenericType): List[TypeBounds] = ctparams.map(tparam => mapClassParams(tparam.info, pt).bounds) info match { - case info @ PolyType(mtnames) => - PolyType(mtnames ++ ctnames)( + case info: PolyType => + PolyType(info.paramNames ++ ctnames)( pt => (info.paramBounds.map(mapClassParams(_, pt).bounds) ++ mappedClassBounds(pt)).mapConserve(_.subst(info, pt).bounds), diff --git a/src/dotty/tools/dotc/transform/PostTyper.scala b/src/dotty/tools/dotc/transform/PostTyper.scala index fcde59b24..6b0b2b073 100644 --- a/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/src/dotty/tools/dotc/transform/PostTyper.scala @@ -178,9 +178,9 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran } val (tycon, args) = decompose(tree) tycon.tpe.widen match { - case PolyType(pnames) => + case tp: PolyType => val (namedArgs, otherArgs) = args.partition(isNamedArg) - val args1 = reorderArgs(pnames, namedArgs.asInstanceOf[List[NamedArg]], otherArgs) + val args1 = reorderArgs(tp.paramNames, namedArgs.asInstanceOf[List[NamedArg]], otherArgs) TypeApply(tycon, args1).withPos(tree.pos).withType(tree.tpe) case _ => tree diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index cfad4e77e..9f218c3f1 100644 --- 
a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -51,7 +51,7 @@ object Checking { * Note: This does not check the bounds of AppliedTypeTrees. These * are handled by method checkBounds in FirstTransform */ - def checkBounds(args: List[tpd.Tree], poly: PolyType)(implicit ctx: Context): Unit = + def checkBounds(args: List[tpd.Tree], poly: GenericType)(implicit ctx: Context): Unit = checkBounds(args, poly.paramBounds, _.substParams(poly, _)) /** Traverse type tree, performing the following checks: diff --git a/src/dotty/tools/dotc/typer/ProtoTypes.scala b/src/dotty/tools/dotc/typer/ProtoTypes.scala index 68fd99b3f..9a1337022 100644 --- a/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -286,7 +286,7 @@ object ProtoTypes { override def isMatchedBy(tp: Type)(implicit ctx: Context) = { def isInstantiatable(tp: Type) = tp.widen match { - case PolyType(paramNames) => paramNames.length == targs.length + case tp: GenericType => tp.paramNames.length == targs.length case _ => false } isInstantiatable(tp) || tp.member(nme.apply).hasAltWith(d => isInstantiatable(d.info)) diff --git a/src/dotty/tools/dotc/typer/Variances.scala b/src/dotty/tools/dotc/typer/Variances.scala index 9af11a0f4..a2f9a0f16 100644 --- a/src/dotty/tools/dotc/typer/Variances.scala +++ b/src/dotty/tools/dotc/typer/Variances.scala @@ -94,7 +94,7 @@ object Variances { v } varianceInArgs(varianceInType(tycon)(tparam), args, tycon.typeParams) - case tp @ PolyType(_) => + case tp: GenericType => flip(varianceInTypes(tp.paramBounds)(tparam)) & varianceInType(tp.resultType)(tparam) case AnnotatedType(tp, annot) => varianceInType(tp)(tparam) & varianceInAnnot(annot)(tparam) -- cgit v1.2.3 From c28dd1b84ad611bb51e096f2a973c2157569ea86 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:09:12 +0200 Subject: Tweaks to appliedTo - Allow appliedTo over TypeBounds - Difference in handling of empty type parameter lists - Always consider EtaExpansions in appliedTo EtaExpanions were not simplified before in Coder.scala and collections.scala. Need to come back and simplify appliedTo logic. 
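(Illustration only, not part of the patch: a minimal Scala-level sketch of where the extra eta-expansion handling shows up. The Functor trait and its member are invented names; under the new hk scheme a class constructor passed as a higher-kinded argument may be eta-expanded to a type lambda, and this change lets applications of such a lambda simplify back to an ordinary class application.)

    object EtaSimplifySketch {
      trait Functor[F[_]] { def pure[A](a: A): F[A] }
      // List is used where a type constructor is expected; if it is
      // eta-expanded internally, F[A] must still simplify to List[A]
      // for the override below to match.
      val listFunctor: Functor[List] = new Functor[List] {
        def pure[A](a: A): List[A] = List(a)
      }
    }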
--- src/dotty/tools/dotc/core/TypeApplications.scala | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index d7c73c63f..94e09eaf0 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -118,7 +118,8 @@ object TypeApplications { */ object EtaExpansion { def apply(tycon: Type)(implicit ctx: Context) = { - if (!Config.newHK) assert(tycon.isEtaExpandableOLD) + if (Config.newHK) assert(tycon.typeParams.nonEmpty, tycon) + else assert(tycon.isEtaExpandableOLD) tycon.EtaExpand(tycon.typeParamSymbols) } @@ -729,13 +730,19 @@ class TypeApplications(val self: Type) extends AnyVal { } assert(args.nonEmpty) self.stripTypeVar match { - case self: TypeLambda if !args.exists(_.isInstanceOf[TypeBounds]) => - self.instantiate(args) + case self: TypeLambda => + if (!args.exists(_.isInstanceOf[TypeBounds])) self.instantiate(args) + else self match { + case EtaExpansion(selfTycon) => selfTycon.appliedTo(args) + case _ => HKApply(self, args) + } case self: AndOrType => self.derivedAndOrType(self.tp1.appliedTo(args), self.tp2.appliedTo(args)) + case self: TypeBounds => + self.derivedTypeBounds(self.lo, self.hi.appliedTo(args)) case self: LazyRef => LazyRef(() => self.ref.appliedTo(args, typParams)) - case _ if typParams.isEmpty || typParams.head.isInstanceOf[LambdaParam] => + case _ if typParams.nonEmpty && typParams.head.isInstanceOf[LambdaParam] => HKApply(self, args) case _ => matchParams(self, typParams, args) match { -- cgit v1.2.3 From c1e27a035cccda42542e24f3fa6a57c6955c9923 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:10:09 +0200 Subject: Change underlying of HKApply It's now the same as upperBound, i.e. the underlying of the type constructor re-applied to the arguments. 
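(Illustrative sketch, not taken from the patch: `Holder`, `F` and `xs` are invented names. The point is that an unreducible application such as F[Int] now has the constructor's bound re-applied to the argument as its underlying type.)

    object UnderlyingSketch {
      trait Holder {
        type F[X] <: Iterable[X]   // abstract, bounded type constructor
        def xs: F[Int]             // an application of F that cannot be reduced
      }
      // Member selection on `xs` goes through the upper bound applied to
      // the argument, i.e. Iterable[Int]:
      def first(h: Holder): Int = h.xs.head
    }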
--- src/dotty/tools/dotc/core/Types.scala | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index ac0048e16..a9fc294cd 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -152,6 +152,10 @@ object Types { case tp: TypeRef => val sym = tp.symbol if (sym.isClass) sym.derivesFrom(cls) else tp.underlying.derivesFrom(cls) + case tp: TypeLambda => + tp.resType.derivesFrom(cls) + case tp: HKApply => + tp.tycon.derivesFrom(cls) case tp: TypeProxy => tp.underlying.derivesFrom(cls) case tp: AndType => @@ -458,6 +462,8 @@ object Types { goParam(tp) case tp: RecType => goRec(tp) + case tp: HKApply => + go(tp.upperBound) case tp: TypeProxy => go(tp.underlying) case tp: ClassInfo => @@ -512,6 +518,7 @@ object Types { if (!rt.openedTwice) rt.opened = false } } + def goRefined(tp: RefinedType) = { val pdenot = go(tp.parent) val rinfo = tp.refinedInfo @@ -540,6 +547,7 @@ object Types { safeIntersection = ctx.pendingMemberSearches.contains(name)) } } + def goThis(tp: ThisType) = { val d = go(tp.underlying) if (d.exists) @@ -2708,14 +2716,14 @@ object Types { /** A higher kinded type application `C[T_1, ..., T_n]` */ abstract case class HKApply(tycon: Type, args: List[Type]) extends CachedProxyType with ValueType { - override def underlying(implicit ctx: Context): Type = tycon + override def underlying(implicit ctx: Context): Type = upperBound def derivedAppliedType(tycon: Type, args: List[Type])(implicit ctx: Context): Type = if ((tycon eq this.tycon) && (args eq this.args)) this else tycon.appliedTo(args) override def computeHash = doHash(tycon, args) - def upperBound(implicit ctx: Context): Type = tycon.stripTypeVar match { + def upperBound(implicit ctx: Context): Type = tycon match { case tp: TypeProxy => tp.underlying.appliedTo(args) case _ => defn.AnyType } -- cgit v1.2.3 From 98b466c14cda8ea5a9d2e8efdfada5b735bb4d95 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Jun 2016 13:01:16 +0200 Subject: Ensure wildApprox maintains kinds When approximating the constructor of an ak apply to a wildcard type, approximate the whole application by WildcardType. Otherwise we might have a wildcardtype with hk bounds as result. 
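(Hedged illustration, not from the patch; the names below are invented. wildApprox is used, among other places, to approximate expected types during implicit search; the commit ensures that an application whose constructor is itself approximated away collapses to a plain kind-* wildcard rather than a wildcard with higher-kinded bounds applied to arguments.)

    object WildApproxSketch {
      trait Functor[F[_]]
      implicit val listFunctor: Functor[List] = new Functor[List] {}
      // While F is still undetermined, occurrences of F[A] in types that
      // get wildcard-approximated should become a single * wildcard.
      def keep[F[_], A](fa: F[A])(implicit ev: Functor[F]): F[A] = fa
      val r: List[Int] = keep(List(1, 2, 3))
    }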
--- src/dotty/tools/dotc/typer/ProtoTypes.scala | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/dotty/tools/dotc/typer/ProtoTypes.scala b/src/dotty/tools/dotc/typer/ProtoTypes.scala index 9a1337022..3a13212a3 100644 --- a/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -405,6 +405,11 @@ object ProtoTypes { WildcardType(TypeBounds.upper(wildApprox(mt.paramTypes(pnum)))) case tp: TypeVar => wildApprox(tp.underlying) + case tp @ HKApply(tycon, args) => + wildApprox(tycon) match { + case _: WildcardType => WildcardType // this ensures we get a * type + case tycon1 => tp.derivedAppliedType(tycon1, args.mapConserve(wildApprox(_))) + } case tp: AndType => val tp1a = wildApprox(tp.tp1) val tp2a = wildApprox(tp.tp2) -- cgit v1.2.3 From 0965e1a33fe27edbaa98e7bdf8a635cbc4e19b7d Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Jun 2016 13:02:53 +0200 Subject: Handle TypeLambdas in findMember --- src/dotty/tools/dotc/core/TypeComparer.scala | 8 ++------ src/dotty/tools/dotc/core/Types.scala | 9 +++++---- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 68a263dfc..58d286951 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -1624,12 +1624,8 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx) override def compareHkApply2(tp1: Type, tp2: Type, tycon2: Type, args2: List[Type]): Boolean = { - def addendum = tycon2 match { - case param2: PolyParam => - i": it's a polyparam with entry ${ctx.typerState.constraint.entry(param2)}" - case _ => - } - traceIndented(i"compareHkApply $tp1, $tp2, $addendum") { + def addendum = "" + traceIndented(i"compareHkApply $tp1, $tp2$addendum") { super.compareHkApply2(tp1, tp2, tycon2, args2) } } diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index a9fc294cd..c23050c19 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -462,12 +462,12 @@ object Types { goParam(tp) case tp: RecType => goRec(tp) - case tp: HKApply => - go(tp.upperBound) case tp: TypeProxy => go(tp.underlying) case tp: ClassInfo => tp.cls.findMember(name, pre, excluded) + case tp: TypeLambda => + go(tp.resType) case AndType(l, r) => goAnd(l, r) case OrType(l, r) => @@ -2302,7 +2302,7 @@ object Types { object AndType { def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = { - assert(tp1.isInstanceOf[ValueType] && tp2.isInstanceOf[ValueType]) + assert(tp1.isInstanceOf[ValueType] && tp2.isInstanceOf[ValueType], i"$tp1 & $tp2 / " + s"$tp1 & $tp2") if (Config.checkKinds) assert((tp1.knownHK - tp2.knownHK).abs <= 1, i"$tp1 & $tp2 / " + s"$tp1 & $tp2") unchecked(tp1, tp2) @@ -2742,7 +2742,7 @@ object Types { protected def checkInst(implicit ctx: Context): this.type = { def check(tycon: Type): Unit = tycon.stripTypeVar match { case tycon: TypeRef if !tycon.symbol.isClass => - case _: PolyParam | ErrorType => + case _: PolyParam | ErrorType | _: WildcardType => case _: TypeLambda => assert(args.exists(_.isInstanceOf[TypeBounds]), s"unreduced type apply: $this") case tycon: AnnotatedType => @@ -3554,6 +3554,7 @@ object Types { try this(arg) finally variance = saved } + assert(tp.args.length == tp.typeParams.length, tp) derivedAppliedType(tp, this(tp.tycon), 
tp.args.zipWithConserve(tp.typeParams)(mapArg)) -- cgit v1.2.3 From 65c26bacbba91850922cfffabfbc6847102bc489 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 21 Jun 2016 17:24:03 +0200 Subject: Avoid cyclic errors when reading nsc.Global With the change in the next commit this addition is needed to make i859.scala compile. Previously the same effect was achieved accidentally by `updateTypeParams`. The comment admits that we do not really know why the functionality is needed. --- src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 239dd4124..2ea911380 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -135,6 +135,13 @@ object Scala2Unpickler { denot.info = ClassInfo( // final info, except possibly for typeparams ordering denot.owner.thisType, denot.classSymbol, parentRefs, decls, ost) denot.updateTypeParams(tparams) + + // Curiously the following line is needed to make pos/i859.scala compile. + // This test simply accesses scala.tools.nsc.Global. I could not track down why + // the reference is needed - referencing any field of the type parameter + // does the trick, no completion is needed (in fact such completion would + // cause cyclic references elsewhere). + assert(denot.typeParams.forall(_.exists)) } } -- cgit v1.2.3 From 1e48758ad5c100a7dd4d1a5b846ef5ff37e37721 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:12:25 +0200 Subject: Refactor handling of unpickled type params Under the new hk scheme we discovered that type parameters are sometimes unpickled in the wrong order. The fault was always present but the previous hk schemes were somehow lenient enough in their subtyping rules to not discover the problem. E.g., when reading Coder.scala, dotc believed that parameter `A` of `TraversableOnce#BufferedCanBuildFrom` is higher-kinded and parameter `CC` is first-order where the opposite is true. This commit hardens the way we read type parameters in order to make this swap impossible by design. - Revert auto-healing in derivedAppliedType The healing hid a real error about order of type parameters in Scala2 unpickling which was fixed in the previous commits. The healing caused Map.scala to fail because it is possible that type parameters are mis-prediced to be Nil in an F-bounded context. - Smallish fixes to type applications --- src/dotty/tools/dotc/core/SymDenotations.scala | 36 +++++++++----- src/dotty/tools/dotc/core/TypeApplications.scala | 9 ++-- src/dotty/tools/dotc/core/Types.scala | 8 ++- .../dotc/core/classfile/ClassfileParser.scala | 4 +- .../dotc/core/unpickleScala2/Scala2Unpickler.scala | 48 ++++++++++++------ src/dotty/tools/dotc/typer/Checking.scala | 5 +- src/dotty/tools/dotc/typer/Namer.scala | 57 ++++++++++++---------- 7 files changed, 103 insertions(+), 64 deletions(-) diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index 46d93b753..54884a24c 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1213,15 +1213,25 @@ object SymDenotations { private[this] var myNamedTypeParams: Set[TypeSymbol] = _ + /** The type parameters in this class, in the order they appear in the current + * scope `decls`. 
This is might be temporarily the incorrect order when + * reading Scala2 pickled info. The problem is fixed by `updateTypeParams` + * which is called once an unpickled symbol has been completed. + */ + private def typeParamsFromDecls(implicit ctx: Context) = + unforcedDecls.filter(sym => + (sym is TypeParam) && sym.owner == symbol).asInstanceOf[List[TypeSymbol]] + /** The type parameters of this class */ override final def typeParams(implicit ctx: Context): List[TypeSymbol] = { - def computeTypeParams = { - if (ctx.erasedTypes || is(Module)) Nil // fast return for modules to avoid scanning package decls - else if (this ne initial) initial.asSymDenotation.typeParams - else unforcedDecls.filter(sym => - (sym is TypeParam) && sym.owner == symbol).asInstanceOf[List[TypeSymbol]] - } - if (myTypeParams == null) myTypeParams = computeTypeParams + if (myTypeParams == null) + myTypeParams = + if (ctx.erasedTypes || is(Module)) Nil // fast return for modules to avoid scanning package decls + else if (this ne initial) initial.asSymDenotation.typeParams + else infoOrCompleter match { + case info: TypeParamsCompleter => info.completerTypeParams(symbol) + case _ => typeParamsFromDecls + } myTypeParams } @@ -1537,16 +1547,16 @@ object SymDenotations { if (myMemberCache != null) myMemberCache invalidate sym.name } - /** Make sure the type parameters of this class are `tparams`, reorder definitions - * in scope if necessary. + /** Make sure the type parameters of this class appear in the order given + * by `tparams` in the scope of the class. Reorder definitions in scope if necessary. * @pre All type parameters in `tparams` are entered in class scope `info.decls`. */ def updateTypeParams(tparams: List[Symbol])(implicit ctx: Context): Unit = - if (!typeParams.corresponds(tparams)(_.name == _.name)) { + if (!ctx.erasedTypes && !typeParamsFromDecls.corresponds(typeParams)(_.name == _.name)) { val decls = info.decls val decls1 = newScope for (tparam <- tparams) decls1.enter(decls.lookup(tparam.name)) - for (sym <- decls) if (!typeParams.contains(sym)) decls1.enter(sym) + for (sym <- decls) if (!tparams.contains(sym)) decls1.enter(sym) info = classInfo.derivedClassInfo(decls = decls1) myTypeParams = null } @@ -1868,9 +1878,9 @@ object SymDenotations { /** A subclass of LazyTypes where type parameters can be completed independently of * the info. 
*/ - abstract class TypeParamsCompleter extends LazyType { + trait TypeParamsCompleter extends LazyType { /** The type parameters computed by the completer before completion has finished */ - def completerTypeParams(sym: Symbol): List[TypeSymbol] + def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol] } val NoSymbolFn = (ctx: Context) => NoSymbol diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 94e09eaf0..274fc8ff8 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -4,7 +4,7 @@ package core import Types._ import Contexts._ import Symbols._ -import SymDenotations.TypeParamsCompleter +import SymDenotations.{LazyType, TypeParamsCompleter} import Decorators._ import util.Stats._ import util.common._ @@ -191,7 +191,8 @@ object TypeApplications { else { def bounds(tparam: MemberBinding) = tparam match { case tparam: Symbol => tparam.infoOrCompleter - case tparam: RefinedType => tparam.memberBounds + case tparam: RefinedType if !Config.newHK => tparam.memberBounds + case tparam: LambdaParam => tparam.memberBounds } args.zipWithConserve(tparams)((arg, tparam) => arg.etaExpandIfHK(bounds(tparam))) } @@ -380,7 +381,7 @@ class TypeApplications(val self: Type) extends AnyVal { case self: WildcardType => self.optBounds.knownHK case self: PolyParam => self.underlying.knownHK case self: TypeProxy => self.underlying.knownHK - case NoType => 0 + case NoType | _: LazyType => 0 case _ => -1 } @@ -742,7 +743,7 @@ class TypeApplications(val self: Type) extends AnyVal { self.derivedTypeBounds(self.lo, self.hi.appliedTo(args)) case self: LazyRef => LazyRef(() => self.ref.appliedTo(args, typParams)) - case _ if typParams.nonEmpty && typParams.head.isInstanceOf[LambdaParam] => + case _ if typParams.isEmpty || typParams.head.isInstanceOf[LambdaParam] => HKApply(self, args) case _ => matchParams(self, typParams, args) match { diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index c23050c19..593dcb967 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1436,6 +1436,9 @@ object Types { else computeDenot } + /** Hook for adding debug check code when denotations are assigned */ + final def checkDenot()(implicit ctx: Context) = {} + /** A second fallback to recompute the denotation if necessary */ private def computeDenot(implicit ctx: Context): Denotation = { val savedEphemeral = ctx.typerState.ephemeral @@ -1471,6 +1474,7 @@ object Types { // Don't use setDenot here; double binding checks can give spurious failures after erasure lastDenotation = d + checkDenot() lastSymbol = d.symbol checkedPeriod = ctx.period } @@ -1542,6 +1546,7 @@ object Types { // additional checks that intercept `denot` can be added here lastDenotation = denot + checkDenot() lastSymbol = denot.symbol checkedPeriod = Nowhere } @@ -3462,8 +3467,7 @@ object Types { protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type): Type = tp.derivedSuperType(thistp, supertp) protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type = - if (tycon.typeParams.isEmpty) tycon - else tp.derivedAppliedType(tycon, args) + tp.derivedAppliedType(tycon, args) protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type): Type = tp.derivedAndOrType(tp1, tp2) protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type = diff --git 
a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 2d7b037b1..813376655 100644 --- a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -389,7 +389,7 @@ class ClassfileParser( } index += 1 } - val ownTypeParams = newTParams.toList + val ownTypeParams = newTParams.toList.asInstanceOf[List[TypeSymbol]] val tpe = if ((owner == null) || !owner.isClass) sig2type(tparams, skiptvs = false) @@ -584,7 +584,7 @@ class ClassfileParser( * a vararg argument. We solve this by creating two constructors, one with * an array, the other with a repeated parameter. */ - def addAnnotationConstructor(classInfo: Type, tparams: List[Symbol] = Nil)(implicit ctx: Context): Unit = { + def addAnnotationConstructor(classInfo: Type, tparams: List[TypeSymbol] = Nil)(implicit ctx: Context): Unit = { def addDefaultGetter(attr: Symbol, n: Int) = ctx.newSymbol( owner = moduleRoot.symbol, diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 2ea911380..1da92d723 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -31,7 +31,7 @@ object Scala2Unpickler { /** Exception thrown if classfile is corrupted */ class BadSignature(msg: String) extends RuntimeException(msg) - case class TempPolyType(tparams: List[Symbol], tpe: Type) extends UncachedGroundType { + case class TempPolyType(tparams: List[TypeSymbol], tpe: Type) extends UncachedGroundType { override def fallbackToText(printer: Printer): Text = "[" ~ printer.dclsText(tparams, ", ") ~ "]" ~ printer.toText(tpe) } @@ -195,8 +195,6 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas case _ => errorBadSignature(s"a runtime exception occurred: $ex", Some(ex)) } - private var postReadOp: Context => Unit = null - def run()(implicit ctx: Context) = try { var i = 0 @@ -204,10 +202,11 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas if (entries(i) == null && isSymbolEntry(i)) { val savedIndex = readIndex readIndex = index(i) - entries(i) = readSymbol() - if (postReadOp != null) { - postReadOp(ctx) - postReadOp = null + val sym = readSymbol() + entries(i) = sym + sym.infoOrCompleter match { + case info: ClassUnpickler => info.init() + case _ => } readIndex = savedIndex } @@ -493,20 +492,20 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas } ctx.newSymbol(owner, name1, flags1, localMemberUnpickler, coord = start) case CLASSsym => - val infoRef = readNat() - postReadOp = implicit ctx => atReadPos(index(infoRef), readTypeParams) // force reading type params early, so they get entered in the right order. 
+ var infoRef = readNat() + if (isSymbolRef(infoRef)) infoRef = readNat() if (isClassRoot) completeRoot( - classRoot, rootClassUnpickler(start, classRoot.symbol, NoSymbol)) + classRoot, rootClassUnpickler(start, classRoot.symbol, NoSymbol, infoRef)) else if (isModuleClassRoot) completeRoot( - moduleClassRoot, rootClassUnpickler(start, moduleClassRoot.symbol, moduleClassRoot.sourceModule)) + moduleClassRoot, rootClassUnpickler(start, moduleClassRoot.symbol, moduleClassRoot.sourceModule, infoRef)) else if (name == tpnme.REFINE_CLASS) // create a type alias instead ctx.newSymbol(owner, name, flags, localMemberUnpickler, coord = start) else { def completer(cls: Symbol) = { - val unpickler = new LocalUnpickler() withDecls symScope(cls) + val unpickler = new ClassUnpickler(infoRef) withDecls symScope(cls) if (flags is ModuleClass) unpickler withSourceModule (implicit ctx => cls.owner.info.decls.lookup(cls.name.sourceModuleName) @@ -589,8 +588,27 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas object localMemberUnpickler extends LocalUnpickler - def rootClassUnpickler(start: Coord, cls: Symbol, module: Symbol) = - (new LocalUnpickler with SymbolLoaders.SecondCompleter { + class ClassUnpickler(infoRef: Int) extends LocalUnpickler with TypeParamsCompleter { + private def readTypeParams()(implicit ctx: Context): List[TypeSymbol] = { + val tag = readByte() + val end = readNat() + readIndex + if (tag == POLYtpe) { + val unusedRestperef = readNat() + until(end, readSymbolRef).asInstanceOf[List[TypeSymbol]] + } else Nil + } + private def loadTypeParams(implicit ctx: Context) = + atReadPos(index(infoRef), readTypeParams) + + /** Force reading type params early, we need them in setClassInfo of subclasses. */ + def init()(implicit ctx: Context) = loadTypeParams + + def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol] = + loadTypeParams + } + + def rootClassUnpickler(start: Coord, cls: Symbol, module: Symbol, infoRef: Int) = + (new ClassUnpickler(infoRef) with SymbolLoaders.SecondCompleter { override def startCoord(denot: SymDenotation): Coord = start }) withDecls symScope(cls) withSourceModule (_ => module) @@ -756,7 +774,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas case POLYtpe => val restpe = readTypeRef() val typeParams = until(end, readSymbolRef) - if (typeParams.nonEmpty) TempPolyType(typeParams, restpe.widenExpr) + if (typeParams.nonEmpty) TempPolyType(typeParams.asInstanceOf[List[TypeSymbol]], restpe.widenExpr) else ExprType(restpe) case EXISTENTIALtpe => val restpe = readTypeRef() diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index 9f218c3f1..e77222beb 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -194,7 +194,7 @@ object Checking { case SuperType(thistp, _) => isInteresting(thistp) case AndType(tp1, tp2) => isInteresting(tp1) || isInteresting(tp2) case OrType(tp1, tp2) => isInteresting(tp1) && isInteresting(tp2) - case _: RefinedOrRecType => true + case _: RefinedOrRecType | _: HKApply => true case _ => false } if (isInteresting(pre)) { @@ -223,6 +223,9 @@ object Checking { val checker = new CheckNonCyclicMap(sym, reportErrors)(ctx.addMode(Mode.CheckCyclic)) try checker.checkInfo(info) catch { + case ex: AssertionError => + println(s"assertion error for $info") + throw ex case ex: CyclicReference => if (reportErrors) { ctx.error(i"illegal cyclic reference: ${checker.where} 
${checker.lastChecked} of $sym refers back to the type itself", sym.pos) diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index bf36942e0..3b193d2db 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -307,9 +307,14 @@ class Namer { typer: Typer => // have no implementation. val cctx = if (tree.name == nme.CONSTRUCTOR && !(tree.mods is JavaDefined)) ctx.outer else ctx + val completer = tree match { + case tree: TypeDef => new TypeDefCompleter(tree)(cctx) + case _ => new Completer(tree)(cctx) + } + recordSym(ctx.newSymbol( ctx.owner, name, flags | deferred | method | higherKinded | inSuperCall1, - adjustIfModule(new Completer(tree)(cctx), tree), + adjustIfModule(completer, tree), privateWithinClass(tree.mods), tree.pos), tree) case tree: Import => recordSym(ctx.newSymbol( @@ -489,32 +494,11 @@ class Namer { typer: Typer => } /** The completer of a symbol defined by a member def or import (except ClassSymbols) */ - class Completer(val original: Tree)(implicit ctx: Context) extends TypeParamsCompleter { + class Completer(val original: Tree)(implicit ctx: Context) extends LazyType { protected def localContext(owner: Symbol) = ctx.fresh.setOwner(owner).setTree(original) - private var myTypeParams: List[TypeSymbol] = null - private var nestedCtx: Context = null - - def completerTypeParams(sym: Symbol): List[TypeSymbol] = { - if (myTypeParams == null) { - //println(i"completing type params of $sym in ${sym.owner}") - myTypeParams = original match { - case tdef: TypeDef => - nestedCtx = localContext(sym).setNewScope - locally { - implicit val ctx: Context = nestedCtx - completeParams(tdef.tparams) - tdef.tparams.map(symbolOfTree(_).asType) - } - case _ => - Nil - } - } - myTypeParams - } - - private def typeSig(sym: Symbol): Type = original match { + protected def typeSig(sym: Symbol): Type = original match { case original: ValDef => if (sym is Module) moduleValSig(sym) else valOrDefDefSig(original, sym, Nil, Nil, identity)(localContext(sym).setNewScope) @@ -522,9 +506,6 @@ class Namer { typer: Typer => val typer1 = ctx.typer.newLikeThis nestedTyper(sym) = typer1 typer1.defDefSig(original, sym)(localContext(sym).setTyper(typer1)) - case original: TypeDef => - assert(!original.isClassDef) - typeDefSig(original, sym, completerTypeParams(sym))(nestedCtx) case imp: Import => try { val expr1 = typedAheadExpr(imp.expr, AnySelectionProto) @@ -569,6 +550,28 @@ class Namer { typer: Typer => } } + class TypeDefCompleter(original: TypeDef)(ictx: Context) extends Completer(original)(ictx) with TypeParamsCompleter { + private var myTypeParams: List[TypeSymbol] = null + private var nestedCtx: Context = null + assert(!original.isClassDef) + + def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol] = { + if (myTypeParams == null) { + //println(i"completing type params of $sym in ${sym.owner}") + nestedCtx = localContext(sym).setNewScope + myTypeParams = { + implicit val ctx: Context = nestedCtx + completeParams(original.tparams) + original.tparams.map(symbolOfTree(_).asType) + } + } + myTypeParams + } + + override protected def typeSig(sym: Symbol): Type = + typeDefSig(original, sym, completerTypeParams(sym)(ictx))(nestedCtx) + } + class ClassCompleter(cls: ClassSymbol, original: TypeDef)(ictx: Context) extends Completer(original)(ictx) { withDecls(newScope) -- cgit v1.2.3 From 960ea75c97e69ae65e2c6df2aa8dd266b0b09e50 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 22 Jun 2016 09:41:52 +0200 
Subject: Allow for HK types in widenForMatchSelector --- src/dotty/tools/dotc/typer/Inferencing.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala index 2b37fa36c..6becd29ec 100644 --- a/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/src/dotty/tools/dotc/typer/Inferencing.scala @@ -177,12 +177,14 @@ object Inferencing { def widenForMatchSelector(tp: Type)(implicit ctx: Context): Type = tp.widen match { case tp: TypeRef if !tp.symbol.isClass => widenForMatchSelector(tp.info.bounds.hi) - case tp: AnnotatedType => - tp.derivedAnnotatedType(widenForMatchSelector(tp.tpe), tp.annot) case tp @ RefinedType(parent, rname, rinfo) if !parent.typeSymbol.isClass => tp.derivedRefinedType(widenForMatchSelector(parent), rname, rinfo) case tp: RecType if !tp.parent.typeSymbol.isClass => tp.derivedRecType(widenForMatchSelector(tp.parent)) + case tp: HKApply => + widenForMatchSelector(tp.upperBound) + case tp: AnnotatedType => + tp.derivedAnnotatedType(widenForMatchSelector(tp.tpe), tp.annot) case tp => tp } -- cgit v1.2.3 From e0db04db51bc17d311b95a6f65913d138a955b6c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 22 Jun 2016 09:49:03 +0200 Subject: Drop bounds checking for type lambdas --- src/dotty/tools/dotc/core/TypeComparer.scala | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 58d286951..12e9e638a 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -394,11 +394,19 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp2 @ TypeLambda(tparams2, body2) => def compareHkLambda = tp1.stripTypeVar match { case tp1 @ TypeLambda(tparams1, body1) => - val boundsConform = - tparams1.corresponds(tparams2)((tparam1, tparam2) => - isSubType(tparam2.memberBounds.subst(tp2, tp1), tparam1.memberBounds)) - val bodiesConform = isSubType(body1, body2.subst(tp2, tp1)) - variancesConform(tparams1, tparams2) && boundsConform && bodiesConform + // Don't compare bounds of lambdas, or t2994 will fail + // The issue is that, logically, bounds should compare contravariantly, + // so the bounds checking should look like this: + // + // tparams1.corresponds(tparams2)((tparam1, tparam2) => + // isSubType(tparam2.memberBounds.subst(tp2, tp1), tparam1.memberBounds)) + // + // But that would invalidate a pattern such as + // `[X0 <: Number] -> Number <:< [X0] -> Any` + // This wpuld mean that there is no convenient means anymore to express a kind + // as a supertype. The fix is to delay the checking of bounds so that only + // bounds of * types are checked. 
+ variancesConform(tparams1, tparams2) && isSubType(body1, body2.subst(tp2, tp1)) case _ => fourthTry(tp1, tp2) } -- cgit v1.2.3 From de5d8fe696cdf7acfa80991ceae322aedb1dfd20 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 22 Jun 2016 09:50:53 +0200 Subject: Skip typeBounds when computing upperBound --- src/dotty/tools/dotc/core/Types.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 593dcb967..38a7c14b1 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2729,7 +2729,7 @@ object Types { override def computeHash = doHash(tycon, args) def upperBound(implicit ctx: Context): Type = tycon match { - case tp: TypeProxy => tp.underlying.appliedTo(args) + case tp: TypeProxy => tp.underlying.bounds.hi.appliedTo(args) case _ => defn.AnyType } -- cgit v1.2.3 From 8805dd4f821e06a688fcf492b61033fe0992e752 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 26 Jun 2016 16:57:38 +0200 Subject: When comparing types revert eta-expansion as needed The problem is that some existential types read from Java (and Scala as well? not sure) appear as naked typerefs. They consequently get expanded via eta expansion to type lambdas. This commit compensates for this by collapsing an eta expansion if this can make a subtype tests succeed or a union or intersection be legal. Also, take hk types into account for liftToClasses Needs to special-treat TypeLambda and HKApply since otherwise we risk creating malformed And-types. --- src/dotty/tools/dotc/core/ConstraintHandling.scala | 11 +++-- src/dotty/tools/dotc/core/TypeComparer.scala | 48 +++++++++++++++++----- src/dotty/tools/dotc/typer/Implicits.scala | 8 ++++ 3 files changed, 53 insertions(+), 14 deletions(-) diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index 66767d58a..44b6abe12 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -6,6 +6,7 @@ import Types._, Contexts._, Symbols._ import Decorators._ import config.Config import config.Printers._ +import TypeApplications.EtaExpansion import collection.mutable /** Methods for adding constraints and solving them. @@ -239,8 +240,6 @@ trait ConstraintHandling { def addParamBound(bound: PolyParam) = if (fromBelow) addLess(bound, param) else addLess(param, bound) - assert(param.isHK == bound.isHK, s"$param / $bound / $fromBelow") - /** Drop all constrained parameters that occur at the toplevel in `bound` and * handle them by `addLess` calls. 
* The preconditions make sure that such parameters occur only @@ -297,7 +296,13 @@ trait ConstraintHandling { case bound: PolyParam if constraint contains bound => addParamBound(bound) case _ => - val pbound = prune(bound) + var pbound = prune(bound) + if (pbound.isHK && !param.isHK) { + param match { + case EtaExpansion(tycon) if tycon.symbol.isClass => pbound = tycon + case _ => + } + } pbound.exists && ( if (fromBelow) addLowerBound(param, pbound) else addUpperBound(param, pbound)) } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 12e9e638a..e6cd0a0df 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -392,12 +392,12 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp2 @ HKApply(tycon2, args2) => compareHkApply2(tp1, tp2, tycon2, args2) case tp2 @ TypeLambda(tparams2, body2) => - def compareHkLambda = tp1.stripTypeVar match { + def compareHkLambda: Boolean = tp1.stripTypeVar match { case tp1 @ TypeLambda(tparams1, body1) => // Don't compare bounds of lambdas, or t2994 will fail // The issue is that, logically, bounds should compare contravariantly, // so the bounds checking should look like this: - // + // // tparams1.corresponds(tparams2)((tparam1, tparam2) => // isSubType(tparam2.memberBounds.subst(tp2, tp1), tparam1.memberBounds)) // @@ -408,6 +408,13 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { // bounds of * types are checked. variancesConform(tparams1, tparams2) && isSubType(body1, body2.subst(tp2, tp1)) case _ => + if (!tp1.isHK) { + tp2 match { + case EtaExpansion(tycon2) if tycon2.symbol.isClass => + return isSubType(tp1, tycon2) + case _ => + } + } fourthTry(tp1, tp2) } compareHkLambda @@ -1269,7 +1276,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { val t2 = distributeAnd(tp2, tp1) if (t2.exists) t2 else if (erased) erasedGlb(tp1, tp2, isJava = false) - else liftIfHK(tp1, tp2, AndType(_, _)) + else liftIfHK(tp1, tp2, AndType(_, _), _ & _) } } @@ -1293,7 +1300,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { val t2 = distributeOr(tp2, tp1) if (t2.exists) t2 else if (erased) erasedLub(tp1, tp2) - else liftIfHK(tp1, tp2, OrType(_, _)) + else liftIfHK(tp1, tp2, OrType(_, _), _ | _) } } @@ -1314,11 +1321,24 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * allowing both interpretations. A possible remedy is to be somehow stricter * in where we allow which interpretation. */ - private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type) = { + private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type, original: (Type, Type) => Type) = { val tparams1 = tp1.typeParams val tparams2 = tp2.typeParams - if (tparams1.isEmpty || tparams2.isEmpty) op(tp1, tp2) - else if (tparams1.length != tparams2.length) mergeConflict(tp1, tp2) + if (!Config.newHK && tparams1.isEmpty || tparams2.isEmpty) op(tp1, tp2) + else if (Config.newHK && tparams1.isEmpty) + if (tparams2.isEmpty) op(tp1, tp2) + else tp2 match { + case EtaExpansion(tycon2) if tycon2.symbol.isClass => original(tp1, tycon2) // TODO: Roll isClass into EtaExpansion? 
+ case _ => mergeConflict(tp1, tp2) + } + else if (Config.newHK && tparams2.isEmpty) { + tp1 match { + case EtaExpansion(tycon1) if tycon1.symbol.isClass => original(tycon1, tp2) + case _ => mergeConflict(tp1, tp2) + } + } + else if (!Config.newHK && (tparams1.isEmpty || tparams2.isEmpty)) op(tp1, tp2) + else if (!Config.newHK && tparams1.length != tparams2.length) mergeConflict(tp1, tp2) else if (Config.newHK) { val numArgs = tparams1.length def argRefs(tl: GenericType) = List.range(0, numArgs).map(PolyParam(tl, _)) @@ -1330,7 +1350,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { tl.lifted(tparams1, tparam1.memberBoundsAsSeenFrom(tp1)).bounds & tl.lifted(tparams2, tparam2.memberBoundsAsSeenFrom(tp2)).bounds), resultTypeExp = tl => - op(tl.lifted(tparams1, tp1).appliedTo(argRefs(tl)), + original(tl.lifted(tparams1, tp1).appliedTo(argRefs(tl)), tl.lifted(tparams2, tp2).appliedTo(argRefs(tl)))) } else { @@ -1389,12 +1409,18 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { mergeNames(names1, names2, nme.syntheticParamName), formals1, tp1.resultType & tp2.resultType.subst(tp2, tp1)) case _ => + tp2 match { + case tp2 @ MethodType(names2, formals2) => + println( + TypeComparer.explained(implicit ctx => isSameType(formals1.head, formals2.head))) + case _ => + } mergeConflict(tp1, tp2) } - case tp1: GenericType => + case tp1: PolyType => tp2 match { - case tp2: GenericType if matchingTypeParams(tp1, tp2) => - tp1.derivedGenericType( + case tp2: PolyType if matchingTypeParams(tp1, tp2) => + tp1.derivedPolyType( mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName), tp1.paramBounds, tp1.resultType & tp2.resultType.subst(tp2, tp1)) case _ => diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala index a5246cf6b..d3f9fd777 100644 --- a/src/dotty/tools/dotc/typer/Implicits.scala +++ b/src/dotty/tools/dotc/typer/Implicits.scala @@ -292,6 +292,14 @@ trait ImplicitRunInfo { self: RunInfo => (lead /: tp.classSymbols)(joinClass) case tp: TypeVar => apply(tp.underlying) + case tp: HKApply => + def applyArg(arg: Type) = arg match { + case TypeBounds(lo, hi) => AndType.make(lo, hi) + case _ => arg + } + (apply(tp.tycon) /: tp.args)((tc, arg) => AndType.make(tc, applyArg(arg))) + case tp: TypeLambda => + apply(tp.resType) case _ => mapOver(tp) } -- cgit v1.2.3 From 646bf97d7c2e66b4332b86f2e6cd6440fc442215 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:15:22 +0200 Subject: Beta-reduce with wildcard arguments if this is safe. Makes pos/partialApplications compile. - Fix appliedTo for aliasTypes Returned a general TypeBounds instance before. --- src/dotty/tools/dotc/core/TypeApplications.scala | 48 ++++++++++++++++++++++-- src/dotty/tools/dotc/core/TypeComparer.scala | 6 --- 2 files changed, 45 insertions(+), 9 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 274fc8ff8..714d2a5e3 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -223,6 +223,44 @@ object TypeApplications { mapOver(tp) } } + + /** A type map that tries to reduce a (part of) the result type of the type lambda `tycon` + * with the given `args`(some of which are wildcard arguments represented by type bounds). + * Non-wildcard arguments are substituted everywhere as usual. 
A wildcard argument + * `>: L <: H` is substituted for a type lambda parameter `X` only if `X` appears + * in a toplevel refinement of the form + * + * { type A = X } + * + * and there are no other occurrences of `X` in the reduced type. In that case + * the refinement above is replaced by + * + * { type A >: L <: U } + * + * The `allReplaced` field indicates whether all occurrences of type lambda parameters + * in the reduced type have been replaced with arguments. + */ + class Reducer(tycon: TypeLambda, args: List[Type])(implicit ctx: Context) extends TypeMap { + private var available = Set((0 until args.length): _*) + var allReplaced = true + def hasWildcardArg(p: PolyParam) = + p.binder == tycon && args(p.paramNum).isInstanceOf[TypeBounds] + def apply(t: Type) = t match { + case t @ TypeAlias(p: PolyParam) if hasWildcardArg(p) && available.contains(p.paramNum) => + available -= p.paramNum + args(p.paramNum) + case p: PolyParam if p.binder == tycon => + if (hasWildcardArg(p)) { allReplaced = false; p } + else args(p.paramNum) + case _: TypeBounds | _: HKApply => + val saved = available + available = Set() + try mapOver(t) + finally available = saved + case _ => + mapOver(t) + } + } } import TypeApplications._ @@ -733,12 +771,16 @@ class TypeApplications(val self: Type) extends AnyVal { self.stripTypeVar match { case self: TypeLambda => if (!args.exists(_.isInstanceOf[TypeBounds])) self.instantiate(args) - else self match { - case EtaExpansion(selfTycon) => selfTycon.appliedTo(args) - case _ => HKApply(self, args) + else { + val reducer = new Reducer(self, args) + val reduced = reducer(self.resType) + if (reducer.allReplaced) reduced + else HKApply(self, args) } case self: AndOrType => self.derivedAndOrType(self.tp1.appliedTo(args), self.tp2.appliedTo(args)) + case self: TypeAlias => + self.derivedTypeAlias(self.alias.appliedTo(args)) case self: TypeBounds => self.derivedTypeBounds(self.lo, self.hi.appliedTo(args)) case self: LazyRef => diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index e6cd0a0df..d0c299ae4 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -1409,12 +1409,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { mergeNames(names1, names2, nme.syntheticParamName), formals1, tp1.resultType & tp2.resultType.subst(tp2, tp1)) case _ => - tp2 match { - case tp2 @ MethodType(names2, formals2) => - println( - TypeComparer.explained(implicit ctx => isSameType(formals1.head, formals2.head))) - case _ => - } mergeConflict(tp1, tp2) } case tp1: PolyType => -- cgit v1.2.3 From 09f7ab1554b1207beb1a7c604e103a1178b6a263 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 27 Jun 2016 10:43:28 +0200 Subject: Handle findMember for unreduced hk types. - Follow upper bound, as before. - But if type constructor is a lambda, needs special treatment. 
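(Illustration only; the names are invented. The first point above corresponds to ordinary selections through an application of an abstract constructor, which go through the upper bound. The second point is sketched below: when the constructor is a type lambda, the selected member's info has to be rewrapped in the lambda and re-applied to the arguments, which is what the goApply case added in the diff below does.)

    object FindMemberSketch {
      trait Coll[C[_]] { def make[A](a: A): C[A] }
      // A partially applied constructor is represented by a type lambda
      // comparable to [X] -> Either[String, X]; selecting members such as
      // `isRight` through its (possibly unreduced) application must still work.
      type ErrOr[A] = Either[String, A]
      def ok(c: Coll[ErrOr]): Boolean = c.make(1).isRight
    }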
--- src/dotty/tools/dotc/core/Types.scala | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 38a7c14b1..f41dca433 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -462,6 +462,8 @@ object Types { goParam(tp) case tp: RecType => goRec(tp) + case tp: HKApply => + goApply(tp) case tp: TypeProxy => go(tp.underlying) case tp: ClassInfo => @@ -548,6 +550,16 @@ object Types { } } + def goApply(tp: HKApply) = tp.tycon match { + case tl: TypeLambda => + val res = + go(tl.resType).mapInfo(info => + tl.derivedTypeLambda(tl.paramNames, tl.paramBounds, info).appliedTo(tp.args)) + //println(i"remapping $tp . $name to ${res.info}")// " / ${res.toString}") + res + case _ => go(tp.underlying) + } + def goThis(tp: ThisType) = { val d = go(tp.underlying) if (d.exists) @@ -2674,14 +2686,21 @@ object Types { class TypeLambda(paramNames: List[TypeName], override val variances: List[Int])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type) extends GenericType(paramNames)(paramBoundsExp, resultTypeExp) with ValueType { - assert(resType.isValueType, this) + assert(resType.isInstanceOf[TermType], this) assert(paramNames.nonEmpty) lazy val typeParams: List[LambdaParam] = paramNames.indices.toList.map(new LambdaParam(this, _)) - def derivedTypeLambda(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context) = - derivedGenericType(paramNames, paramBounds, resType) + def derivedTypeLambda(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): Type = + resType match { + case resType @ TypeAlias(alias) => + resType.derivedTypeAlias(duplicate(paramNames, paramBounds, alias)) + case resType @ TypeBounds(lo, hi) => + resType.derivedTypeBounds(lo, duplicate(paramNames, paramBounds, hi)) + case _ => + derivedGenericType(paramNames, paramBounds, resType) + } def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): TypeLambda = TypeLambda(paramNames, variances)( -- cgit v1.2.3 From 9a90e8167f4694576670a5943ec147757b460af5 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:16:46 +0200 Subject: Handle WildcardType in liftToClasses --- src/dotty/tools/dotc/printing/PlainPrinter.scala | 2 +- src/dotty/tools/dotc/typer/Implicits.scala | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index d1a0560f2..880804b9e 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -202,7 +202,7 @@ class PlainPrinter(_ctx: Context) extends Printer { case AnnotatedType(tpe, annot) => toTextLocal(tpe) ~ " " ~ toText(annot) case HKApply(tycon, args) => - toTextLocal(tycon) ~ "[!" ~ Text(args.map(argText), ", ") ~ "]" + toTextLocal(tycon) ~ "[" ~ Text(args.map(argText), ", ") ~ "]" case tp: TypeVar => if (tp.isInstantiated) toTextLocal(tp.instanceOpt) ~ "'" // debug for now, so that we can see where the TypeVars are. 
diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala index d3f9fd777..91a67de9a 100644 --- a/src/dotty/tools/dotc/typer/Implicits.scala +++ b/src/dotty/tools/dotc/typer/Implicits.scala @@ -295,6 +295,7 @@ trait ImplicitRunInfo { self: RunInfo => case tp: HKApply => def applyArg(arg: Type) = arg match { case TypeBounds(lo, hi) => AndType.make(lo, hi) + case _: WildcardType => defn.AnyType case _ => arg } (apply(tp.tycon) /: tp.args)((tc, arg) => AndType.make(tc, applyArg(arg))) -- cgit v1.2.3 From dca10528b40632860696d65cfe699d6dfcb62ec0 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:18:04 +0200 Subject: Don't make * types higher-kinded in avoid Also, fix error counts in tests for new hk scheme. --- src/dotty/tools/dotc/typer/TypeAssigner.scala | 6 ++++-- tests/neg/hklower.scala | 2 +- tests/neg/ski.scala | 18 +++++++++--------- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala index 27cc0e6f5..e26ea1138 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -97,8 +97,10 @@ trait TypeAssigner { } case tp @ AppliedType(tycon, args) if toAvoid(tycon) => val base = apply(tycon) - val args = tp.baseArgInfos(base.typeSymbol) - if (base.typeParams.length == args.length) base.appliedTo(args) else base + var args = tp.baseArgInfos(base.typeSymbol) + if (base.typeParams.length != args.length) + args = base.typeParams.map(_.memberBounds) + base.appliedTo(args) case tp @ RefinedType(parent, name, rinfo) if variance > 0 => val parent1 = apply(tp.parent) val refinedInfo1 = apply(rinfo) diff --git a/tests/neg/hklower.scala b/tests/neg/hklower.scala index e29a1545e..e1e79070e 100644 --- a/tests/neg/hklower.scala +++ b/tests/neg/hklower.scala @@ -1,4 +1,4 @@ -class Test { +class Test { // error: conflicting bounds type T[X] // OK type U[X] = T[X] // OK diff --git a/tests/neg/ski.scala b/tests/neg/ski.scala index 3d44e77da..90a43039a 100644 --- a/tests/neg/ski.scala +++ b/tests/neg/ski.scala @@ -76,29 +76,29 @@ object Test { type T5 = Equals[K#ap[c]#ap[d]#eval, c] // KKcde -> d - type T6 = Equals[K#ap[K]#ap[c]#ap[d]#ap[e]#eval, d] // error: Type argument K2[K1[_ <: Term] @UnsafeNonvariant#x, e]#eval does not conform to upper bound d + type T6 = Equals[K#ap[K]#ap[c]#ap[d]#ap[e]#eval, d] // SIIIc -> Ic - type T7 = Equals[S#ap[I]#ap[I]#ap[I]#ap[c]#eval, c] // error: not a legal path // error: Type argument I1[_ <: Term]#eval#ap[_]#eval does not conform to upper bound c + type T7 = Equals[S#ap[I]#ap[I]#ap[I]#ap[c]#eval, c] // SKKc -> Ic type T8 = Equals[S#ap[K]#ap[K]#ap[c]#eval, c] // SIIKc -> KKc - type T9 = Equals[S#ap[I]#ap[I]#ap[K]#ap[c]#eval, K#ap[K]#ap[c]#eval] // error: Type argument K2[K1[_ <: Term] @UnsafeNonvariant#x, _ <: Term]#eval does not conform to upper bound K2[K, c]#eval + type T9 = Equals[S#ap[I]#ap[I]#ap[K]#ap[c]#eval, K#ap[K]#ap[c]#eval] // SIKKc -> K(KK)c - type T10 = Equals[S#ap[I]#ap[K]#ap[K]#ap[c]#eval, K#ap[K#ap[K]]#ap[c]#eval] // error: Type argument K2[K1[_ <: Term] @UnsafeNonvariant#x, _ <: Term]#eval does not conform to upper bound K2[K1[K], c]#eval + type T10 = Equals[S#ap[I]#ap[K]#ap[K]#ap[c]#eval, K#ap[K#ap[K]]#ap[c]#eval] // SIKIc -> KIc - type T11 = Equals[S#ap[I]#ap[K]#ap[I]#ap[c]#eval, K#ap[I]#ap[c]#eval] // error: not a legal path // error: Type argument I1[_ <: Term]#eval#ap[_]#eval does not conform to upper bound K2[I, c]#eval + type 
T11 = Equals[S#ap[I]#ap[K]#ap[I]#ap[c]#eval, K#ap[I]#ap[c]#eval] // SKIc -> Ic type T12 = Equals[S#ap[K]#ap[I]#ap[c]#eval, c] // R = S(K(SI))K (reverse) type R = S#ap[K#ap[S#ap[I]]]#ap[K] - type T13 = Equals[R#ap[c]#ap[d]#eval, d#ap[c]#eval] // error: Type argument S3[I, S2[I, _ <: Term] @UnsafeNonvariant#y, _ <: Term]#eval does not conform to upper bound d#eval + type T13 = Equals[R#ap[c]#ap[d]#eval, d#ap[c]#eval] type b[a <: Term] = S#ap[K#ap[a]]#ap[S#ap[I]#ap[I]] @@ -116,15 +116,15 @@ object Test { } type NN1 = b[R]#ap[b[R]]#ap[A0] - type T13a = Equals[NN1#eval, c] // error: Type argument Test.NN1#eval does not conform to upper bound c + type T13a = Equals[NN1#eval, c] // Double iteration type NN2 = b[R]#ap[b[R]]#ap[A1] - type T14 = Equals[NN2#eval, c] // error: Type argument Test.NN2#eval does not conform to upper bound c + type T14 = Equals[NN2#eval, c] // Triple iteration type NN3 = b[R]#ap[b[R]]#ap[A2] - type T15 = Equals[NN3#eval, c] // error: Type argument Test.NN3#eval does not conform to upper bound c + type T15 = Equals[NN3#eval, c] trait An extends Term { type ap[x <: Term] = x#ap[An]#eval // error: not a legal path -- cgit v1.2.3 From 4093e138f682ce0c78eed36861962c1e68022098 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:18:49 +0200 Subject: Don't dealias when applying types Keep the higher-kinded application instead. This should be better for inference. We have to evaluate the performance impact. Maybe dealias if the kind stays the same? Also, reduce Nothing[...] to Nothing Nothing cannot be a type constructor in a HK application because it does not have type parameters. Avoid the problemn by the reduction above. --- src/dotty/tools/dotc/core/TypeApplications.scala | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 714d2a5e3..e989e42b7 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -744,7 +744,7 @@ class TypeApplications(val self: Type) extends AnyVal { case self1: WildcardType => self1 case _ => - self.safeDealias.appliedTo(args, typeParams) + self.appliedTo(args, typeParams) } } @@ -768,7 +768,7 @@ class TypeApplications(val self: Type) extends AnyVal { case nil => t } assert(args.nonEmpty) - self.stripTypeVar match { + self.stripTypeVar.safeDealias match { case self: TypeLambda => if (!args.exists(_.isInstanceOf[TypeBounds])) self.instantiate(args) else { @@ -785,10 +785,12 @@ class TypeApplications(val self: Type) extends AnyVal { self.derivedTypeBounds(self.lo, self.hi.appliedTo(args)) case self: LazyRef => LazyRef(() => self.ref.appliedTo(args, typParams)) + case self: TypeRef if self.symbol == defn.NothingClass => + self case _ if typParams.isEmpty || typParams.head.isInstanceOf[LambdaParam] => HKApply(self, args) - case _ => - matchParams(self, typParams, args) match { + case dealiased => + matchParams(dealiased, typParams, args) match { case refined @ RefinedType(_, pname, _) if !Config.newHK && pname.isHkArgNameOLD => refined.betaReduceOLD case refined => -- cgit v1.2.3 From bb5993155f4649d974d8dd2c3341dab783aeb7d0 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:19:44 +0200 Subject: Fixes to hk comparisons - Follow aliases in isMatchingApply - Avoid comparing with TypeBounds in compareHkApply - Generalize kind adaption in liftIfHK Previously we only covered EtaExpansion/reduction. 
Need to generalize this to the case where a type is partially applied. Test case is pos/partialApplications.scala --- src/dotty/tools/dotc/core/TypeComparer.scala | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index d0c299ae4..38b45b2b0 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -578,13 +578,13 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { def isMatchingApply(tp1: Type): Boolean = tp1 match { case HKApply(tycon1, args1) => - tycon1 match { + tycon1.dealias match { case tycon1: PolyParam => (tycon1 == tycon2 || canConstrain(tycon1) && tryInstantiate(tycon1, tycon2)) && isSubArgs(args1, args2, tparams) case tycon1: TypeRef => - tycon2 match { + tycon2.dealias match { case tycon2: TypeRef if tycon1.symbol == tycon2.symbol => isSubType(tycon1.prefix, tycon2.prefix) && isSubArgs(args1, args2, tparams) @@ -685,7 +685,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { canConstrain(param1) && canInstantiate || isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2) case tycon1: TypeProxy => - isSubType(tycon1.underlying.applyIfParameterized(args1), tp2) + isSubType(tycon1.underlying.bounds.hi.applyIfParameterized(args1), tp2) case _ => false } @@ -1327,16 +1327,9 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { if (!Config.newHK && tparams1.isEmpty || tparams2.isEmpty) op(tp1, tp2) else if (Config.newHK && tparams1.isEmpty) if (tparams2.isEmpty) op(tp1, tp2) - else tp2 match { - case EtaExpansion(tycon2) if tycon2.symbol.isClass => original(tp1, tycon2) // TODO: Roll isClass into EtaExpansion? - case _ => mergeConflict(tp1, tp2) - } - else if (Config.newHK && tparams2.isEmpty) { - tp1 match { - case EtaExpansion(tycon1) if tycon1.symbol.isClass => original(tycon1, tp2) - case _ => mergeConflict(tp1, tp2) - } - } + else original(tp1, tp2.appliedTo(tp2.typeParams.map(_.memberBoundsAsSeenFrom(tp2)))) + else if (Config.newHK && tparams2.isEmpty) + original(tp1.appliedTo(tp1.typeParams.map(_.memberBoundsAsSeenFrom(tp1))), tp2) else if (!Config.newHK && (tparams1.isEmpty || tparams2.isEmpty)) op(tp1, tp2) else if (!Config.newHK && tparams1.length != tparams2.length) mergeConflict(tp1, tp2) else if (Config.newHK) { @@ -1493,7 +1486,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case bounds: TypeBounds => i"type bounds $bounds" case _ => tp.show } - throw new MergeError(s"cannot merge ${showType(tp1)} with ${showType(tp2)}", tp1, tp2) + if (true) throw new MergeError(s"cannot merge ${showType(tp1)} with ${showType(tp2)}", tp1, tp2) + else throw new Error(s"cannot merge ${showType(tp1)} with ${showType(tp2)}") } /** Merge two lists of names. If names in corresponding positions match, keep them, -- cgit v1.2.3 From dc5be65541d17cb726b3a88ef052e65f13f0cb18 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 28 Jun 2016 17:52:52 +0200 Subject: Special case for TypeRefs in upperbound Done for efficiency. Also that way we would do something meaningful if the type constructor was, say, Nothing. However, that case is now rules out anyways. 
--- src/dotty/tools/dotc/core/Types.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index f41dca433..387122522 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2748,7 +2748,12 @@ object Types { override def computeHash = doHash(tycon, args) def upperBound(implicit ctx: Context): Type = tycon match { - case tp: TypeProxy => tp.underlying.bounds.hi.appliedTo(args) + case tp: TypeRef => + tp.info match { + case TypeBounds(_, hi) => hi.appliedTo(args) + case _ => tp + } + case tp: TypeProxy => tp.underlying.appliedTo(args) case _ => defn.AnyType } -- cgit v1.2.3 From 31ecad5922c1a66d3ddd01163086eb0326299d35 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:20:40 +0200 Subject: Drop assertion in TypeMap The deleted assertion could fail for code that was erroneous. - Enable new hk scheme. --- src/dotty/tools/dotc/config/Config.scala | 8 +++++--- src/dotty/tools/dotc/core/Types.scala | 4 +--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index bf6f8493b..cd2b7ea4b 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -10,7 +10,7 @@ object Config { final val checkCacheMembersNamed = false - final val newHK = false + final val newHK = true /** When updating a constraint bound, check that the constrained parameter * does not appear at the top-level of either of its bounds. @@ -80,9 +80,11 @@ object Config { final val checkProjections = false /** If this flag is set it is checked that &/| only apply to types - * that are either both hk types or both * types. + * that are either both hk types or both * types. Should be used + * only for debugging as there a generic class without arguments + * can be produced in an And by Implicits.liftToClasses. */ - final val checkKinds = true + final val checkKinds = false /** The recursion depth for showing a summarized string */ final val summarizeDepth = 2 diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 387122522..c9d2b5029 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2356,8 +2356,7 @@ object Types { object OrType { def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = { assertUnerased() - if (Config.checkKinds) - assert((tp1.knownHK - tp2.knownHK).abs <= 1, i"$tp1 | $tp2") + if (Config.checkKinds) assert((tp1.knownHK - tp2.knownHK).abs <= 1, i"$tp1 | $tp2") unique(new CachedOrType(tp1, tp2)) } def make(tp1: Type, tp2: Type)(implicit ctx: Context): Type = @@ -3582,7 +3581,6 @@ object Types { try this(arg) finally variance = saved } - assert(tp.args.length == tp.typeParams.length, tp) derivedAppliedType(tp, this(tp.tycon), tp.args.zipWithConserve(tp.typeParams)(mapArg)) -- cgit v1.2.3 From 9d9965c35c4387669776f94a64fcb08fd8bf9dcf Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 28 Jun 2016 21:16:35 +0200 Subject: Fix parameter unpickling Parameter names always come in as term names, ahve to be explicitly converted to type names. The previous implementation used a cast instead of a conversion, which caused a ClassCastException. For symmetry we model readParamNames such that it returns a List[Name] which has to be explicitly converted to a List[TermName] or a List[TypeName], using a map. 
---
 src/dotty/tools/dotc/core/Types.scala | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala
index f41dca433..387122522 100644
--- a/src/dotty/tools/dotc/core/Types.scala
+++ b/src/dotty/tools/dotc/core/Types.scala
@@ -2748,7 +2748,12 @@ object Types {
     override def computeHash = doHash(tycon, args)
 
     def upperBound(implicit ctx: Context): Type = tycon match {
-      case tp: TypeProxy => tp.underlying.bounds.hi.appliedTo(args)
+      case tp: TypeRef =>
+        tp.info match {
+          case TypeBounds(_, hi) => hi.appliedTo(args)
+          case _ => tp
+        }
+      case tp: TypeProxy => tp.underlying.appliedTo(args)
       case _ => defn.AnyType
     }
-- cgit v1.2.3 


From 31ecad5922c1a66d3ddd01163086eb0326299d35 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Wed, 29 Jun 2016 20:20:40 +0200
Subject: Drop assertion in TypeMap

The deleted assertion could fail for code that was erroneous.

- Enable new hk scheme.
---
 src/dotty/tools/dotc/config/Config.scala | 8 +++++---
 src/dotty/tools/dotc/core/Types.scala    | 4 +---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala
index bf6f8493b..cd2b7ea4b 100644
--- a/src/dotty/tools/dotc/config/Config.scala
+++ b/src/dotty/tools/dotc/config/Config.scala
@@ -10,7 +10,7 @@ object Config {
 
   final val checkCacheMembersNamed = false
 
-  final val newHK = false
+  final val newHK = true
 
   /** When updating a constraint bound, check that the constrained parameter
    *  does not appear at the top-level of either of its bounds.
@@ -80,9 +80,11 @@ object Config {
   final val checkProjections = false
 
   /** If this flag is set it is checked that &/| only apply to types
-   *  that are either both hk types or both * types.
+   *  that are either both hk types or both * types. Should be used
+   *  only for debugging as there a generic class without arguments
+   *  can be produced in an And by Implicits.liftToClasses.
    */
-  final val checkKinds = true
+  final val checkKinds = false
 
   /** The recursion depth for showing a summarized string */
   final val summarizeDepth = 2
diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala
index 387122522..c9d2b5029 100644
--- a/src/dotty/tools/dotc/core/Types.scala
+++ b/src/dotty/tools/dotc/core/Types.scala
@@ -2356,8 +2356,7 @@ object Types {
   object OrType {
     def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = {
       assertUnerased()
-      if (Config.checkKinds)
-        assert((tp1.knownHK - tp2.knownHK).abs <= 1, i"$tp1 | $tp2")
+      if (Config.checkKinds) assert((tp1.knownHK - tp2.knownHK).abs <= 1, i"$tp1 | $tp2")
       unique(new CachedOrType(tp1, tp2))
     }
     def make(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
@@ -3582,7 +3581,6 @@ object Types {
           try this(arg)
           finally variance = saved
         }
-        assert(tp.args.length == tp.typeParams.length, tp)
         derivedAppliedType(tp, this(tp.tycon),
             tp.args.zipWithConserve(tp.typeParams)(mapArg))
-- cgit v1.2.3 


From 9d9965c35c4387669776f94a64fcb08fd8bf9dcf Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Tue, 28 Jun 2016 21:16:35 +0200
Subject: Fix parameter unpickling

Parameter names always come in as term names and have to be explicitly
converted to type names. The previous implementation used a cast instead
of a conversion, which caused a ClassCastException.

For symmetry we model readParamNames such that it returns a List[Name]
which has to be explicitly converted to a List[TermName] or a
List[TypeName], using a map.
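A self-contained sketch of the conversion-vs-cast difference, using simplified
stand-in classes rather than the compiler's actual Name hierarchy (hypothetical
example, for illustration only):

  final case class TermName(value: String)
  final case class TypeName(value: String)

  object NameDemo {
    def toTypeName(n: TermName): TypeName = TypeName(n.value)  // a real conversion allocates a TypeName

    val names: List[TermName] = List(TermName("A"), TermName("B"))
    val typeNames: List[TypeName] = names.map(toTypeName)      // fine
    // names.asInstanceOf[List[TypeName]] does not fail here (the element types
    // are erased), but using an element as a TypeName later throws a
    // ClassCastException.
  }

This mirrors why readParamNames now returns a List[Name] and each caller maps
the result with toTermName or toTypeName as appropriate.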
---
 src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
index c428ac8c0..2d9b82c97 100644
--- a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
+++ b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
@@ -197,9 +197,9 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
   // ------ Reading types -----------------------------------------------------
 
   /** Read names in an interleaved sequence of (parameter) names and types/bounds */
-  def readParamNames[N <: Name](end: Addr): List[N] =
+  def readParamNames(end: Addr): List[Name] =
     until(end) {
-      val name = readName().asInstanceOf[N]
+      val name = readName()
       skipTree()
       name
     }
@@ -244,11 +244,11 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
     def readLengthType(): Type = {
       val end = readEnd()
 
-      def readNamesSkipParams[N <: Name]: (List[N], TreeReader) = {
+      def readNamesSkipParams: (List[Name], TreeReader) = {
        val nameReader = fork
        nameReader.skipTree() // skip result
        val paramReader = nameReader.fork
-        (nameReader.readParamNames[N](end), paramReader)
+        (nameReader.readParamNames(end), paramReader)
       }
 
       val result =
@@ -288,24 +288,24 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
          registerSym(start, sym)
          TypeRef.withFixedSym(NoPrefix, sym.name, sym)
        case LAMBDAtype =>
-          val (rawNames, paramReader) = readNamesSkipParams[TypeName]
+          val (rawNames, paramReader) = readNamesSkipParams
          val (variances, paramNames) = rawNames
-            .map(name => (prefixToVariance(name.head), name.tail.asTypeName)).unzip
+            .map(name => (prefixToVariance(name.head), name.tail.toTypeName)).unzip
          val result = TypeLambda(paramNames, variances)(
            pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)),
            pt => readType())
          goto(end)
          result
        case POLYtype =>
-          val (names, paramReader) = readNamesSkipParams[TypeName]
-          val result = PolyType(names)(
+          val (names, paramReader) = readNamesSkipParams
+          val result = PolyType(names.map(_.toTypeName))(
            pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)),
            pt => readType())
          goto(end)
          result
        case METHODtype =>
-          val (names, paramReader) = readNamesSkipParams[TermName]
-          val result = MethodType(names, paramReader.readParamTypes[Type](end))(
+          val (names, paramReader) = readNamesSkipParams
+          val result = MethodType(names.map(_.toTermName), paramReader.readParamTypes[Type](end))(
            mt => registeringType(mt, readType()))
          goto(end)
          result
-- cgit v1.2.3 


From a23c1a476296a25566d7aa08de676a1217b243cb Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Wed, 29 Jun 2016 20:22:14 +0200
Subject: Fixes for printing under -Ytest-pickler

- Make printing package ids more robust

  Crashed before when printers were turned on during test
  pickling mode.

- Make Denotation#bringForward more robust

  Assertion failed before when printers were turned on during test
  pickling mode.
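Both fixes have the same defensive shape: guard the printing code path instead
of assuming it cannot be reached while the pickler test is active. A rough
sketch of the pattern (simplified, invented names, not the compiler's actual
API):

  trait PackageId { def hasType: Boolean; def typeText: String; def treeText: String }

  def packageIdText(pid: PackageId, homogenizedView: Boolean): String =
    if (homogenizedView && pid.hasType) pid.typeText  // print the type only once one has been assigned
    else pid.treeText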
Also, refactoring MemberBinding -> TypeParamInfo --- src/dotty/tools/dotc/config/Config.scala | 2 - src/dotty/tools/dotc/core/ConstraintHandling.scala | 2 - src/dotty/tools/dotc/core/MemberBinding.scala | 35 --- src/dotty/tools/dotc/core/NameOps.scala | 15 - src/dotty/tools/dotc/core/StdNames.scala | 6 - src/dotty/tools/dotc/core/Symbols.scala | 12 +- src/dotty/tools/dotc/core/TypeApplications.scala | 344 +++------------------ src/dotty/tools/dotc/core/TypeComparer.scala | 175 +---------- src/dotty/tools/dotc/core/TypeOps.scala | 3 +- src/dotty/tools/dotc/core/TypeParamInfo.scala | 29 ++ src/dotty/tools/dotc/core/Types.scala | 82 +---- src/dotty/tools/dotc/core/tasty/TreePickler.scala | 2 +- src/dotty/tools/dotc/printing/PlainPrinter.scala | 2 - src/dotty/tools/dotc/printing/RefinedPrinter.scala | 30 -- src/dotty/tools/dotc/typer/TypeAssigner.scala | 18 +- src/dotty/tools/dotc/typer/Typer.scala | 4 +- src/dotty/tools/dotc/typer/Variances.scala | 4 +- 17 files changed, 112 insertions(+), 653 deletions(-) delete mode 100644 src/dotty/tools/dotc/core/MemberBinding.scala create mode 100644 src/dotty/tools/dotc/core/TypeParamInfo.scala diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index cd2b7ea4b..eba42e881 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -10,8 +10,6 @@ object Config { final val checkCacheMembersNamed = false - final val newHK = true - /** When updating a constraint bound, check that the constrained parameter * does not appear at the top-level of either of its bounds. */ diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index 44b6abe12..ace441566 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -286,8 +286,6 @@ trait ConstraintHandling { if (!addParamBound(bound)) NoType else if (fromBelow) defn.NothingType else defn.AnyType - case bound: RefinedType if !Config.newHK => - bound.normalizeHkApplyOLD case _ => bound } diff --git a/src/dotty/tools/dotc/core/MemberBinding.scala b/src/dotty/tools/dotc/core/MemberBinding.scala deleted file mode 100644 index bff8b30a0..000000000 --- a/src/dotty/tools/dotc/core/MemberBinding.scala +++ /dev/null @@ -1,35 +0,0 @@ -package dotty.tools.dotc.core - -import Names.Name -import Contexts.Context -import Types.{Type, TypeBounds} - -/** A common super trait of Symbol and Refinement. - * Used to capture the attributes of type parameters - * which can be implemented as either symbols or refinements. - * TODO: Rename (TypeParamInfo?) - */ -trait MemberBinding { - - /** Does this binding represent a type parameter? - * Only in that case the rest of the binding's methods are significant. - */ - def isTypeParam(implicit ctx: Context): Boolean - - /** The name of the member */ - def memberName(implicit ctx: Context): Name - - /** The info of the member */ - def memberBounds(implicit ctx: Context): TypeBounds - - /** The info of the member as seen from a prefix type. - * This can be different from `memberInfo` if the binding - * is a type symbol of a class. 
- */ - def memberBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds - - /** The variance of the type parameter - * @pre: isTypeParam = true - */ - def memberVariance(implicit ctx: Context): Int -} \ No newline at end of file diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala index 120540dc7..f5e0eb8cd 100644 --- a/src/dotty/tools/dotc/core/NameOps.scala +++ b/src/dotty/tools/dotc/core/NameOps.scala @@ -101,21 +101,6 @@ object NameOps { name.length > 0 && name.last == '=' && name.head != '=' && isOperatorPart(name.head) } - /** Is this the name of a higher-kinded type parameter of a Lambda? */ - def isHkArgNameOLD = - name.length > 0 && - name.head == tpnme.hkArgPrefixHeadOLD && - name.startsWith(tpnme.hkArgPrefixOLD) && { - val digits = name.drop(tpnme.hkArgPrefixLengthOLD) - digits.length <= 4 && digits.forall(_.isDigit) - } - - /** The index of the higher-kinded type parameter with this name. - * Pre: isLambdaArgName. - */ - def hkArgIndexOLD: Int = - name.drop(tpnme.hkArgPrefixLengthOLD).toString.toInt - /** If the name ends with $nn where nn are * all digits, strip the $ and the digits. * Otherwise return the argument. diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala index 0adf80d8f..778d13cab 100644 --- a/src/dotty/tools/dotc/core/StdNames.scala +++ b/src/dotty/tools/dotc/core/StdNames.scala @@ -529,10 +529,6 @@ object StdNames { val synthSwitch: N = "$synthSwitch" - val hkArgPrefixOLD: N = "$hk" - val hkArgPrefixHeadOLD: Char = hkArgPrefixOLD.head - val hkArgPrefixLengthOLD: Int = hkArgPrefixOLD.length - // unencoded operators object raw { final val AMP : N = "&" @@ -746,8 +742,6 @@ object StdNames { def syntheticLambdaParamNames(num: Int): List[TypeName] = (0 until num).map(syntheticLambdaParamName)(breakOut) - def hkArgOLD(n: Int): TypeName = hkArgPrefixOLD ++ n.toString - final val Conforms = encode("<:<") } diff --git a/src/dotty/tools/dotc/core/Symbols.scala b/src/dotty/tools/dotc/core/Symbols.scala index c7eb54812..df8bc8116 100644 --- a/src/dotty/tools/dotc/core/Symbols.scala +++ b/src/dotty/tools/dotc/core/Symbols.scala @@ -367,7 +367,7 @@ object Symbols { * @param coord The coordinates of the symbol (a position or an index) * @param id A unique identifier of the symbol (unique per ContextBase) */ - class Symbol private[Symbols] (val coord: Coord, val id: Int) extends DotClass with MemberBinding with printing.Showable { + class Symbol private[Symbols] (val coord: Coord, val id: Int) extends DotClass with TypeParamInfo with printing.Showable { type ThisName <: Name @@ -489,12 +489,12 @@ object Symbols { */ def pos: Position = if (coord.isPosition) coord.toPosition else NoPosition - // MemberBinding methods + // TypeParamInfo methods def isTypeParam(implicit ctx: Context) = denot.is(TypeParam) - def memberName(implicit ctx: Context): Name = name - def memberBounds(implicit ctx: Context) = denot.info.bounds - def memberBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = pre.memberInfo(this).bounds - def memberVariance(implicit ctx: Context) = denot.variance + def paramName(implicit ctx: Context): Name = name + def paramBounds(implicit ctx: Context) = denot.info.bounds + def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = pre.memberInfo(this).bounds + def paramVariance(implicit ctx: Context) = denot.variance // -------- Printing -------------------------------------------------------- diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala 
b/src/dotty/tools/dotc/core/TypeApplications.scala index e989e42b7..a09039521 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -46,67 +46,16 @@ object TypeApplications { /** Does the variance of type parameter `tparam1` conform to the variance of type parameter `tparam2`? */ - def varianceConforms(tparam1: MemberBinding, tparam2: MemberBinding)(implicit ctx: Context): Boolean = - varianceConforms(tparam1.memberVariance, tparam2.memberVariance) + def varianceConforms(tparam1: TypeParamInfo, tparam2: TypeParamInfo)(implicit ctx: Context): Boolean = + varianceConforms(tparam1.paramVariance, tparam2.paramVariance) /** Doe the variances of type parameters `tparams1` conform to the variances * of corresponding type parameters `tparams2`? * This is only the case of `tparams1` and `tparams2` have the same length. */ - def variancesConform(tparams1: List[MemberBinding], tparams2: List[MemberBinding])(implicit ctx: Context): Boolean = + def variancesConform(tparams1: List[TypeParamInfo], tparams2: List[TypeParamInfo])(implicit ctx: Context): Boolean = tparams1.corresponds(tparams2)(varianceConforms) - def fallbackTypeParamsOLD(variances: List[Int])(implicit ctx: Context): List[MemberBinding] = { - def memberBindings(vs: List[Int]): Type = vs match { - case Nil => NoType - case v :: vs1 => - RefinedType( - memberBindings(vs1), - tpnme.hkArgOLD(vs1.length), - TypeBounds.empty.withBindingKind(BindingKind.fromVariance(v))) - } - def decompose(t: Type, acc: List[MemberBinding]): List[MemberBinding] = t match { - case t: RefinedType => decompose(t.parent, t :: acc) - case NoType => acc - } - decompose(memberBindings(variances), Nil) - } - - /** Extractor for - * - * [v1 X1: B1, ..., vn Xn: Bn] -> T - * ==> - * ([X_i := this.$hk_i] T) { type v_i $hk_i: (new)B_i } - */ - object TypeLambdaOLD { - def apply(argBindingFns: List[RecType => TypeBounds], - bodyFn: RecType => Type)(implicit ctx: Context): Type = { - val argNames = argBindingFns.indices.toList.map(tpnme.hkArgOLD) - var idx = 0 - RecType.closeOver(rt => - (bodyFn(rt) /: argBindingFns) { (parent, argBindingFn) => - val res = RefinedType(parent, tpnme.hkArgOLD(idx), argBindingFn(rt)) - idx += 1 - res - }) - } - - def unapply(tp: Type)(implicit ctx: Context): Option[( /*List[Int], */ List[TypeBounds], Type)] = { - def decompose(t: Type, acc: List[TypeBounds]): (List[TypeBounds], Type) = t match { - case t @ RefinedType(p, rname, rinfo: TypeBounds) if t.isTypeParam => - decompose(p, rinfo.bounds :: acc) - case t: RecType => - decompose(t.parent, acc) - case _ => - (acc, t) - } - decompose(tp, Nil) match { - case (Nil, _) => None - case x => Some(x) - } - } - } - /** Extractor for * * [v1 X1: B1, ..., vn Xn: Bn] -> C[X1, ..., Xn] @@ -118,33 +67,14 @@ object TypeApplications { */ object EtaExpansion { def apply(tycon: Type)(implicit ctx: Context) = { - if (Config.newHK) assert(tycon.typeParams.nonEmpty, tycon) - else assert(tycon.isEtaExpandableOLD) + assert(tycon.typeParams.nonEmpty, tycon) tycon.EtaExpand(tycon.typeParamSymbols) } - def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = - if (Config.newHK) - tp match { - case tp @ TypeLambda(tparams, AppliedType(fn: TypeRef, args)) - if (args == tparams.map(_.toArg)) => Some(fn) - case _ => None - } - else { - def argsAreForwarders(args: List[Type], n: Int): Boolean = args match { - case Nil => - n == 0 - case TypeRef(RecThis(rt), sel) :: args1 if false => - rt.eq(tp) && sel == tpnme.hkArgOLD(n - 1) && 
argsAreForwarders(args1, n - 1) - case _ => - false - } - tp match { - case TypeLambdaOLD(argBounds, AppliedType(fn: TypeRef, args)) - if argsAreForwarders(args, tp.typeParams.length) => Some(fn) - case _ => None - } - } + def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = tp match { + case tp @ TypeLambda(tparams, AppliedType(fn: TypeRef, args)) if (args == tparams.map(_.toArg)) => Some(fn) + case _ => None + } } /** Extractor for type application T[U_1, ..., U_n]. This is the refined type @@ -165,13 +95,13 @@ object TypeApplications { refinements = rt :: refinements tycon = rt.parent.stripTypeVar } - def collectArgs(tparams: List[MemberBinding], + def collectArgs(tparams: List[TypeParamInfo], refinements: List[RefinedType], argBuf: mutable.ListBuffer[Type]): Option[(Type, List[Type])] = refinements match { case Nil if tparams.isEmpty && argBuf.nonEmpty => Some((tycon, argBuf.toList)) case RefinedType(_, rname, rinfo) :: refinements1 - if tparams.nonEmpty && rname == tparams.head.memberName => + if tparams.nonEmpty && rname == tparams.head.paramName => collectArgs(tparams.tail, refinements1, argBuf += rinfo.argInfo) case _ => None @@ -186,44 +116,16 @@ object TypeApplications { /** Adapt all arguments to possible higher-kinded type parameters using etaExpandIfHK */ - def etaExpandIfHK(tparams: List[MemberBinding], args: List[Type])(implicit ctx: Context): List[Type] = + def etaExpandIfHK(tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): List[Type] = if (tparams.isEmpty) args else { - def bounds(tparam: MemberBinding) = tparam match { + def bounds(tparam: TypeParamInfo) = tparam match { case tparam: Symbol => tparam.infoOrCompleter - case tparam: RefinedType if !Config.newHK => tparam.memberBounds - case tparam: LambdaParam => tparam.memberBounds + case tparam: LambdaParam => tparam.paramBounds } args.zipWithConserve(tparams)((arg, tparam) => arg.etaExpandIfHK(bounds(tparam))) } - /** The references `.this.$hk0, ..., .this.$hk`. */ - def argRefsOLD(rt: RecType, n: Int)(implicit ctx: Context) = - List.range(0, n).map(i => RecThis(rt).select(tpnme.hkArgOLD(i))) - - private class InstMapOLD(fullType: Type)(implicit ctx: Context) extends TypeMap { - var localRecs: Set[RecType] = Set.empty - var keptRefs: Set[Name] = Set.empty - var tyconIsHK: Boolean = true - def apply(tp: Type): Type = tp match { - case tp @ TypeRef(RecThis(rt), sel) if sel.isHkArgNameOLD && localRecs.contains(rt) => - fullType.member(sel).info match { - case TypeAlias(alias) => apply(alias) - case _ => keptRefs += sel; tp - } - case tp: TypeVar if !tp.inst.exists => - val bounds = tp.instanceOpt.orElse(ctx.typeComparer.bounds(tp.origin)) - bounds.foreachPart { - case TypeRef(RecThis(rt), sel) if sel.isHkArgNameOLD && localRecs.contains(rt) => - keptRefs += sel - case _ => - } - tp - case _ => - mapOver(tp) - } - } - /** A type map that tries to reduce a (part of) the result type of the type lambda `tycon` * with the given `args`(some of which are wildcard arguments represented by type bounds). * Non-wildcard arguments are substituted everywhere as usual. A wildcard argument @@ -281,7 +183,7 @@ class TypeApplications(val self: Type) extends AnyVal { * with the bounds on its hk args. See `LambdaAbstract`, where these * types get introduced, and see `isBoundedLambda` below for the test. 
*/ - final def typeParams(implicit ctx: Context): List[MemberBinding] = /*>|>*/ track("typeParams") /*<|<*/ { + final def typeParams(implicit ctx: Context): List[TypeParamInfo] = /*>|>*/ track("typeParams") /*<|<*/ { self match { case self: ClassInfo => self.cls.typeParams @@ -293,8 +195,7 @@ class TypeApplications(val self: Type) extends AnyVal { else if (!tsym.isCompleting) tsym.info.typeParams else Nil case self: RefinedType => - val precedingParams = self.parent.typeParams.filterNot(_.memberName == self.refinedName) - if (self.isTypeParam) precedingParams :+ self else precedingParams + self.parent.typeParams.filterNot(_.paramName == self.refinedName) case self: RecType => self.parent.typeParams case _: HKApply | _: SingletonType => @@ -309,7 +210,7 @@ class TypeApplications(val self: Type) extends AnyVal { } /** If `self` is a higher-kinded type, its type parameters $hk_i, otherwise Nil */ - final def hkTypeParams(implicit ctx: Context): List[MemberBinding] = + final def hkTypeParams(implicit ctx: Context): List[TypeParamInfo] = if (isHK) typeParams else Nil /** If `self` is a generic class, its type parameter symbols, otherwise Nil */ @@ -384,7 +285,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** Is self type higher-kinded (i.e. of kind != "*")? */ def isHK(implicit ctx: Context): Boolean = self.dealias match { case self: TypeRef => self.info.isHK - case self: RefinedType => !Config.newHK && self.isTypeParam + case self: RefinedType => false case self: TypeLambda => true case self: HKApply => false case self: SingletonType => false @@ -410,8 +311,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (!tsym.isCompleting || tsym.isAliasType) tsym.info.knownHK else 0 } - case self: RefinedType => - if (!Config.newHK && self.isTypeParam) 1 else -1 + case self: RefinedType => -1 case self: TypeLambda => 1 case self: HKApply => -1 case self: SingletonType => -1 @@ -423,12 +323,6 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => -1 } - /** is receiver a higher-kinded application? */ - def isHKApplyOLD(implicit ctx: Context): Boolean = self match { - case self @ RefinedType(_, name, _) => name.isHkArgNameOLD && !self.isTypeParam - case _ => false - } - /** True if it can be determined without forcing that the class symbol * of this application exists. Equivalent to * @@ -457,25 +351,6 @@ class TypeApplications(val self: Type) extends AnyVal { self } - /** Replace references to type parameters with references to hk arguments `this.$hk_i` - * Care is needed not to cause cyclic reference errors, hence `SafeSubstMap`. - */ - def recursifyOLD[T <: Type](tparams: List[MemberBinding])(implicit ctx: Context): RecType => T = - tparams match { - case (_: Symbol) :: _ => - (rt: RecType) => - new ctx.SafeSubstMap(tparams.asInstanceOf[List[Symbol]], argRefsOLD(rt, tparams.length)) - .apply(self).asInstanceOf[T] - case _ => - def mapRefs(rt: RecType) = new TypeMap { - def apply(t: Type): Type = t match { - case rthis: RecThis if tparams contains rthis.binder.parent => RecThis(rt) - case _ => mapOver(t) - } - } - mapRefs(_).apply(self).asInstanceOf[T] - } - /** Lambda abstract `self` with given type parameters. 
Examples: * * type T[X] = U becomes type T = [X] -> U @@ -485,124 +360,18 @@ class TypeApplications(val self: Type) extends AnyVal { */ def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = { def expand(tp: Type) = - if (Config.newHK) - TypeLambda( - tpnme.syntheticLambdaParamNames(tparams.length), tparams.map(_.variance))( - tl => tparams.map(tparam => tl.lifted(tparams, tparam.info).bounds), - tl => tl.lifted(tparams, tp)) - else - TypeLambdaOLD( - tparams.map(tparam => - tparam.memberBoundsAsSeenFrom(self) - .withBindingKind(BindingKind.fromVariance(tparam.variance)) - .recursifyOLD(tparams)), - tp.recursifyOLD(tparams)) - + TypeLambda( + tpnme.syntheticLambdaParamNames(tparams.length), tparams.map(_.variance))( + tl => tparams.map(tparam => tl.lifted(tparams, tparam.info).bounds), + tl => tl.lifted(tparams, tp)) assert(!isHK, self) - if (Config.newHK) self match { + self match { case self: TypeAlias => self.derivedTypeAlias(expand(self.alias)) case self @ TypeBounds(lo, hi) => self.derivedTypeBounds(lo, expand(hi)) case _ => expand(self) } - else self match { - case self: TypeAlias => - self.derivedTypeAlias(expand(self.alias.normalizeHkApplyOLD)) - case self @ TypeBounds(lo, hi) => - self.derivedTypeBounds(lo, expand(hi.normalizeHkApplyOLD)) - case _ => expand(self) - } - } - - /** If `self` is a * type, perform the following rewritings: - * - * 1. For every occurrence of `z.$hk_i`, where `z` is a RecThis type that refers - * to some recursive type in `self`, if the member of `self.hk$i` has an alias - * type `= U`: - * - * z.$hk_i --> U - * - * 2. For every top-level binding `type A = z.$hk_i$, where `z` is a RecThis type that refers - * to some recursive type in `self`, if the member of `self` has bounds `S..U`: - * - * type A = z.$hk_i --> type A >: S <: U - * - * 3. If the type constructor preceding all bindings is a * type, delete every top-level - * binding `{ type $hk_i ... }` where `$hk_i` does not appear in the prefix of the binding. - * I.e. - * - * T { type $hk_i ... } --> T - * - * If `$hk_i` does not appear in `T`. 
- * - * A binding is top-level if it can be reached by - * - * - following aliases unless the type is a LazyRef - * (need to keep cycle breakers around, see i974.scala) - * - dropping refinements and rec-types - * - going from a wildcard type to its upper bound - */ - def normalizeHkApplyOLD(implicit ctx: Context): Type = self.strictDealias match { - case self1 @ RefinedType(_, rname, _) if rname.isHkArgNameOLD && self1.typeParams.isEmpty => - val inst = new InstMapOLD(self) - - def instTop(tp: Type): Type = tp.strictDealias match { - case tp: RecType => - inst.localRecs += tp - tp.rebind(instTop(tp.parent)) - case tp @ RefinedType(parent, rname, rinfo) => - rinfo match { - case TypeAlias(TypeRef(RecThis(rt), sel)) if sel.isHkArgNameOLD && inst.localRecs.contains(rt) => - val bounds @ TypeBounds(_, _) = self.member(sel).info - instTop(tp.derivedRefinedType(parent, rname, bounds.withBindingKind(NoBinding))) - case _ => - val parent1 = instTop(parent) - if (rname.isHkArgNameOLD && - !inst.tyconIsHK && - !inst.keptRefs.contains(rname)) parent1 - else tp.derivedRefinedType(parent1, rname, inst(rinfo)) - } - case tp @ WildcardType(bounds @ TypeBounds(lo, hi)) => - tp.derivedWildcardType(bounds.derivedTypeBounds(inst(lo), instTop(hi))) - case tp: LazyRef => - instTop(tp.ref) - case tp => - inst.tyconIsHK = tp.isHK - inst(tp) - } - - def isLazy(tp: Type): Boolean = tp.strictDealias match { - case tp: RefinedOrRecType => isLazy(tp.parent) - case tp @ WildcardType(bounds @ TypeBounds(lo, hi)) => isLazy(hi) - case tp: LazyRef => true - case _ => false - } - - val reduced = - if (isLazy(self1)) { - // A strange dance is needed here to make 974.scala compile. - val res = LazyRef(() => instTop(self)) - res.ref // without this line, pickling 974.scala fails with an assertion error - // saying that we address a RecThis outside its Rec (in the case of RecThis of pickleNewType) - res // without this line, typing 974.scala gives a stackoverflow in asSeenFrom. - } - else instTop(self) - if (reduced ne self) { - hk.println(i"reduce $self --> $reduced / ${inst.tyconIsHK}") - //hk.println(s"reduce $self --> $reduced") - } - reduced - case _ => self - } - - /** A type ref is eta expandable if it refers to a non-lambda class. - * In that case we can look for parameterized base types of the type - * to eta expand them. 
- */ - def isEtaExpandableOLD(implicit ctx: Context) = self match { - case self: TypeRef => self.symbol.isClass - case _ => false } /** Convert a type constructor `TC` which has type parameters `T1, ..., Tn` @@ -681,23 +450,12 @@ class TypeApplications(val self: Type) extends AnyVal { else { def adaptArg(arg: Type): Type = arg match { case arg @ TypeLambda(tparams, body) if - !tparams.corresponds(hkParams)(_.memberVariance == _.memberVariance) && + !tparams.corresponds(hkParams)(_.paramVariance == _.paramVariance) && tparams.corresponds(hkParams)(varianceConforms) => - TypeLambda(tparams.map(_.memberName), hkParams.map(_.memberVariance))( + TypeLambda(tparams.map(_.paramName), hkParams.map(_.paramVariance))( tl => arg.paramBounds.map(_.subst(arg, tl).bounds), tl => arg.resultType.subst(arg, tl) ) - case arg @ TypeLambdaOLD(tparamBounds, body) if - !arg.typeParams.corresponds(hkParams)(_.memberVariance == _.memberVariance) && - arg.typeParams.corresponds(hkParams)(varianceConforms) => - def adjustVariance(bounds: TypeBounds, tparam: MemberBinding): TypeBounds = - bounds.withBindingKind(BindingKind.fromVariance(tparam.memberVariance)) - def lift[T <: Type](tp: T): (RecType => T) = arg match { - case rt0: RecType => tp.subst(rt0, _).asInstanceOf[T] - case _ => (x => tp) - } - val adjusted = (tparamBounds, hkParams).zipped.map(adjustVariance) - TypeLambdaOLD(adjusted.map(lift), lift(body)) case arg @ TypeAlias(alias) => arg.derivedTypeAlias(adaptArg(alias)) case arg @ TypeBounds(lo, hi) => @@ -731,16 +489,6 @@ class TypeApplications(val self: Type) extends AnyVal { self.instantiate(args) case EtaExpansion(self1) => self1.appliedTo(args) - case TypeLambdaOLD(_, body) if !args.exists(_.isInstanceOf[TypeBounds]) => - def substHkArgs = new TypeMap { - def apply(tp: Type): Type = tp match { - case TypeRef(RecThis(rt), name) if rt.eq(self) && name.isHkArgNameOLD => - args(name.hkArgIndexOLD) - case _ => - mapOver(tp) - } - } - substHkArgs(body) case self1: WildcardType => self1 case _ => @@ -753,12 +501,12 @@ class TypeApplications(val self: Type) extends AnyVal { * @param args = `U1, ..., Un` * @param tparams are assumed to be the type parameters of `T`. */ - final def appliedTo(args: List[Type], typParams: List[MemberBinding])(implicit ctx: Context): Type = { - def matchParams(t: Type, tparams: List[MemberBinding], args: List[Type])(implicit ctx: Context): Type = args match { + final def appliedTo(args: List[Type], typParams: List[TypeParamInfo])(implicit ctx: Context): Type = { + def matchParams(t: Type, tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): Type = args match { case arg :: args1 => try { val tparam :: tparams1 = tparams - matchParams(RefinedType(t, tparam.memberName, arg.toBounds(tparam)), tparams1, args1) + matchParams(RefinedType(t, tparam.paramName, arg.toBounds(tparam)), tparams1, args1) } catch { case ex: MatchError => println(s"applied type mismatch: $self with underlying ${self.underlyingIfProxy}, args = $args, typeParams = $typParams") // !!! 
DEBUG @@ -790,12 +538,7 @@ class TypeApplications(val self: Type) extends AnyVal { case _ if typParams.isEmpty || typParams.head.isInstanceOf[LambdaParam] => HKApply(self, args) case dealiased => - matchParams(dealiased, typParams, args) match { - case refined @ RefinedType(_, pname, _) if !Config.newHK && pname.isHkArgNameOLD => - refined.betaReduceOLD - case refined => - refined - } + matchParams(dealiased, typParams, args) } } @@ -810,34 +553,21 @@ class TypeApplications(val self: Type) extends AnyVal { * up hk type parameters matching the arguments. This is needed when unpickling * Scala2 files such as `scala.collection.generic.Mapfactory`. */ - final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = - if (Config.newHK) - self match { - case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => - HKApply(self, args) - case _ => - appliedTo(args, typeParams) - } - else { - val safeTypeParams = self match { - case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => - // This happens when unpickling e.g. scala$collection$generic$GenMapFactory$$CC - ctx.warning(i"encountered F-bounded higher-kinded type parameters for ${self.symbol}; assuming they are invariant") - fallbackTypeParamsOLD(args map alwaysZero) - case _ => - typeParams - } - appliedTo(args, safeTypeParams) - } + final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = self match { + case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => + HKApply(self, args) + case _ => + appliedTo(args, typeParams) + } /** Turn this type, which is used as an argument for * type parameter `tparam`, into a TypeBounds RHS */ - final def toBounds(tparam: MemberBinding)(implicit ctx: Context): TypeBounds = self match { + final def toBounds(tparam: TypeParamInfo)(implicit ctx: Context): TypeBounds = self match { case self: TypeBounds => // this can happen for wildcard args self case _ => - val v = tparam.memberVariance + val v = tparam.paramVariance /* Not neeeded. if (v > 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.upper(self) else if (v < 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.lower(self) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 38b45b2b0..55a964ee9 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -364,16 +364,9 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { // This twist is needed to make collection/generic/ParFactory.scala compile fourthTry(tp1, tp2) || compareRefinedSlow case _ => - if (tp2.isTypeParam) { - compareHkLambdaOLD(tp1, tp2) || - fourthTry(tp1, tp2) - } - else { - compareHkApplyOLD(tp2, tp1, inOrder = false) || - compareRefinedSlow || - fourthTry(tp1, tp2) || - compareAliasedRefined(tp2, tp1, inOrder = false) - } + compareRefinedSlow || + fourthTry(tp1, tp2) || + compareAliasedRefined(tp2, tp1, inOrder = false) // @@@ still needed? } else // fast path, in particular for refinements resulting from parameterization. 
isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) && @@ -399,7 +392,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { // so the bounds checking should look like this: // // tparams1.corresponds(tparams2)((tparam1, tparam2) => - // isSubType(tparam2.memberBounds.subst(tp2, tp1), tparam1.memberBounds)) + // isSubType(tparam2.paramBounds.subst(tp2, tp1), tparam1.paramBounds)) // // But that would invalidate a pattern such as // `[X0 <: Number] -> Number <:< [X0] -> Any` @@ -531,7 +524,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths case tp1: RefinedType => - compareHkApplyOLD(tp1, tp2, inOrder = true) || isNewSubType(tp1.parent, tp2) || compareAliasedRefined(tp1, tp2, inOrder = true) case tp1: RecType => @@ -693,10 +685,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { /** Subtype test for corresponding arguments in `args1`, `args2` according to * variances in type parameters `tparams`. */ - def isSubArgs(args1: List[Type], args2: List[Type], tparams: List[MemberBinding]): Boolean = + def isSubArgs(args1: List[Type], args2: List[Type], tparams: List[TypeParamInfo]): Boolean = if (args1.isEmpty) args2.isEmpty else args2.nonEmpty && { - val v = tparams.head.memberVariance + val v = tparams.head.paramVariance (v > 0 || isSubType(args2.head, args1.head)) && (v < 0 || isSubType(args1.head, args2.head)) } @@ -706,7 +698,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * - the type parameters of `B` match one-by-one the variances of `tparams`, * - `B` satisfies predicate `p`. */ - private def testLifted(tp1: Type, tp2: Type, tparams: List[MemberBinding], p: Type => Boolean): Boolean = { + private def testLifted(tp1: Type, tp2: Type, tparams: List[TypeParamInfo], p: Type => Boolean): Boolean = { val classBounds = tp2.classSymbols def recur(bcs: List[ClassSymbol]): Boolean = bcs match { case bc :: bcs1 => @@ -722,109 +714,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { recur(tp1.baseClasses) } - /** Handle subtype tests - * - * app <:< other if inOrder = true - * other <:< app if inOrder = false - * - * where `app` is an hk application but `other` is not. - * - * As a first step, if `app` appears on the right, try to normalize it using - * `normalizeHkApply`, if that gives a different type proceed with a regular subtype - * test using that type instead of `app`. - * - * Otherwise, if `app` has constrainable poly param as type constructor, - * perform the following steps: - * - * (1) If not `inOrder` then perform the next steps until they all succeed - * for each base type of other which - * - derives from a class bound of `app`, - * - has the same number of type parameters as `app` - * - has type parameter variances which conform to those of `app`. - * If `inOrder` then perform the same steps on the original `other` type. - * - * (2) Try to eta expand the constructor of `other`. - * - * (3a) In mode `TypevarsMissConetxt` replace the projection's hk constructor parameter - * by the eta expansion of step (2) reapplied to the projection's arguments. - * (3b) In normal mode, try to unify the projection's hk constructor parameter with - * the eta expansion of step(2) - * - * (4) If `inOrder`, test `app <: other` else test `other <: app`. 
- */ - def compareHkApplyOLD(app: RefinedType, other: Type, inOrder: Boolean): Boolean = { - def tryInfer(tp: Type): Boolean = ctx.traceIndented(i"compareHK($app, $other, inOrder = $inOrder, constr = $tp)", subtyping) { - tp match { - case tp: TypeVar => tryInfer(tp.underlying) - case param: PolyParam if canConstrain(param) => - - def unifyWith(liftedOther: Type): Boolean = { - subtyping.println(i"unify with $liftedOther") - liftedOther.typeConstructor.widen match { - case tycon: TypeRef if tycon.isEtaExpandableOLD && tycon.typeParams.nonEmpty => - val (ok, app1) = - if (ctx.mode.is(Mode.TypevarsMissContext)) - (true, EtaExpansion(tycon).appliedTo(app.argInfos)) - else - (tryInstantiate(param, EtaExpansion(tycon)), app) - ok && - (if (inOrder) isSubType(app1, other) else isSubType(other, app1)) - case _ => - false - } - } - val hkTypeParams = param.typeParams - subtyping.println(i"classBounds = ${app.classSymbols}") - subtyping.println(i"base classes = ${other.baseClasses}") - subtyping.println(i"type params = $hkTypeParams, ${app.classSymbol}") - if (inOrder) unifyWith(other) - else testLifted(other, app, hkTypeParams, unifyWith) - case _ => - // why only handle the case where one of the sides is a typevar or poly param? - // If the LHS is a hk application, then the normal logic already handles - // all other cases. Indeed, say you have - // - // type C[T] <: List[T] - // - // where C is an abstract type. Then to verify `C[Int] <: List[Int]`, - // use compareRefinedslow to get `C <: List` and verify that - // - // C#List$T = C$$hk0 = Int - // - // If the RHS is a hk application, we can also go through - // the normal logic because lower bounds are not parameterized. - // If were to re-introduce parameterized lower bounds of hk types - // we'd have to add some logic to handle them here. - false - } - } - app.isHKApplyOLD && !other.isHKApplyOLD && { - val reduced = if (inOrder) app else app.normalizeHkApplyOLD - if (reduced ne app) - if (inOrder) isSubType(reduced, other) else isSubType(other, reduced) - else tryInfer(app.typeConstructor.dealias) - } - } - - /** Compare type lambda with non-lambda type. */ - def compareHkLambdaOLD(tp1: Type, tp2: RefinedType): Boolean = tp1.stripTypeVar match { - case TypeLambdaOLD(args1, body1) => - //println(i"comparing $tp1 <:< $tp2") - tp2 match { - case TypeLambdaOLD(args2, body2) => - args1.corresponds(args2)((arg1, arg2) => - varianceConforms(BindingKind.toVariance(arg1.bindingKind), - BindingKind.toVariance(arg2.bindingKind))) && - // don't compare bounds; it would go in the wrong sense anyway. - isSubType(body1, body2) - case _ => false - } - case RefinedType(parent1, _, _) => - compareHkLambdaOLD(parent1, tp2) - case _ => - false - } - /** Say we are comparing a refined type `P{type M = U}` or `P{type M >: L <: U}`. * If P#M refers to a BaseTypeArg aliased to some other typeref P#N, * do the same comparison with `P{type N = U}` or `P{type N >: L <: U}`, respectively. 
@@ -1324,45 +1213,25 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type, original: (Type, Type) => Type) = { val tparams1 = tp1.typeParams val tparams2 = tp2.typeParams - if (!Config.newHK && tparams1.isEmpty || tparams2.isEmpty) op(tp1, tp2) - else if (Config.newHK && tparams1.isEmpty) + if (tparams1.isEmpty) if (tparams2.isEmpty) op(tp1, tp2) - else original(tp1, tp2.appliedTo(tp2.typeParams.map(_.memberBoundsAsSeenFrom(tp2)))) - else if (Config.newHK && tparams2.isEmpty) - original(tp1.appliedTo(tp1.typeParams.map(_.memberBoundsAsSeenFrom(tp1))), tp2) - else if (!Config.newHK && (tparams1.isEmpty || tparams2.isEmpty)) op(tp1, tp2) - else if (!Config.newHK && tparams1.length != tparams2.length) mergeConflict(tp1, tp2) - else if (Config.newHK) { + else original(tp1, tp2.appliedTo(tp2.typeParams.map(_.paramBoundsAsSeenFrom(tp2)))) + else if (tparams2.isEmpty) + original(tp1.appliedTo(tp1.typeParams.map(_.paramBoundsAsSeenFrom(tp1))), tp2) + else { val numArgs = tparams1.length def argRefs(tl: GenericType) = List.range(0, numArgs).map(PolyParam(tl, _)) TypeLambda( paramNames = tpnme.syntheticLambdaParamNames(numArgs), variances = (tparams1, tparams2).zipped.map((tparam1, tparam2) => - (tparam1.memberVariance + tparam2.memberVariance) / 2))( + (tparam1.paramVariance + tparam2.paramVariance) / 2))( paramBoundsExp = tl => (tparams1, tparams2).zipped.map((tparam1, tparam2) => - tl.lifted(tparams1, tparam1.memberBoundsAsSeenFrom(tp1)).bounds & - tl.lifted(tparams2, tparam2.memberBoundsAsSeenFrom(tp2)).bounds), + tl.lifted(tparams1, tparam1.paramBoundsAsSeenFrom(tp1)).bounds & + tl.lifted(tparams2, tparam2.paramBoundsAsSeenFrom(tp2)).bounds), resultTypeExp = tl => original(tl.lifted(tparams1, tp1).appliedTo(argRefs(tl)), tl.lifted(tparams2, tp2).appliedTo(argRefs(tl)))) } - else { - val bindings: List[RecType => TypeBounds] = - (tparams1, tparams2).zipped.map { (tparam1, tparam2) => - val b1: RecType => TypeBounds = - tparam1.memberBoundsAsSeenFrom(tp1).recursifyOLD(tparams1) - val b2: RecType => TypeBounds = - tparam2.memberBoundsAsSeenFrom(tp2).recursifyOLD(tparams2) - (rt: RecType) => (b1(rt) & b2(rt)) - .withBindingKind( - BindingKind.fromVariance( - (tparam1.memberVariance + tparam2.memberVariance) / 2)) - } - val app1: RecType => Type = rt => tp1.appliedTo(argRefsOLD(rt, tparams1.length)) - val app2: RecType => Type = rt => tp2.appliedTo(argRefsOLD(rt, tparams2.length)) - val body: RecType => Type = rt => op(app1(rt), app2(rt)) - TypeLambdaOLD(bindings, body) - } } /** Try to distribute `&` inside type, detect and handle conflicts @@ -1652,19 +1521,5 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { } } - override def compareHkApplyOLD(app: RefinedType, other: Type, inOrder: Boolean) = - if (app.isHKApplyOLD) - traceIndented(i"compareHkApply $app, $other, $inOrder, ${app.normalizeHkApplyOLD}") { - super.compareHkApplyOLD(app, other, inOrder) - } - else super.compareHkApplyOLD(app, other, inOrder) - - override def compareHkLambdaOLD(tp1: Type, tp2: RefinedType): Boolean = - if (tp2.isTypeParam) - traceIndented(i"compareHkLambda $tp1, $tp2") { - super.compareHkLambdaOLD(tp1, tp2) - } - else super.compareHkLambdaOLD(tp1, tp2) - override def toString = "Subtype trace:" + { try b.toString finally b.clear() } } diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index 0d02de1da..3a797cce3 100644 --- 
a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -158,7 +158,6 @@ trait TypeOps { this: Context => // TODO: Make standalone object. tp case tp: RefinedType => tp.derivedRefinedType(simplify(tp.parent, theMap), tp.refinedName, simplify(tp.refinedInfo, theMap)) - .normalizeHkApplyOLD case tp: TypeAlias => tp.derivedTypeAlias(simplify(tp.alias, theMap)) case AndType(l, r) => @@ -386,7 +385,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. // Strip all refinements from parent type, populating `refinements` and `formals` maps. def normalizeToRef(tp: Type): TypeRef = { def fail = throw new TypeError(s"unexpected parent type: $tp") - tp.dealias.normalizeHkApplyOLD match { + tp.dealias match { case tp: TypeRef => tp case tp @ RefinedType(tp1, name: TypeName, rinfo) => diff --git a/src/dotty/tools/dotc/core/TypeParamInfo.scala b/src/dotty/tools/dotc/core/TypeParamInfo.scala new file mode 100644 index 000000000..ff3c8fca7 --- /dev/null +++ b/src/dotty/tools/dotc/core/TypeParamInfo.scala @@ -0,0 +1,29 @@ +package dotty.tools.dotc.core + +import Names.Name +import Contexts.Context +import Types.{Type, TypeBounds} + +/** A common super trait of Symbol and LambdaParam. + * Used to capture the attributes of type parameters which can be implemented as either. + */ +trait TypeParamInfo { + + /** Is this the info of a type parameter? Might be wrong for symbols */ + def isTypeParam(implicit ctx: Context): Boolean + + /** The name of the type parameter */ + def paramName(implicit ctx: Context): Name + + /** The info of the type parameter */ + def paramBounds(implicit ctx: Context): TypeBounds + + /** The info of the type parameter as seen from a prefix type. + * This can be different from `memberInfo` if the binding + * is a type symbol of a class. + */ + def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds + + /** The variance of the type parameter */ + def paramVariance(implicit ctx: Context): Int +} \ No newline at end of file diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index c9d2b5029..9150925ff 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -116,12 +116,8 @@ object Types { case TypeAlias(tp) => tp.isRef(sym) case _ => this1.symbol eq sym } - case this1: RefinedType => - !this1.isTypeParam && this1.parent.isRef(sym) - case this1: RecType => - this1.parent.isRef(sym) - case _ => - false + case this1: RefinedOrRecType => this1.parent.isRef(sym) + case _ => false } /** Is this type a (neither aliased nor applied) reference to class `sym`? */ @@ -939,7 +935,7 @@ object Types { tp.underlying.underlyingClassRef(refinementOK) case tp: RefinedType => def isParamName = tp.classSymbol.typeParams.exists(_.name == tp.refinedName) - if (refinementOK || tp.isTypeParam || isParamName) tp.underlying.underlyingClassRef(refinementOK) + if (refinementOK || isParamName) tp.underlying.underlyingClassRef(refinementOK) else NoType case tp: RecType => tp.underlying.underlyingClassRef(refinementOK) @@ -2099,8 +2095,7 @@ object Types { * @param infoFn: A function that produces the info of the refinement declaration, * given the refined type itself. 
*/ - abstract case class RefinedType(parent: Type, refinedName: Name, refinedInfo: Type) - extends RefinedOrRecType with BindingType with MemberBinding { + abstract case class RefinedType(parent: Type, refinedName: Name, refinedInfo: Type) extends RefinedOrRecType { override def underlying(implicit ctx: Context) = parent @@ -2111,42 +2106,6 @@ object Types { this } - def betaReduceOLD(implicit ctx: Context): Type = refinedInfo match { - case TypeAlias(alias) if refinedName.isHkArgNameOLD => - def instantiate(rt: RecType) = new TypeMap { - def apply(t: Type) = t match { - case TypeRef(RecThis(`rt`), `refinedName`) => alias - case tp: TypeRef => - val pre1 = apply(tp.prefix) - if (pre1 ne tp.prefix) tp.newLikeThis(pre1) else tp - case _ => mapOver(t) - } - } - def substAlias(tp: Type): Type = tp.safeDealias match { - case tp @ RefinedType(p, rname, rinfo) if tp.isTypeParam => - if (rname == refinedName) p // check bounds? - else tp.derivedRefinedType(substAlias(p), rname, rinfo) - case tp: RecType => - val p1 = substAlias(tp.parent) - if (p1 ne tp.parent) tp.rebind(instantiate(tp)(p1)) - else tp - case _ => - tp - } - parent match { - case parent: LazyRef => - LazyRef(() => derivedRefinedType(parent.ref, refinedName, refinedInfo)) - case _ => - val reduced = substAlias(parent) - if (reduced ne parent) { - hk.println(i"REDUCE $this ----> ${reduced}") - reduced - } else this - } - case _ => - this - } - def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): Type = if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this else { @@ -2156,7 +2115,7 @@ object Types { // A Y-check error (incompatible types involving hk lambdas) for dotty itself. // TODO: investigate and, if possible, drop after revision. val normalizedRefinedInfo = refinedInfo.substRecThis(dummyRec, dummyRec) - RefinedType(parent, refinedName, normalizedRefinedInfo).betaReduceOLD + RefinedType(parent, refinedName, normalizedRefinedInfo) } /** Add this refinement to `parent`, provided If `refinedName` is a member of `parent`. */ @@ -2164,17 +2123,6 @@ object Types { if (parent.member(refinedName).exists) derivedRefinedType(parent, refinedName, refinedInfo) else parent - // MemberBinding methods - // TODO: Needed? 
- def isTypeParam(implicit ctx: Context) = refinedInfo match { - case tp: TypeBounds => tp.isBinding - case _ => false - } - def memberName(implicit ctx: Context) = refinedName - def memberBounds(implicit ctx: Context) = refinedInfo.bounds - def memberBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = memberBounds - def memberVariance(implicit ctx: Context) = BindingKind.toVariance(refinedInfo.bounds.bindingKind) - override def equals(that: Any) = that match { case that: RefinedType => this.parent == that.parent && @@ -2623,7 +2571,7 @@ object Types { def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): GenericType - def lifted(tparams: List[MemberBinding], t: Type)(implicit ctx: Context): Type = + def lifted(tparams: List[TypeParamInfo], t: Type)(implicit ctx: Context): Type = tparams match { case LambdaParam(poly, _) :: _ => t.subst(poly, this) @@ -2710,12 +2658,12 @@ object Types { } /** The parameter of a type lambda */ - case class LambdaParam(tl: TypeLambda, n: Int) extends MemberBinding { + case class LambdaParam(tl: TypeLambda, n: Int) extends TypeParamInfo { def isTypeParam(implicit ctx: Context) = true - def memberName(implicit ctx: Context): TypeName = tl.paramNames(n) - def memberBounds(implicit ctx: Context): TypeBounds = tl.paramBounds(n) - def memberBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds = memberBounds - def memberVariance(implicit ctx: Context): Int = tl.variances(n) + def paramName(implicit ctx: Context): TypeName = tl.paramNames(n) + def paramBounds(implicit ctx: Context): TypeBounds = tl.paramBounds(n) + def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds = paramBounds + def paramVariance(implicit ctx: Context): Int = tl.variances(n) def toArg: Type = PolyParam(tl, n) } @@ -2756,7 +2704,7 @@ object Types { case _ => defn.AnyType } - def typeParams(implicit ctx: Context): List[MemberBinding] = { + def typeParams(implicit ctx: Context): List[TypeParamInfo] = { val tparams = tycon.typeParams if (tparams.isEmpty) TypeLambda.any(args.length).typeParams else tparams } @@ -3574,10 +3522,10 @@ object Types { if (inst.exists) apply(inst) else tp case tp: HKApply => - def mapArg(arg: Type, tparam: MemberBinding): Type = { + def mapArg(arg: Type, tparam: TypeParamInfo): Type = { val saved = variance - if (tparam.memberVariance < 0) variance = -variance - else if (tparam.memberVariance == 0) variance = 0 + if (tparam.paramVariance < 0) variance = -variance + else if (tparam.paramVariance == 0) variance = 0 try this(arg) finally variance = saved } diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index f604bff62..6a51b9642 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -257,7 +257,7 @@ class TreePickler(pickler: TastyPickler) { case tpe: TypeLambda => writeByte(LAMBDAtype) val paramNames = tpe.typeParams.map(tparam => - varianceToPrefix(tparam.memberVariance) +: tparam.memberName) + varianceToPrefix(tparam.paramVariance) +: tparam.paramName) pickleMethodic(tpe.resultType, paramNames, tpe.paramBounds) case tpe: MethodType if richTypes => writeByte(METHODtype) diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index 880804b9e..656650d91 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ 
b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -51,8 +51,6 @@ class PlainPrinter(_ctx: Context) extends Printer { homogenize(tp1) & homogenize(tp2) case OrType(tp1, tp2) => homogenize(tp1) | homogenize(tp2) - case tp: RefinedType if !Config.newHK => - tp.normalizeHkApplyOLD case tp: SkolemType => homogenize(tp.info) case tp: LazyRef => diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index c772267e7..34456d0b9 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -113,36 +113,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (defn.isFunctionClass(cls)) return toTextFunction(args) if (defn.isTupleClass(cls)) return toTextTuple(args) return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close - case tp @ TypeLambdaOLD(argBoundss, body) => - val variances = argBoundss.map(b => BindingKind.toVariance(b.bindingKind)) - val prefix = ((('X' - 'A') + lambdaNestingLevel) % 26 + 'A').toChar - val paramNames = argBoundss.indices.toList.map(prefix.toString + _) - val instantiate = new TypeMap { - def contains(tp1: Type, tp2: Type): Boolean = - tp1.eq(tp2) || { - tp1.stripTypeVar match { - case tp1: RefinedOrRecType => contains(tp1.parent, tp2) - case _ => false - } - } - def apply(t: Type): Type = t match { - case TypeRef(RecThis(rt), name) if name.isHkArgNameOLD && contains(tp, rt) => - // Make up a name that prints as "Xi". Need to be careful we do not - // accidentally unique-hash to something else. That's why we can't - // use prefix = NoPrefix or a WithFixedSym instance. - TypeRef.withSymAndName( - defn.EmptyPackageClass.thisType, defn.AnyClass, - paramNames(name.hkArgIndexOLD).toTypeName) - case _ => - mapOver(t) - } - } - val instArgs = argBoundss.map(instantiate).asInstanceOf[List[TypeBounds]] - val instBody = instantiate(body).dropAlias - lambdaNestingLevel += 1 - try - return typeLambdaText(paramNames, variances, instArgs, instBody) - finally lambdaNestingLevel -=1 case tp: TypeRef => val hideType = tp.symbol is AliasPreferred if (hideType && !ctx.phase.erasedTypes && !tp.symbol.isCompleting) { diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala index e26ea1138..0344ae6c6 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -4,10 +4,9 @@ package typer import core._ import ast._ -import config.Config.newHK import Scopes._, Contexts._, Constants._, Types._, Symbols._, Names._, Flags._, Decorators._ import ErrorReporting._, Annotations._, Denotations._, SymDenotations._, StdNames._, TypeErasure._ -import TypeApplications.{AppliedType, TypeLambdaOLD} +import TypeApplications.AppliedType import util.Positions._ import config.Printers._ import ast.Trees._ @@ -99,7 +98,7 @@ trait TypeAssigner { val base = apply(tycon) var args = tp.baseArgInfos(base.typeSymbol) if (base.typeParams.length != args.length) - args = base.typeParams.map(_.memberBounds) + args = base.typeParams.map(_.paramBounds) base.appliedTo(args) case tp @ RefinedType(parent, name, rinfo) if variance > 0 => val parent1 = apply(tp.parent) @@ -413,7 +412,7 @@ trait TypeAssigner { def refineNamed(tycon: Type, arg: Tree) = arg match { case ast.Trees.NamedArg(name, argtpt) => // Dotty deviation: importing ast.Trees._ and matching on NamedArg gives a cyclic ref error - val tparam = tparams.find(_.memberName == name) match { + val tparam = 
tparams.find(_.paramName == name) match { case Some(tparam) => tparam case none => ntparams.find(_.name == name).getOrElse(NoSymbol) } } @@ -430,16 +429,7 @@ trait TypeAssigner { } def assignType(tree: untpd.TypeLambdaTree, tparamDefs: List[TypeDef], body: Tree)(implicit ctx: Context) = - if (newHK) tree.withType(TypeLambda.fromSymbols(tparamDefs.map(_.symbol), body.tpe)) - else { - val tparams = tparamDefs.map(_.symbol) - val argBindingFns = tparams.map(tparam => - tparam.info.bounds - .withBindingKind(BindingKind.fromVariance(tparam.variance)) - .recursifyOLD(tparams)) - val bodyFn = body.tpe.recursifyOLD(tparams) - tree.withType(TypeLambdaOLD(argBindingFns, bodyFn)) - } + tree.withType(TypeLambda.fromSymbols(tparamDefs.map(_.symbol), body.tpe)) def assignType(tree: untpd.ByNameTypeTree, result: Tree)(implicit ctx: Context) = tree.withType(ExprType(result.tpe))
diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 49d69f04e..da176427a 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -942,10 +942,10 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos) args = args.take(tparams.length) } - def typedArg(arg: untpd.Tree, tparam: MemberBinding) = { + def typedArg(arg: untpd.Tree, tparam: TypeParamInfo) = { val (desugaredArg, argPt) = if (ctx.mode is Mode.Pattern) - (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.memberBounds) + (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.paramBounds) else (arg, WildcardType) typed(desugaredArg, argPt)
diff --git a/src/dotty/tools/dotc/typer/Variances.scala b/src/dotty/tools/dotc/typer/Variances.scala index a2f9a0f16..a8abe5e30 100644 --- a/src/dotty/tools/dotc/typer/Variances.scala +++ b/src/dotty/tools/dotc/typer/Variances.scala @@ -84,11 +84,11 @@ object Variances { case ExprType(restpe) => varianceInType(restpe)(tparam) case tp @ HKApply(tycon, args) => - def varianceInArgs(v: Variance, args: List[Type], tparams: List[MemberBinding]): Variance = + def varianceInArgs(v: Variance, args: List[Type], tparams: List[TypeParamInfo]): Variance = args match { case arg :: args1 => varianceInArgs( - v & compose(varianceInType(arg)(tparam), tparams.head.memberVariance), + v & compose(varianceInType(arg)(tparam), tparams.head.paramVariance), args1, tparams.tail) case nil => v
-- cgit v1.2.3
From ae360e93ad7f657992fc305e1b0755ef3ff0f166 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 20:26:08 +0200 Subject: Handle signatures over uninstantiated type variables
Taking the signature over a type with uninstantiated type variables means that the signature can change later, once we instantiate the type variable. We handle this by recording uninstantiated positions of signatures and fixing them in PostTyper, when type variables are instantiated.
 - This allows us to drop the kludge of "normalizing" in derivedRefinedType. Dropping this initially revealed the problems with under-determined signatures. Now that these problems are fixed, we can drop it for good.
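To picture what an under-defined signature is, outside the compiler: every parameter position whose erased type name is not yet known is recorded under a placeholder name, and two signatures are then compared up to that placeholder. The sketch below is a standalone illustration of that comparison; `SignatureSketch`, `Sig` and the string-based names are made up for the example and are not the compiler's actual data structures.

  // Standalone sketch (illustration only): signatures with an "uninstantiated"
  // placeholder that acts as a wildcard when parameter parts are compared.
  object SignatureSketch {
    val Uninstantiated = "?$"  // placeholder name, mirroring tpnme.Uninstantiated

    final case class Sig(paramsSig: List[String], resSig: String) {
      // Under-defined: some part of the signature is still a placeholder.
      def isUnderDefined: Boolean =
        paramsSig.contains(Uninstantiated) || resSig == Uninstantiated

      // Two names are consistent if they are equal or one is the placeholder.
      private def consistent(n1: String, n2: String): Boolean =
        n1 == n2 || n1 == Uninstantiated || n2 == Uninstantiated

      // Parameter parts are consistent if they are pairwise consistent.
      def consistentParams(that: Sig): Boolean =
        paramsSig.corresponds(that.paramsSig)(consistent)
    }

    def main(args: Array[String]): Unit = {
      val before = Sig(List("scala.Int", Uninstantiated), "scala.Unit")      // type variable not yet instantiated
      val after  = Sig(List("scala.Int", "java.lang.String"), "scala.Unit")  // after instantiation, e.g. in PostTyper
      assert(before.isUnderDefined && !after.isUnderDefined)
      assert(before.consistentParams(after))  // placeholder is consistent with anything
      assert(!after.consistentParams(Sig(List("scala.Long", "java.lang.String"), "scala.Unit")))
    }
  }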
--- src/dotty/tools/dotc/core/Signature.scala | 20 ++++++++++++++++++++ src/dotty/tools/dotc/core/StdNames.scala | 2 ++ src/dotty/tools/dotc/core/TypeErasure.scala | 3 +++ src/dotty/tools/dotc/core/Types.scala | 12 ++---------- src/dotty/tools/dotc/transform/PostTyper.scala | 15 +++++++++++++-- 5 files changed, 40 insertions(+), 12 deletions(-)
diff --git a/src/dotty/tools/dotc/core/Signature.scala b/src/dotty/tools/dotc/core/Signature.scala index 54771bae5..984315f18 100644 --- a/src/dotty/tools/dotc/core/Signature.scala +++ b/src/dotty/tools/dotc/core/Signature.scala @@ -29,6 +29,19 @@ case class Signature(paramsSig: List[TypeName], resSig: TypeName) { /** Does this signature coincide with that signature on their parameter parts? */ final def sameParams(that: Signature): Boolean = this.paramsSig == that.paramsSig + /** Does this signature coincide with that signature on their parameter parts? + * This is the case if all parameter names are _consistent_, i.e. they are either + * equal or one of them is tpnme.Uninstantiated. + */ + final def consistentParams(that: Signature): Boolean = { + def consistent(name1: TypeName, name2: TypeName) = + name1 == name2 || name1 == tpnme.Uninstantiated || name2 == tpnme.Uninstantiated + def loop(names1: List[TypeName], names2: List[TypeName]): Boolean = + if (names1.isEmpty) names2.isEmpty + else names2.nonEmpty && consistent(names1.head, names2.head) && loop(names1.tail, names2.tail) + loop(this.paramsSig, that.paramsSig) + } + /** The degree to which this signature matches `that`. * If both parameter and result type names match (i.e. they are the same * or one is a wildcard), the result is `FullMatch`. @@ -52,6 +65,13 @@ case class Signature(paramsSig: List[TypeName], resSig: TypeName) { def prepend(params: List[Type], isJava: Boolean)(implicit ctx: Context) = Signature((params.map(sigName(_, isJava))) ++ paramsSig, resSig) + /** A signature is under-defined if its paramsSig part contains at least one + * `tpnme.Uninstantiated`. Under-defined signatures arise when taking a signature + * of a type that still contains uninstantiated type variables. They are eliminated + * by `fixSignature` in `PostTyper`.
+ */ + def isUnderDefined(implicit ctx: Context) = + paramsSig.contains(tpnme.Uninstantiated) || resSig == tpnme.Uninstantiated } object Signature { diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala index 778d13cab..f9ede23c5 100644 --- a/src/dotty/tools/dotc/core/StdNames.scala +++ b/src/dotty/tools/dotc/core/StdNames.scala @@ -743,6 +743,8 @@ object StdNames { (0 until num).map(syntheticLambdaParamName)(breakOut) final val Conforms = encode("<:<") + + final val Uninstantiated: TypeName = "?$" } abstract class JavaNames[N <: Name] extends DefinedNames[N] { diff --git a/src/dotty/tools/dotc/core/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala index c71726a3e..74d2d193f 100644 --- a/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/src/dotty/tools/dotc/core/TypeErasure.scala @@ -476,6 +476,9 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean sigName(tp.widen) case ExprType(rt) => sigName(defn.FunctionOf(Nil, rt)) + case tp: TypeVar => + val inst = tp.instanceOpt + if (inst.exists) sigName(inst) else tpnme.Uninstantiated case tp: TypeProxy => sigName(tp.underlying) case ErrorType | WildcardType => diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 9150925ff..d78bbd49e 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2108,15 +2108,7 @@ object Types { def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): Type = if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this - else { - // `normalizedRefinedInfo` is `refinedInfo` reduced everywhere via `reduceProjection`. - // (this is achieved as a secondary effect of substRecThis). - // It turns out this normalization is now needed; without it there's - // A Y-check error (incompatible types involving hk lambdas) for dotty itself. - // TODO: investigate and, if possible, drop after revision. - val normalizedRefinedInfo = refinedInfo.substRecThis(dummyRec, dummyRec) - RefinedType(parent, refinedName, normalizedRefinedInfo) - } + else RefinedType(parent, refinedName, refinedInfo) /** Add this refinement to `parent`, provided If `refinedName` is a member of `parent`. */ def wrapIfMember(parent: Type)(implicit ctx: Context): Type = @@ -2338,7 +2330,7 @@ object Types { final override def signature(implicit ctx: Context): Signature = { if (ctx.runId != mySignatureRunId) { mySignature = computeSignature - mySignatureRunId = ctx.runId + if (!mySignature.isUnderDefined) mySignatureRunId = ctx.runId } mySignature } diff --git a/src/dotty/tools/dotc/transform/PostTyper.scala b/src/dotty/tools/dotc/transform/PostTyper.scala index 6b0b2b073..057026a67 100644 --- a/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/src/dotty/tools/dotc/transform/PostTyper.scala @@ -115,6 +115,17 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran } } + /** If the type of `tree` is a TermRefWithSignature with an underdefined + * signature, narrow the type by re-computing the signature (which should + * be fully-defined by now). 
+ */ + private def fixSignature[T <: Tree](tree: T)(implicit ctx: Context): T = tree.tpe match { + case tpe: TermRefWithSignature if tpe.signature.isUnderDefined => + println(i"fixing $tree with type ${tree.tpe.widen.toString} with sig ${tpe.signature} to ${tpe.widen.signature}") + tree.withType(TermRef.withSig(tpe.prefix, tpe.name, tpe.widen.signature)).asInstanceOf[T] + case _ => tree + } + class PostTyperTransformer extends Transformer { private var inJavaAnnot: Boolean = false @@ -192,10 +203,10 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran case tree: Ident => tree.tpe match { case tpe: ThisType => This(tpe.cls).withPos(tree.pos) - case _ => paramFwd.adaptRef(tree) + case _ => paramFwd.adaptRef(fixSignature(tree)) } case tree: Select => - transformSelect(paramFwd.adaptRef(tree), Nil) + transformSelect(paramFwd.adaptRef(fixSignature(tree)), Nil) case tree: TypeApply => val tree1 @ TypeApply(fn, args) = normalizeTypeArgs(tree) Checking.checkBounds(args, fn.tpe.widen.asInstanceOf[PolyType]) -- cgit v1.2.3 From 6abde38668acd76cb8b4ef15b62d6015938f483c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 17:34:29 +0200 Subject: Get rid of BindingKind Under direct hk encoding this is no longer needed. Also, drop now redundant pieces of widenForMatchSelector. --- src/dotty/tools/dotc/core/Types.scala | 70 +++++----------------- src/dotty/tools/dotc/core/tasty/TastyFormat.scala | 2 +- src/dotty/tools/dotc/core/tasty/TreePickler.scala | 1 - .../tools/dotc/core/tasty/TreeUnpickler.scala | 5 +- src/dotty/tools/dotc/typer/Inferencing.scala | 4 -- 5 files changed, 17 insertions(+), 65 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index d78bbd49e..a2b52d338 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -3119,25 +3119,20 @@ object Types { unique(new CachedClassInfo(prefix, cls, classParents, decls, selfInfo)) } - /** Type bounds >: lo <: hi - * @param bindingKind: If != NoBinding, it indicates that this is - * an introduction of a higher-kinded type parameter. - * In that case it also defines the variance of the parameter. - */ - abstract case class TypeBounds(lo: Type, hi: Type)(val bindingKind: BindingKind) extends CachedProxyType with TypeType { + /** Type bounds >: lo <: hi */ + abstract case class TypeBounds(lo: Type, hi: Type) extends CachedProxyType with TypeType { assert(lo.isInstanceOf[TermType]) assert(hi.isInstanceOf[TermType]) def variance: Int = 0 - def isBinding = bindingKind != NoBinding override def underlying(implicit ctx: Context): Type = hi /** The non-alias type bounds type with given bounds */ - def derivedTypeBounds(lo: Type, hi: Type, bk: BindingKind = this.bindingKind)(implicit ctx: Context) = - if ((lo eq this.lo) && (hi eq this.hi) && (bk == this.bindingKind) && (variance == 0)) this - else TypeBounds(lo, hi, bk) + def derivedTypeBounds(lo: Type, hi: Type)(implicit ctx: Context) = + if ((lo eq this.lo) && (hi eq this.hi) && (variance == 0)) this + else TypeBounds(lo, hi) /** If this is an alias, a derived alias with the new variance, * Otherwise the type itself. 
@@ -3147,13 +3142,6 @@ object Types { case _ => this } - def withBindingKind(bk: BindingKind)(implicit ctx: Context) = this match { - case tp: TypeAlias => assert(bk == NoBinding); this - case _ => derivedTypeBounds(lo, hi, bk) - } - - //def checkBinding: this.type = { assert(isBinding); this } - def contains(tp: Type)(implicit ctx: Context): Boolean = tp match { case tp: TypeBounds => lo <:< tp.lo && tp.hi <:< hi case tp: ClassInfo => @@ -3166,12 +3154,12 @@ object Types { def & (that: TypeBounds)(implicit ctx: Context): TypeBounds = if ((this.lo frozen_<:< that.lo) && (that.hi frozen_<:< this.hi)) that else if ((that.lo frozen_<:< this.lo) && (this.hi frozen_<:< that.hi)) this - else TypeBounds(this.lo | that.lo, this.hi & that.hi, this.bindingKind join that.bindingKind) + else TypeBounds(this.lo | that.lo, this.hi & that.hi) def | (that: TypeBounds)(implicit ctx: Context): TypeBounds = if ((this.lo frozen_<:< that.lo) && (that.hi frozen_<:< this.hi)) this else if ((that.lo frozen_<:< this.lo) && (this.hi frozen_<:< that.hi)) that - else TypeBounds(this.lo & that.lo, this.hi | that.hi, this.bindingKind join that.bindingKind) + else TypeBounds(this.lo & that.lo, this.hi | that.hi) override def & (that: Type)(implicit ctx: Context) = that match { case that: TypeBounds => this & that @@ -3191,25 +3179,22 @@ object Types { /** If this type and that type have the same variance, this variance, otherwise 0 */ final def commonVariance(that: TypeBounds): Int = (this.variance + that.variance) / 2 - override def computeHash = doHash(variance * 41 + bindingKind.n, lo, hi) + override def computeHash = doHash(variance, lo, hi) override def equals(that: Any): Boolean = that match { case that: TypeBounds => (this.lo eq that.lo) && (this.hi eq that.hi) && - (this.variance == that.variance) && (this.bindingKind == that.bindingKind) + (this.variance == that.variance) case _ => false } - override def toString = { - def bkString = if (isBinding) s"|v=${BindingKind.toVariance(bindingKind)}" else "" - if (lo eq hi) s"TypeAlias($lo, $variance)" - else s"TypeBounds($lo, $hi$bkString)" - } + override def toString = + if (lo eq hi) s"TypeAlias($lo, $variance)" else s"TypeBounds($lo, $hi)" } - class RealTypeBounds(lo: Type, hi: Type, bk: BindingKind) extends TypeBounds(lo, hi)(bk) + class RealTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi) - abstract class TypeAlias(val alias: Type, override val variance: Int) extends TypeBounds(alias, alias)(NoBinding) { + abstract class TypeAlias(val alias: Type, override val variance: Int) extends TypeBounds(alias, alias) { /** pre: this is a type alias */ def derivedTypeAlias(alias: Type, variance: Int = this.variance)(implicit ctx: Context) = if ((alias eq this.alias) && (variance == this.variance)) this @@ -3240,8 +3225,8 @@ object Types { } object TypeBounds { - def apply(lo: Type, hi: Type, bk: BindingKind = NoBinding)(implicit ctx: Context): TypeBounds = - unique(new RealTypeBounds(lo, hi, bk)) + def apply(lo: Type, hi: Type)(implicit ctx: Context): TypeBounds = + unique(new RealTypeBounds(lo, hi)) def empty(implicit ctx: Context) = apply(defn.NothingType, defn.AnyType) def upper(hi: Type)(implicit ctx: Context) = apply(defn.NothingType, hi) def lower(lo: Type)(implicit ctx: Context) = apply(lo, defn.AnyType) @@ -3253,31 +3238,6 @@ object Types { def unapply(tp: TypeAlias): Option[Type] = Some(tp.alias) } - /** A value class defining the interpretation of a TypeBounds - * as either a regular type bounds or a binding (i.e. 
introduction) of a - * higher-kinded type parameter. - * TODO: drop - */ - class BindingKind(val n: Byte) extends AnyVal { - def join(that: BindingKind) = - if (this == that) this - else if (this == NoBinding || that == NoBinding) NoBinding - else NonvariantBinding - } - - val NoBinding = new BindingKind(0) // Regular type bounds - val ContravariantBinding = new BindingKind(1) // Bounds for contravariant hk type param - val NonvariantBinding = new BindingKind(2) // Bounds for nonvariant hk type param - val CovariantBinding = new BindingKind(3) // Bounds for covariant hk type param - - object BindingKind { - def fromVariance(v: Int): BindingKind = new BindingKind((v + NonvariantBinding.n).toByte) - def toVariance(bk: BindingKind): Int = { - assert(bk.n != 0) - bk.n - NonvariantBinding.n - } - } - // ----- Annotated and Import types ----------------------------------------------- /** An annotated type tpe @ annot */ diff --git a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index b23ee5aba..d9006eda9 100644 --- a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -130,7 +130,7 @@ Standard-Section: "ASTs" TopLevelStat* SUPERtype Length this_Type underlying_Type REFINEDtype Length underlying_Type refinement_NameRef info_Type APPLIEDtype Length tycon_Type arg_Type* - TYPEBOUNDS Length low_Type high_Type bindingKind_Nat? + TYPEBOUNDS Length low_Type high_Type TYPEALIAS Length alias_Type (COVARIANT | CONTRAVARIANT)? ANNOTATED Length underlying_Type fullAnnotation_Term ANDtype Length left_Type right_Type diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 6a51b9642..a4fdb2751 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -243,7 +243,6 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleType(tpe.lo, richTypes) pickleType(tpe.hi, richTypes) - if (tpe.isBinding) writeNat(tpe.bindingKind.n) } case tpe: AnnotatedType => writeByte(ANNOTATED) diff --git a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 2d9b82c97..31247c005 100644 --- a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -266,10 +266,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) { case APPLIEDtype => readType().appliedTo(until(end)(readType())) case TYPEBOUNDS => - val lo = readType() - val hi = readType() - val bk = ifBefore(end)(new BindingKind(readNat().toByte), NoBinding) - TypeBounds(lo, hi, bk) + TypeBounds(readType(), readType()) case TYPEALIAS => val alias = readType() val variance = diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala index 6becd29ec..28f3af7cb 100644 --- a/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/src/dotty/tools/dotc/typer/Inferencing.scala @@ -177,10 +177,6 @@ object Inferencing { def widenForMatchSelector(tp: Type)(implicit ctx: Context): Type = tp.widen match { case tp: TypeRef if !tp.symbol.isClass => widenForMatchSelector(tp.info.bounds.hi) - case tp @ RefinedType(parent, rname, rinfo) if !parent.typeSymbol.isClass => - tp.derivedRefinedType(widenForMatchSelector(parent), rname, rinfo) - case tp: RecType if !tp.parent.typeSymbol.isClass => - tp.derivedRecType(widenForMatchSelector(tp.parent)) case tp: HKApply => 
widenForMatchSelector(tp.upperBound) case tp: AnnotatedType => -- cgit v1.2.3 From 68abba180ccfbe7f92f0d2cae29aeae92619e054 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Jun 2016 22:59:50 +0200 Subject: Simplify appliedTo Only use one method instead of a succession of two. --- src/dotty/tools/dotc/core/TypeApplications.scala | 32 +++++++----------------- 1 file changed, 9 insertions(+), 23 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index a09039521..a685c11c7 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -483,25 +483,7 @@ class TypeApplications(val self: Type) extends AnyVal { * 3. If `T` is a polytype, instantiate it to `U1,...,Un`. */ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { - if (args.isEmpty || ctx.erasedTypes) self - else self.stripTypeVar match { // TODO investigate why we can't do safeDealias here - case self: GenericType if !args.exists(_.isInstanceOf[TypeBounds]) => - self.instantiate(args) - case EtaExpansion(self1) => - self1.appliedTo(args) - case self1: WildcardType => - self1 - case _ => - self.appliedTo(args, typeParams) - } - } - - /** Encode application `T[U1, ..., Un]` without simplifications, where - * @param self = `T` - * @param args = `U1, ..., Un` - * @param tparams are assumed to be the type parameters of `T`. - */ - final def appliedTo(args: List[Type], typParams: List[TypeParamInfo])(implicit ctx: Context): Type = { + val typParams = self.typeParams def matchParams(t: Type, tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): Type = args match { case arg :: args1 => try { @@ -515,8 +497,8 @@ class TypeApplications(val self: Type) extends AnyVal { } case nil => t } - assert(args.nonEmpty) - self.stripTypeVar.safeDealias match { + if (args.isEmpty || ctx.erasedTypes) self + else self.stripTypeVar.safeDealias match { case self: TypeLambda => if (!args.exists(_.isInstanceOf[TypeBounds])) self.instantiate(args) else { @@ -525,6 +507,8 @@ class TypeApplications(val self: Type) extends AnyVal { if (reducer.allReplaced) reduced else HKApply(self, args) } + case self: PolyType => + self.instantiate(args) case self: AndOrType => self.derivedAndOrType(self.tp1.appliedTo(args), self.tp2.appliedTo(args)) case self: TypeAlias => @@ -532,7 +516,9 @@ class TypeApplications(val self: Type) extends AnyVal { case self: TypeBounds => self.derivedTypeBounds(self.lo, self.hi.appliedTo(args)) case self: LazyRef => - LazyRef(() => self.ref.appliedTo(args, typParams)) + LazyRef(() => self.ref.appliedTo(args)) + case self: WildcardType => + self case self: TypeRef if self.symbol == defn.NothingClass => self case _ if typParams.isEmpty || typParams.head.isInstanceOf[LambdaParam] => @@ -557,7 +543,7 @@ class TypeApplications(val self: Type) extends AnyVal { case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => HKApply(self, args) case _ => - appliedTo(args, typeParams) + appliedTo(args) } /** Turn this type, which is used as an argument for -- cgit v1.2.3 From 30e15aba1226c940493c9fecd68467d7823f2c3d Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 30 Jun 2016 10:19:00 +0200 Subject: Cleanup of Signature matching Eliminate sameParams, add comments. Also, minor cleanups elsewhere. 
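As a rough picture of the three matching degrees referred to in this commit (`FullMatch`, `ParamMatch`, `NoMatch`), here is a standalone sketch; it makes simplifying assumptions (type names as plain strings, erasure as a boolean flag) and is not the compiler's `Signature` implementation.

  // Illustration only: how consistent parameter parts and result names map to
  // the three matching degrees, before and after erasure.
  object MatchDegreeSketch {
    sealed trait MatchDegree
    case object NoMatch    extends MatchDegree
    case object ParamMatch extends MatchDegree  // params consistent, results differ (pre-erasure only)
    case object FullMatch  extends MatchDegree  // params consistent, results equal or wildcard

    val Wildcard       = "_"   // stands in for a wildcard result name
    val Uninstantiated = "?$"  // stands in for tpnme.Uninstantiated

    def consistent(n1: String, n2: String): Boolean =
      n1 == n2 || n1 == Uninstantiated || n2 == Uninstantiated

    def matchDegree(params1: List[String], res1: String,
                    params2: List[String], res2: String, erased: Boolean): MatchDegree =
      if (!params1.corresponds(params2)(consistent)) NoMatch
      else if (res1 == res2 || res1 == Wildcard || res2 == Wildcard) FullMatch
      else if (!erased) ParamMatch
      else NoMatch

    def main(args: Array[String]): Unit = {
      assert(matchDegree(List("scala.Int"), "scala.Unit", List("scala.Int"), "scala.Unit", erased = false) == FullMatch)
      assert(matchDegree(List("scala.Int"), "scala.Unit", List("scala.Int"), "scala.Int",  erased = false) == ParamMatch)
      assert(matchDegree(List(Uninstantiated), "scala.Unit", List("scala.Int"), "scala.Unit", erased = true) == FullMatch)
    }
  }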
--- src/dotty/tools/dotc/config/Config.scala | 6 +++--- src/dotty/tools/dotc/core/OrderingConstraint.scala | 2 +- src/dotty/tools/dotc/core/Signature.scala | 21 +++++++++++++-------- src/dotty/tools/dotc/core/TypeComparer.scala | 4 ++-- 4 files changed, 19 insertions(+), 14 deletions(-) diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index eba42e881..796a2e693 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -77,10 +77,10 @@ object Config { */ final val checkProjections = false - /** If this flag is set it is checked that &/| only apply to types + /** If this flag is set, it is checked that &/| only apply to types * that are either both hk types or both * types. Should be used - * only for debugging as there a generic class without arguments - * can be produced in an And by Implicits.liftToClasses. + * only for debugging as the assertion may be violated by Implicits.liftToClasses, + * which can produce an And over a generic class without arguments. */ final val checkKinds = false diff --git a/src/dotty/tools/dotc/core/OrderingConstraint.scala b/src/dotty/tools/dotc/core/OrderingConstraint.scala index d9f6f5721..8dbaee25f 100644 --- a/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -473,7 +473,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, for { (poly, entries) <- boundsMap.toList n <- 0 until paramCount(entries) - if true || isBounds(entries(n)) + if isBounds(entries(n)) } yield PolyParam(poly, n) def forallParams(p: PolyParam => Boolean): Boolean = { diff --git a/src/dotty/tools/dotc/core/Signature.scala b/src/dotty/tools/dotc/core/Signature.scala index 984315f18..c0647ad1d 100644 --- a/src/dotty/tools/dotc/core/Signature.scala +++ b/src/dotty/tools/dotc/core/Signature.scala @@ -22,20 +22,25 @@ import TypeErasure.sigName * "scala.String".toTypeName) * * The signatures of non-method types are always `NotAMethod`. + * + * There are three kinds of "missing" parts of signatures: + * + * - tpnme.EMPTY Result type marker for NotAMethod and OverloadedSignature + * - tpnme.WILDCARD Arises from a Wildcard or error type + * - tpnme.Uninstantiated Arises from an uninstantiated type variable */ case class Signature(paramsSig: List[TypeName], resSig: TypeName) { import Signature._ - /** Does this signature coincide with that signature on their parameter parts? */ - final def sameParams(that: Signature): Boolean = this.paramsSig == that.paramsSig + /** Two names are consistent if they are the same or one of them is tpnme.Uninstantiated */ + private def consistent(name1: TypeName, name2: TypeName) = + name1 == name2 || name1 == tpnme.Uninstantiated || name2 == tpnme.Uninstantiated /** Does this signature coincide with that signature on their parameter parts? * This is the case if all parameter names are _consistent_, i.e. they are either * equal or on of them is tpnme.Uninstantiated. */ final def consistentParams(that: Signature): Boolean = { - def consistent(name1: TypeName, name2: TypeName) = - name1 == name2 || name1 == tpnme.Uninstantiated || name2 == tpnme.Uninstantiated def loop(names1: List[TypeName], names2: List[TypeName]): Boolean = if (names1.isEmpty) names2.isEmpty else names2.nonEmpty && consistent(names1.head, names2.head) && loop(names1.tail, names2.tail) @@ -43,14 +48,14 @@ case class Signature(paramsSig: List[TypeName], resSig: TypeName) { } /** The degree to which this signature matches `that`. 
- * If both parameter and result type names match (i.e. they are the same + * If parameter names are consistent and result type names match (i.e. they are the same * or one is a wildcard), the result is `FullMatch`. - * If only the parameter names match, the result is `ParamMatch` before erasure and + * If only the parameter names are consistent, the result is `ParamMatch` before erasure and * `NoMatch` otherwise. - * If the parameters do not match, the result is always `NoMatch`. + * If the parameters are inconsistent, the result is always `NoMatch`. */ final def matchDegree(that: Signature)(implicit ctx: Context): MatchDegree = - if (sameParams(that)) + if (consistentParams(that)) if (resSig == that.resSig || isWildcard(resSig) || isWildcard(that.resSig)) FullMatch else if (!ctx.erasedTypes) ParamMatch else NoMatch
diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 55a964ee9..bb9124fda 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -431,7 +431,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp2 @ MethodType(_, formals2) => def compareMethod = tp1 match { case tp1 @ MethodType(_, formals1) => - (tp1.signature sameParams tp2.signature) && + (tp1.signature consistentParams tp2.signature) && matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) && tp1.isImplicit == tp2.isImplicit && // needed? isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1)) case _ => false @@ -442,7 +442,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp2: PolyType => def comparePoly = tp1 match { case tp1: PolyType => - (tp1.signature sameParams tp2.signature) && + (tp1.signature consistentParams tp2.signature) && matchingTypeParams(tp1, tp2) && isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1)) case _ =>
-- cgit v1.2.3
From b6a8bc77fe3af4f6f722fdbaed8a0f7c8c6774ac Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 30 Jun 2016 11:12:33 +0200 Subject: Revert complications in asMemberOf
We needed fairly specific code to make the refinement-based hk encoding work without cyclic references. With the new encoding these complications are no longer needed.
--- src/dotty/tools/dotc/core/SymDenotations.scala | 5 ----- src/dotty/tools/dotc/core/Types.scala | 24 ++++-------------------- 2 files changed, 4 insertions(+), 25 deletions(-)
diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index 54884a24c..dbde4d6f2 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -92,11 +92,6 @@ trait SymDenotations { this: Context => explain("denotation is not a SymDenotation") } } - - /** An anonymous type denotation with an info `>: Nothing <: Any`. Used to - * avoid stackoverflows when computing members of TypeRefs - */ - lazy val anyTypeDenot = new JointRefDenotation(NoSymbol, TypeBounds.empty, Period.allInRun(ctx.runId)) } object SymDenotations {
diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index a2b52d338..6f08971ec 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1594,26 +1594,10 @@ object Types { } } - protected def asMemberOf(prefix: Type)(implicit ctx: Context): Denotation = { - // we might now get cycles over members that are in a refinement but that lack
Without the following precaution i974.scala stackoverflows when compiled - // with new hk scheme. - // TODO: Do we still need the complications here? - val savedDenot = lastDenotation - val savedSymbol = lastSymbol - if (prefix.isInstanceOf[RecThis] && name.isTypeName) { - lastDenotation = ctx.anyTypeDenot - lastSymbol = NoSymbol - } - try - if (name.isShadowedName) prefix.nonPrivateMember(name.revertShadowed) - else prefix.member(name) - finally - if (lastDenotation eq ctx.anyTypeDenot) { - lastDenotation = savedDenot - lastSymbol = savedSymbol - } - } + protected def asMemberOf(prefix: Type)(implicit ctx: Context): Denotation = + if (name.isShadowedName) prefix.nonPrivateMember(name.revertShadowed) + else prefix.member(name) + /** (1) Reduce a type-ref `W # X` or `W { ... } # U`, where `W` is a wildcard type * to an (unbounded) wildcard type. -- cgit v1.2.3 From 32c0135b59fe02a70ed0b1a693251a0028d479c8 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 30 Jun 2016 14:27:43 +0200 Subject: Refactorings of GenericTypes and underlying/upperbound 1. Make genericType a trait instead of a class. 2. Make TypeLambda a type proxy 3. Split underlying in TypeProxy into underlying and superType 4. Cleanups in many other places --- src/dotty/tools/dotc/core/SymDenotations.scala | 6 +- src/dotty/tools/dotc/core/TypeApplications.scala | 9 +- src/dotty/tools/dotc/core/TypeComparer.scala | 13 +- src/dotty/tools/dotc/core/TypeErasure.scala | 2 - src/dotty/tools/dotc/core/TypeOps.scala | 9 +- src/dotty/tools/dotc/core/Types.scala | 195 +++++++++------------ .../tools/dotc/transform/ElimStaticThis.scala | 4 +- .../tools/dotc/transform/SuperAccessors.scala | 6 - src/dotty/tools/dotc/typer/Inferencing.scala | 2 +- 9 files changed, 100 insertions(+), 146 deletions(-) diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index dbde4d6f2..e3cbf6669 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1661,12 +1661,10 @@ object SymDenotations { if (cdenot.superClassBits contains symbol.superId) foldGlb(NoType, tp.parents) else NoType case _ => - baseTypeRefOf(tp.underlying) + baseTypeRefOf(tp.superType) } - case tp: HKApply => - baseTypeRefOf(tp.upperBound) // TODO drop? 
case tp: TypeProxy => - baseTypeRefOf(tp.underlying) + baseTypeRefOf(tp.superType) case AndType(tp1, tp2) => baseTypeRefOf(tp1) & baseTypeRefOf(tp2) case OrType(tp1, tp2) => diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index a685c11c7..94ca7a3ac 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -275,7 +275,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (params.exists(_.name == self.refinedName)) parent1 else self.derivedRefinedType(parent1, self.refinedName, self.refinedInfo) case self: TypeProxy => - self.underlying.widenToNamedTypeParams(params) + self.superType.widenToNamedTypeParams(params) case self: AndOrType => self.derivedAndOrType( self.tp1.widenToNamedTypeParams(params), self.tp2.widenToNamedTypeParams(params)) @@ -317,7 +317,6 @@ class TypeApplications(val self: Type) extends AnyVal { case self: SingletonType => -1 case self: TypeVar => self.origin.knownHK case self: WildcardType => self.optBounds.knownHK - case self: PolyParam => self.underlying.knownHK case self: TypeProxy => self.underlying.knownHK case NoType | _: LazyType => 0 case _ => -1 @@ -568,7 +567,7 @@ class TypeApplications(val self: Type) extends AnyVal { final def baseArgInfos(base: Symbol)(implicit ctx: Context): List[Type] = if (self derivesFrom base) self match { - case self: HKApply => self.upperBound.baseArgInfos(base) + case self: HKApply => self.superType.baseArgInfos(base) case _ => base.typeParams.map(param => self.member(param.name).info.argInfo) } else @@ -596,7 +595,7 @@ class TypeApplications(val self: Type) extends AnyVal { final def firstBaseArgInfo(base: Symbol)(implicit ctx: Context): Type = base.typeParams match { case param :: _ if self derivesFrom base => self match { - case self: HKApply => self.upperBound.firstBaseArgInfo(base) + case self: HKApply => self.superType.firstBaseArgInfo(base) case _ => self.member(param.name).info.argInfo } case _ => @@ -621,7 +620,7 @@ class TypeApplications(val self: Type) extends AnyVal { case tp: TermRef => tp.underlying.baseTypeWithArgs(base) case tp: HKApply => - tp.upperBound.baseTypeWithArgs(base) + tp.superType.baseTypeWithArgs(base) case AndType(tp1, tp2) => tp1.baseTypeWithArgs(base) & tp2.baseTypeWithArgs(base) case OrType(tp1, tp2) => diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index bb9124fda..34fe8629e 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -677,7 +677,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { canConstrain(param1) && canInstantiate || isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2) case tycon1: TypeProxy => - isSubType(tycon1.underlying.bounds.hi.applyIfParameterized(args1), tp2) + isSubType(tycon1.superType.applyIfParameterized(args1), tp2) case _ => false } @@ -1218,20 +1218,17 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { else original(tp1, tp2.appliedTo(tp2.typeParams.map(_.paramBoundsAsSeenFrom(tp2)))) else if (tparams2.isEmpty) original(tp1.appliedTo(tp1.typeParams.map(_.paramBoundsAsSeenFrom(tp1))), tp2) - else { - val numArgs = tparams1.length - def argRefs(tl: GenericType) = List.range(0, numArgs).map(PolyParam(tl, _)) + else TypeLambda( - paramNames = tpnme.syntheticLambdaParamNames(numArgs), + paramNames = tpnme.syntheticLambdaParamNames(tparams1.length), 
variances = (tparams1, tparams2).zipped.map((tparam1, tparam2) => (tparam1.paramVariance + tparam2.paramVariance) / 2))( paramBoundsExp = tl => (tparams1, tparams2).zipped.map((tparam1, tparam2) => tl.lifted(tparams1, tparam1.paramBoundsAsSeenFrom(tp1)).bounds & tl.lifted(tparams2, tparam2.paramBoundsAsSeenFrom(tp2)).bounds), resultTypeExp = tl => - original(tl.lifted(tparams1, tp1).appliedTo(argRefs(tl)), - tl.lifted(tparams2, tp2).appliedTo(argRefs(tl)))) - } + original(tl.lifted(tparams1, tp1).appliedTo(tl.paramRefs), + tl.lifted(tparams2, tp2).appliedTo(tl.paramRefs))) } /** Try to distribute `&` inside type, detect and handle conflicts diff --git a/src/dotty/tools/dotc/core/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala index 74d2d193f..2e84102f5 100644 --- a/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/src/dotty/tools/dotc/core/TypeErasure.scala @@ -356,8 +356,6 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean case rt => tp.derivedMethodType(tp.paramNames, formals, rt) } - case tp: TypeLambda => - this(tp.resultType) case tp: PolyType => this(tp.resultType) match { case rt: MethodType => rt diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index 3a797cce3..80e0fc6f1 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -194,10 +194,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object. case tp: RefinedType => isClassRef(tp.parent) case _ => false } - def next(tp: TypeProxy) = tp.underlying match { - case TypeBounds(_, hi) => hi - case nx => nx - } + /** If `tp1` and `tp2` are typebounds, try to make one fit into the other * or to make them equal, by instantiating uninstantiated type variables. */ @@ -238,13 +235,13 @@ trait TypeOps { this: Context => // TODO: Make standalone object. case tp1: RecType => tp1.rebind(approximateOr(tp1.parent, tp2)) case tp1: TypeProxy if !isClassRef(tp1) => - approximateUnion(next(tp1) | tp2) + approximateUnion(tp1.superType | tp2) case _ => tp2 match { case tp2: RecType => tp2.rebind(approximateOr(tp1, tp2.parent)) case tp2: TypeProxy if !isClassRef(tp2) => - approximateUnion(tp1 | next(tp2)) + approximateUnion(tp1 | tp2.superType) case _ => val commonBaseClasses = tp.mapReduceOr(_.baseClasses)(intersect) val doms = dominators(commonBaseClasses, Nil) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 6f08971ec..85c8fd623 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -147,11 +147,7 @@ object Types { def loop(tp: Type) = tp match { case tp: TypeRef => val sym = tp.symbol - if (sym.isClass) sym.derivesFrom(cls) else tp.underlying.derivesFrom(cls) - case tp: TypeLambda => - tp.resType.derivesFrom(cls) - case tp: HKApply => - tp.tycon.derivesFrom(cls) + if (sym.isClass) sym.derivesFrom(cls) else tp.superType.derivesFrom(cls) case tp: TypeProxy => tp.underlying.derivesFrom(cls) case tp: AndType => @@ -218,29 +214,6 @@ object Types { /** Is this an alias TypeBounds? */ def isAlias: Boolean = this.isInstanceOf[TypeAlias] - /** Is this type a transitive refinement of the given type? - * This is true if the type consists of 0 or more refinements or other - * non-singleton proxies that lead to the `prefix` type. ClassInfos with - * the same class are counted as equal for this purpose. 
- */ - def refines(prefix: Type)(implicit ctx: Context): Boolean = { - val prefix1 = prefix.dealias - def loop(tp: Type): Boolean = - (tp eq prefix1) || { - tp match { - case base: ClassInfo => - prefix1 match { - case prefix1: ClassInfo => base.cls eq prefix1.cls - case _ => false - } - case base: SingletonType => false - case base: TypeProxy => loop(base.underlying) - case _ => false - } - } - loop(this) - } - // ----- Higher-order combinators ----------------------------------- /** Returns true if there is a part of this type that satisfies predicate `p`. @@ -300,11 +273,9 @@ object Types { constant.tpe.classSymbol case tp: TypeRef => val sym = tp.symbol - if (sym.isClass) sym else tp.underlying.classSymbol + if (sym.isClass) sym else tp.superType.classSymbol case tp: ClassInfo => tp.cls - case tp: TypeLambda => - tp.resType.classSymbol case tp: SingletonType => NoSymbol case tp: TypeProxy => @@ -332,9 +303,7 @@ object Types { tp.cls :: Nil case tp: TypeRef => val sym = tp.symbol - if (sym.isClass) sym.asClass :: Nil else tp.underlying.classSymbols - case tp: TypeLambda => - tp.resType.classSymbols + if (sym.isClass) sym.asClass :: Nil else tp.superType.classSymbols case tp: TypeProxy => tp.underlying.classSymbols case AndType(l, r) => @@ -464,8 +433,6 @@ object Types { go(tp.underlying) case tp: ClassInfo => tp.cls.findMember(name, pre, excluded) - case tp: TypeLambda => - go(tp.resType) case AndType(l, r) => goAnd(l, r) case OrType(l, r) => @@ -548,12 +515,10 @@ object Types { def goApply(tp: HKApply) = tp.tycon match { case tl: TypeLambda => - val res = - go(tl.resType).mapInfo(info => - tl.derivedTypeLambda(tl.paramNames, tl.paramBounds, info).appliedTo(tp.args)) - //println(i"remapping $tp . $name to ${res.info}")// " / ${res.toString}") - res - case _ => go(tp.underlying) + go(tl.resType).mapInfo(info => + tl.derivedTypeLambda(tl.paramNames, tl.paramBounds, info).appliedTo(tp.args)) + case _ => + go(tp.superType) } def goThis(tp: ThisType) = { @@ -878,16 +843,7 @@ object Types { /** Follow aliases and dereferences LazyRefs and instantiated TypeVars until type * is no longer alias type, LazyRef, or instantiated type variable. */ - final def dealias(implicit ctx: Context): Type = strictDealias match { - case tp: LazyRef => tp.ref.dealias - case tp => tp - } - - /** Follow aliases and instantiated TypeVars until type - * is no longer alias type, or instantiated type variable. 
- * Do not follow LazyRefs - */ - final def strictDealias(implicit ctx: Context): Type = this match { + final def dealias(implicit ctx: Context): Type = this match { case tp: TypeRef => if (tp.symbol.isClass) tp else tp.info match { @@ -899,6 +855,8 @@ object Types { if (tp1.exists) tp1.dealias else tp case tp: AnnotatedType => tp.derivedAnnotatedType(tp.tpe.dealias, tp.annot) + case tp: LazyRef => + tp.ref.dealias case _ => this } @@ -1066,7 +1024,7 @@ object Types { /** The full parent types, including all type arguments */ def parentsWithArgs(implicit ctx: Context): List[Type] = this match { - case tp: TypeProxy => tp.underlying.parentsWithArgs + case tp: TypeProxy => tp.superType.parentsWithArgs case _ => List() } @@ -1080,7 +1038,7 @@ object Types { def givenSelfType(implicit ctx: Context): Type = this match { case tp: RefinedType => tp.wrapIfMember(tp.parent.givenSelfType) case tp: ThisType => tp.tref.givenSelfType - case tp: TypeProxy => tp.underlying.givenSelfType + case tp: TypeProxy => tp.superType.givenSelfType case _ => NoType } @@ -1289,8 +1247,15 @@ object Types { * Each implementation is expected to redefine the `underlying` method. */ abstract class TypeProxy extends Type { + /** The type to which this proxy forwards operations. */ def underlying(implicit ctx: Context): Type + + /** The closest supertype of this type. This is the same as `underlying`, + * except for TypeRefs where the upper bound is returned, and HKApplys, + * where the upper bound of the constructor is re-applied to the arguments. + */ + def superType(implicit ctx: Context): Type = underlying } // Every type has to inherit one of the following four abstract type classes., @@ -1778,10 +1743,11 @@ object Types { type ThisType = TypeRef - override def underlying(implicit ctx: Context): Type = { - val res = info - assert(res != this, this) // TODO drop - res + override def underlying(implicit ctx: Context): Type = info + + override def superType(implicit ctx: Context): Type = info match { + case TypeBounds(_, hi) => hi + case _ => info } } @@ -2107,11 +2073,7 @@ object Types { case _ => false } - override def computeHash = { - assert(parent.exists) - doHash(refinedName, refinedInfo, parent) - } - + override def computeHash = doHash(refinedName, refinedInfo, parent) override def toString = s"RefinedType($parent, $refinedName, $refinedInfo)" } @@ -2161,7 +2123,7 @@ object Types { tp match { case tp: TypeRef => apply(x, tp.prefix) case tp: RecThis => RecType.this eq tp.binder - case tp: LazyRef => true // Assume a reference to be safe. 
+ case tp: LazyRef => true // To be safe, assume a reference exists case _ => foldOver(x, tp) } } @@ -2170,12 +2132,9 @@ object Types { } override def computeHash = doHash(parent) - override def toString = s"RecType($parent | $hashCode)" - private def checkInst(implicit ctx: Context): this.type = { - this - } + private def checkInst(implicit ctx: Context): this.type = this } object RecType { @@ -2521,23 +2480,31 @@ object Types { } } - /** A common superclass of PolyType and TypeLambda */ - abstract class GenericType(val paramNames: List[TypeName])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type) - extends CachedGroundType with BindingType with TermType { + /** A common supertrait of PolyType and TypeLambda */ + trait GenericType extends BindingType with TermType { - val paramBounds = paramBoundsExp(this) - val resType = resultTypeExp(this) + /** The names of the type parameters */ + val paramNames: List[TypeName] - assert(resType ne null) + /** The bounds of the type parameters */ + val paramBounds: List[TypeBounds] - override def resultType(implicit ctx: Context) = resType + /** The result type of a PolyType / body of a type lambda */ + val resType: Type /** If this is a type lambda, the variances of its parameters, otherwise Nil.*/ - def variances: List[Int] = Nil + def variances: List[Int] + + override def resultType(implicit ctx: Context) = resType + /** Unconditionally create a new generic type like this one with given elements */ + def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): GenericType + + /** Instantiate result type by substituting parameters with given arguments */ final def instantiate(argTypes: List[Type])(implicit ctx: Context): Type = resultType.substParams(this, argTypes) + /** Instantiate parameter bounds by substituting parameters with given arguments */ def instantiateBounds(argTypes: List[Type])(implicit ctx: Context): List[TypeBounds] = paramBounds.mapConserve(_.substParams(this, argTypes).bounds) @@ -2545,14 +2512,16 @@ object Types { if ((paramNames eq this.paramNames) && (paramBounds eq this.paramBounds) && (resType eq this.resType)) this else duplicate(paramNames, paramBounds, resType) - def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): GenericType + /** PolyParam references to all type parameters of this type */ + def paramRefs: List[PolyParam] = paramNames.indices.toList.map(PolyParam(this, _)) - def lifted(tparams: List[TypeParamInfo], t: Type)(implicit ctx: Context): Type = + /** The type `[tparams := paramRefs] tp`, where `tparams` can be + * either a list of type parameter symbols or a list of lambda parameters + */ + def lifted(tparams: List[TypeParamInfo], tp: Type)(implicit ctx: Context): Type = tparams match { - case LambdaParam(poly, _) :: _ => - t.subst(poly, this) - case tparams: List[Symbol] => - t.subst(tparams, tparams.indices.toList.map(PolyParam(this, _))) + case LambdaParam(poly, _) :: _ => tp.subst(poly, this) + case tparams: List[Symbol] => tp.subst(tparams, paramRefs) } override def equals(other: Any) = other match { @@ -2563,15 +2532,14 @@ object Types { other.variances == this.variances case _ => false } - - override def computeHash = { - doHash(variances ::: paramNames, resType, paramBounds) - } } /** A type for polymorphic methods */ - class PolyType(paramNames: List[TypeName])(paramBoundsExp: GenericType 
=> List[TypeBounds], resultTypeExp: GenericType => Type) - extends GenericType(paramNames)(paramBoundsExp, resultTypeExp) with MethodOrPoly { + class PolyType(val paramNames: List[TypeName])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type) + extends CachedGroundType with GenericType with MethodOrPoly { + val paramBounds = paramBoundsExp(this) + val resType = resultTypeExp(this) + def variances = Nil protected def computeSignature(implicit ctx: Context) = resultSignature @@ -2589,6 +2557,8 @@ object Types { x => resType.subst(this, x)) override def toString = s"PolyType($paramNames, $paramBounds, $resType)" + + override def computeHash = doHash(paramNames, resType, paramBounds) } object PolyType { @@ -2606,12 +2576,16 @@ object Types { // ----- HK types: TypeLambda, LambdaParam, HKApply --------------------- /** A type lambda of the form `[v_0 X_0, ..., v_n X_n] => T` */ - class TypeLambda(paramNames: List[TypeName], override val variances: List[Int])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type) - extends GenericType(paramNames)(paramBoundsExp, resultTypeExp) with ValueType { + class TypeLambda(val paramNames: List[TypeName], val variances: List[Int])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type) + extends CachedProxyType with GenericType with ValueType { + val paramBounds = paramBoundsExp(this) + val resType = resultTypeExp(this) assert(resType.isInstanceOf[TermType], this) assert(paramNames.nonEmpty) + override def underlying(implicit ctx: Context) = resType + lazy val typeParams: List[LambdaParam] = paramNames.indices.toList.map(new LambdaParam(this, _)) @@ -2631,6 +2605,8 @@ object Types { x => resType.subst(this, x)) override def toString = s"TypeLambda($variances, $paramNames, $paramBounds, $resType)" + + override def computeHash = doHash(variances ::: paramNames, resType, paramBounds) } /** The parameter of a type lambda */ @@ -2663,27 +2639,14 @@ object Types { /** A higher kinded type application `C[T_1, ..., T_n]` */ abstract case class HKApply(tycon: Type, args: List[Type]) extends CachedProxyType with ValueType { - override def underlying(implicit ctx: Context): Type = upperBound - def derivedAppliedType(tycon: Type, args: List[Type])(implicit ctx: Context): Type = - if ((tycon eq this.tycon) && (args eq this.args)) this - else tycon.appliedTo(args) - override def computeHash = doHash(tycon, args) + override def underlying(implicit ctx: Context): Type = tycon - def upperBound(implicit ctx: Context): Type = tycon match { - case tp: TypeRef => - tp.info match { - case TypeBounds(_, hi) => hi.appliedTo(args) - case _ => tp - } - case tp: TypeProxy => tp.underlying.appliedTo(args) + override def superType(implicit ctx: Context): Type = tycon match { + case tp: TypeLambda => defn.AnyType + case tp: TypeProxy => tp.superType.appliedTo(args) case _ => defn.AnyType } - - def typeParams(implicit ctx: Context): List[TypeParamInfo] = { - val tparams = tycon.typeParams - if (tparams.isEmpty) TypeLambda.any(args.length).typeParams else tparams - } /* def lowerBound(implicit ctx: Context): Type = tycon.stripTypeVar match { case tp: TypeRef => @@ -2691,6 +2654,17 @@ object Types { case _ => defn.NothingType } */ + def typeParams(implicit ctx: Context): List[TypeParamInfo] = { + val tparams = tycon.typeParams + if (tparams.isEmpty) TypeLambda.any(args.length).typeParams else tparams + } + + def derivedAppliedType(tycon: Type, args: List[Type])(implicit ctx: Context): Type = + if 
((tycon eq this.tycon) && (args eq this.args)) this + else tycon.appliedTo(args) + + override def computeHash = doHash(tycon, args) + protected def checkInst(implicit ctx: Context): this.type = { def check(tycon: Type): Unit = tycon.stripTypeVar match { case tycon: TypeRef if !tycon.symbol.isClass => @@ -2782,7 +2756,7 @@ object Types { override def underlying(implicit ctx: Context): Type = { val bounds = binder.paramBounds - if (bounds == null) NoType // this can happen if the references generic type is not initialized yet + if (bounds == null) NoType // this can happen if the referenced generic type is not initialized yet else bounds(paramNum) } // no customized hashCode/equals needed because cycle is broken in PolyType @@ -3166,8 +3140,7 @@ object Types { override def computeHash = doHash(variance, lo, hi) override def equals(that: Any): Boolean = that match { case that: TypeBounds => - (this.lo eq that.lo) && (this.hi eq that.hi) && - (this.variance == that.variance) + (this.lo eq that.lo) && (this.hi eq that.hi) && (this.variance == that.variance) case _ => false } @@ -3848,8 +3821,6 @@ object Types { class MergeError(msg: String, val tp1: Type, val tp2: Type) extends TypeError(msg) - @sharable val dummyRec = new RecType(rt => NoType) - // ----- Debug --------------------------------------------------------- @sharable var debugTrace = false diff --git a/src/dotty/tools/dotc/transform/ElimStaticThis.scala b/src/dotty/tools/dotc/transform/ElimStaticThis.scala index 3afcfa685..0601e0122 100644 --- a/src/dotty/tools/dotc/transform/ElimStaticThis.scala +++ b/src/dotty/tools/dotc/transform/ElimStaticThis.scala @@ -27,8 +27,8 @@ class ElimStaticThis extends MiniPhaseTransform { override def transformIdent(tree: tpd.Ident)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = { if (ctx.owner.enclosingMethod.is(JavaStatic)) { tree.tpe match { - case TermRef(thiz: ThisType, _) if thiz.underlying.typeSymbol.is(ModuleClass) => - ref(thiz.underlying.typeSymbol.sourceModule).select(tree.symbol) + case TermRef(thiz: ThisType, _) if thiz.cls.is(ModuleClass) => + ref(thiz.cls.sourceModule).select(tree.symbol) case TermRef(thiz: ThisType, _) => assert(tree.symbol.is(Flags.JavaStatic)) tree diff --git a/src/dotty/tools/dotc/transform/SuperAccessors.scala b/src/dotty/tools/dotc/transform/SuperAccessors.scala index ae9c493ae..6af991f27 100644 --- a/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -167,12 +167,6 @@ class SuperAccessors(thisTransformer: DenotTransformer) { val accName = sym.name.protectedAccessorName - def isThisType(tpe: Type): Boolean = tpe match { - case tpe: ThisType => !tpe.cls.is(PackageClass) - case tpe: TypeProxy => isThisType(tpe.underlying) - case _ => false - } - // if the result type depends on the this type of an enclosing class, the accessor // has to take an object of exactly this type, otherwise it's more general val receiverType = diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala index 28f3af7cb..e024192c3 100644 --- a/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/src/dotty/tools/dotc/typer/Inferencing.scala @@ -178,7 +178,7 @@ object Inferencing { case tp: TypeRef if !tp.symbol.isClass => widenForMatchSelector(tp.info.bounds.hi) case tp: HKApply => - widenForMatchSelector(tp.upperBound) + widenForMatchSelector(tp.superType) case tp: AnnotatedType => tp.derivedAnnotatedType(widenForMatchSelector(tp.tpe), tp.annot) case tp => tp -- cgit v1.2.3 From 
34a068b7f6039637d6f1330e3d071f5bf75e9cec Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 30 Jun 2016 18:42:29 +0200 Subject: Drop Config.checkKinds Allows us to drop also the involved knownHK method. Lots of other cleanups. --- src/dotty/tools/dotc/config/Config.scala | 7 -- src/dotty/tools/dotc/core/TypeApplications.scala | 75 ++-------------------- src/dotty/tools/dotc/core/TypeComparer.scala | 2 +- src/dotty/tools/dotc/core/Types.scala | 5 +- src/dotty/tools/dotc/core/tasty/TastyFormat.scala | 1 - src/dotty/tools/dotc/core/tasty/TreePickler.scala | 7 +- .../dotc/core/unpickleScala2/Scala2Unpickler.scala | 9 +-- src/dotty/tools/dotc/transform/ElimRepeated.scala | 2 +- src/dotty/tools/dotc/transform/PostTyper.scala | 3 +- src/dotty/tools/dotc/typer/Applications.scala | 4 +- src/dotty/tools/dotc/typer/Checking.scala | 8 +-- src/dotty/tools/dotc/typer/Implicits.scala | 2 +- src/dotty/tools/dotc/typer/Inferencing.scala | 2 +- 13 files changed, 20 insertions(+), 107 deletions(-) diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index 796a2e693..3cc3091b5 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -77,13 +77,6 @@ object Config { */ final val checkProjections = false - /** If this flag is set, it is checked that &/| only apply to types - * that are either both hk types or both * types. Should be used - * only for debugging as the assertion may be violated by Implicits.liftToClasses, - * which can produce an And over a generic class without arguments. - */ - final val checkKinds = false - /** The recursion depth for showing a summarized string */ final val summarizeDepth = 2 diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 94ca7a3ac..580cd6569 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -209,7 +209,7 @@ class TypeApplications(val self: Type) extends AnyVal { } } - /** If `self` is a higher-kinded type, its type parameters $hk_i, otherwise Nil */ + /** If `self` is a higher-kinded type, its type parameters, otherwise Nil */ final def hkTypeParams(implicit ctx: Context): List[TypeParamInfo] = if (isHK) typeParams else Nil @@ -295,57 +295,10 @@ class TypeApplications(val self: Type) extends AnyVal { case _ => false } - /** Computes the kind of `self` without forcing anything. - * @return 1 if type is known to be higher-kinded - * -1 if type is known to be a * type - * 0 if kind of `self` is unknown (because symbols have not yet completed) - */ - def knownHK(implicit ctx: Context): Int = self match { - case self: TypeRef => - val tsym = self.symbol - if (tsym.isClass) -1 - else tsym.infoOrCompleter match { - case completer: TypeParamsCompleter => - if (completer.completerTypeParams(tsym).nonEmpty) 1 else -1 - case _ => - if (!tsym.isCompleting || tsym.isAliasType) tsym.info.knownHK - else 0 - } - case self: RefinedType => -1 - case self: TypeLambda => 1 - case self: HKApply => -1 - case self: SingletonType => -1 - case self: TypeVar => self.origin.knownHK - case self: WildcardType => self.optBounds.knownHK - case self: TypeProxy => self.underlying.knownHK - case NoType | _: LazyType => 0 - case _ => -1 - } - - /** True if it can be determined without forcing that the class symbol - * of this application exists. Equivalent to - * - * self.classSymbol.exists - * - * but without forcing anything. 
- */ - def safeIsClassRef(implicit ctx: Context): Boolean = self.stripTypeVar match { - case self: RefinedOrRecType => - self.parent.safeIsClassRef - case self: TypeRef => - self.denot.exists && { - val sym = self.symbol - sym.isClass || - sym.isCompleted && self.info.isAlias - } - case _ => - false - } - /** Dealias type if it can be done without forcing the TypeRef's info */ def safeDealias(implicit ctx: Context): Type = self match { case self: TypeRef if self.denot.exists && self.symbol.isAliasType => - self.info.bounds.hi.stripTypeVar.safeDealias + self.superType.stripTypeVar.safeDealias case _ => self } @@ -389,14 +342,6 @@ class TypeApplications(val self: Type) extends AnyVal { //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") } - /** Eta expand the prefix in front of any refinements. */ - def EtaExpandCore(implicit ctx: Context): Type = self.stripTypeVar match { - case self: RefinedType => - self.derivedRefinedType(self.parent.EtaExpandCore, self.refinedName, self.refinedInfo) - case _ => - self.EtaExpand(self.typeParamSymbols) - } - /** If self is not higher-kinded, eta expand it. */ def ensureHK(implicit ctx: Context): Type = if (isHK) self else EtaExpansion(self) @@ -566,7 +511,8 @@ class TypeApplications(val self: Type) extends AnyVal { */ final def baseArgInfos(base: Symbol)(implicit ctx: Context): List[Type] = if (self derivesFrom base) - self match { + self.dealias match { + case self: TypeRef if !self.symbol.isClass => self.superType.baseArgInfos(base) case self: HKApply => self.superType.baseArgInfos(base) case _ => base.typeParams.map(param => self.member(param.name).info.argInfo) } @@ -591,17 +537,6 @@ class TypeApplications(val self: Type) extends AnyVal { final def baseArgTypesHi(base: Symbol)(implicit ctx: Context): List[Type] = baseArgInfos(base) mapConserve boundsToHi - /** The first type argument of the base type instance wrt `base` of this type */ - final def firstBaseArgInfo(base: Symbol)(implicit ctx: Context): Type = base.typeParams match { - case param :: _ if self derivesFrom base => - self match { - case self: HKApply => self.superType.firstBaseArgInfo(base) - case _ => self.member(param.name).info.argInfo - } - case _ => - NoType - } - /** The base type including all type arguments and applicable refinements * of this type. Refinements are applicable if they refine a member of * the parent type which furthermore is not a name-mangled type parameter. @@ -711,6 +646,6 @@ class TypeApplications(val self: Type) extends AnyVal { def elemType(implicit ctx: Context): Type = self match { case defn.ArrayOf(elemtp) => elemtp case JavaArrayType(elemtp) => elemtp - case _ => firstBaseArgInfo(defn.SeqClass) + case _ => baseArgInfos(defn.SeqClass).headOption.getOrElse(NoType) } } diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 34fe8629e..d310bd844 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -1353,7 +1353,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case _ => tp.show } if (true) throw new MergeError(s"cannot merge ${showType(tp1)} with ${showType(tp2)}", tp1, tp2) - else throw new Error(s"cannot merge ${showType(tp1)} with ${showType(tp2)}") + else throw new Error(s"cannot merge ${showType(tp1)} with ${showType(tp2)}") // flip condition for debugging } /** Merge two lists of names. 
If names in corresponding positions match, keep them, diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 85c8fd623..786c74069 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -1744,7 +1744,7 @@ object Types { type ThisType = TypeRef override def underlying(implicit ctx: Context): Type = info - + override def superType(implicit ctx: Context): Type = info match { case TypeBounds(_, hi) => hi case _ => info @@ -2203,8 +2203,6 @@ object Types { object AndType { def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = { assert(tp1.isInstanceOf[ValueType] && tp2.isInstanceOf[ValueType], i"$tp1 & $tp2 / " + s"$tp1 & $tp2") - if (Config.checkKinds) - assert((tp1.knownHK - tp2.knownHK).abs <= 1, i"$tp1 & $tp2 / " + s"$tp1 & $tp2") unchecked(tp1, tp2) } def unchecked(tp1: Type, tp2: Type)(implicit ctx: Context) = { @@ -2239,7 +2237,6 @@ object Types { object OrType { def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = { assertUnerased() - if (Config.checkKinds) assert((tp1.knownHK - tp2.knownHK).abs <= 1, i"$tp1 | $tp2") unique(new CachedOrType(tp1, tp2)) } def make(tp1: Type, tp2: Type)(implicit ctx: Context): Type = diff --git a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index d9006eda9..394d8f11a 100644 --- a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -499,7 +499,6 @@ object TastyFormat { SELFDEF | REFINEDtype => 1 case RENAMED | PARAMtype => 2 case POLYtype | METHODtype => -1 - case TYPEBOUNDS => -2 case _ => 0 } diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala index a4fdb2751..be3999533 100644 --- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -158,7 +158,7 @@ class TreePickler(pickler: TastyPickler) { case ConstantType(value) => pickleConstant(value) case tpe: TypeRef if tpe.info.isAlias && tpe.symbol.is(Flags.AliasPreferred) => - pickleType(tpe.info.bounds.hi) + pickleType(tpe.superType) case tpe: WithFixedSym => val sym = tpe.symbol def pickleRef() = @@ -240,10 +240,7 @@ class TreePickler(pickler: TastyPickler) { } case tpe: TypeBounds => writeByte(TYPEBOUNDS) - withLength { - pickleType(tpe.lo, richTypes) - pickleType(tpe.hi, richTypes) - } + withLength { pickleType(tpe.lo, richTypes); pickleType(tpe.hi, richTypes) } case tpe: AnnotatedType => writeByte(ANNOTATED) withLength { pickleType(tpe.tpe, richTypes); pickleTree(tpe.annot.tree) } diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 1da92d723..e8f3d63a9 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -135,13 +135,6 @@ object Scala2Unpickler { denot.info = ClassInfo( // final info, except possibly for typeparams ordering denot.owner.thisType, denot.classSymbol, parentRefs, decls, ost) denot.updateTypeParams(tparams) - - // Curiously the following line is needed to make pos/i859.scala compile. - // This test simply accesses scala.tools.nsc.Global. I could not track down why - // the reference is needed - referencing any field of the type parameter - // does the trick, no completion is needed (in fact such completion would - // cause cyclic references elsewhere). 
- assert(denot.typeParams.forall(_.exists)) } } @@ -593,7 +586,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val tag = readByte() val end = readNat() + readIndex if (tag == POLYtpe) { - val unusedRestperef = readNat() + val unusedRestpeRef = readNat() until(end, readSymbolRef).asInstanceOf[List[TypeSymbol]] } else Nil } diff --git a/src/dotty/tools/dotc/transform/ElimRepeated.scala b/src/dotty/tools/dotc/transform/ElimRepeated.scala index 30778267d..258b7f234 100644 --- a/src/dotty/tools/dotc/transform/ElimRepeated.scala +++ b/src/dotty/tools/dotc/transform/ElimRepeated.scala @@ -74,7 +74,7 @@ class ElimRepeated extends MiniPhaseTransform with InfoTransformer with Annotati case SeqLiteral(elems, elemtpt) => JavaSeqLiteral(elems, elemtpt) case _ => - val elemType = tree.tpe.firstBaseArgInfo(defn.SeqClass) + val elemType = tree.tpe.elemType var elemClass = elemType.classSymbol if (defn.PhantomClasses contains elemClass) elemClass = defn.ObjectClass ref(defn.DottyArraysModule) diff --git a/src/dotty/tools/dotc/transform/PostTyper.scala b/src/dotty/tools/dotc/transform/PostTyper.scala index 057026a67..b71284049 100644 --- a/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/src/dotty/tools/dotc/transform/PostTyper.scala @@ -13,6 +13,7 @@ import Types._, Contexts._, Constants._, Names._, NameOps._, Flags._, DenotTrans import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Scopes._, Denotations._ import util.Positions._ import Decorators._ +import config.Printers._ import Symbols._, TypeUtils._ /** A macro transform that runs immediately after typer and that performs the following functions: @@ -121,7 +122,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran */ private def fixSignature[T <: Tree](tree: T)(implicit ctx: Context): T = tree.tpe match { case tpe: TermRefWithSignature if tpe.signature.isUnderDefined => - println(i"fixing $tree with type ${tree.tpe.widen.toString} with sig ${tpe.signature} to ${tpe.widen.signature}") + typr.println(i"fixing $tree with type ${tree.tpe.widen.toString} with sig ${tpe.signature} to ${tpe.widen.signature}") tree.withType(TermRef.withSig(tpe.prefix, tpe.name, tpe.widen.signature)).asInstanceOf[T] case _ => tree } diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index caae422d3..aba073f3d 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -67,7 +67,7 @@ object Applications { if (extractorMemberType(unapplyResult, nme.isDefined, pos) isRef defn.BooleanClass) { if (getTp.exists) if (unapplyFn.symbol.name == nme.unapplySeq) { - val seqArg = boundsToHi(getTp.firstBaseArgInfo(defn.SeqClass)) + val seqArg = boundsToHi(getTp.elemType) if (seqArg.exists) return args map Function.const(seqArg) } else return getUnapplySelectors(getTp, args, pos) @@ -629,7 +629,7 @@ trait Applications extends Compatibility { self: Typer => def typedTypeApply(tree: untpd.TypeApply, pt: Type)(implicit ctx: Context): Tree = track("typedTypeApply") { val isNamed = hasNamedArg(tree.args) - var typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_)) + val typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_)) val typedFn = typedExpr(tree.fun, PolyProto(typedArgs.tpes, pt)) typedFn.tpe.widen match { case pt: PolyType => diff --git a/src/dotty/tools/dotc/typer/Checking.scala 
b/src/dotty/tools/dotc/typer/Checking.scala index e77222beb..3aa63aeeb 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -34,7 +34,8 @@ object Checking { import tpd._ /** A general checkBounds method that can be used for TypeApply nodes as - * well as for AppliedTypeTree nodes. + * well as for AppliedTypeTree nodes. Also checks that type arguments to + * *-type parameters are fully applied. */ def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context) = { (args, boundss).zipped.foreach { (arg, bound) => @@ -223,9 +224,6 @@ object Checking { val checker = new CheckNonCyclicMap(sym, reportErrors)(ctx.addMode(Mode.CheckCyclic)) try checker.checkInfo(info) catch { - case ex: AssertionError => - println(s"assertion error for $info") - throw ex case ex: CyclicReference => if (reportErrors) { ctx.error(i"illegal cyclic reference: ${checker.where} ${checker.lastChecked} of $sym refers back to the type itself", sym.pos) @@ -364,7 +362,7 @@ object Checking { // try to dealias to avoid a leak error val savedErrors = errors errors = prevErrors - val tp2 = apply(tp.info.bounds.hi) + val tp2 = apply(tp.superType) if (errors eq prevErrors) tp1 = tp2 else errors = savedErrors } diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala index 91a67de9a..feed398aa 100644 --- a/src/dotty/tools/dotc/typer/Implicits.scala +++ b/src/dotty/tools/dotc/typer/Implicits.scala @@ -772,7 +772,7 @@ class SearchHistory(val searchDepth: Int, val seen: Map[ClassSymbol, Int]) { case tp: RefinedType => foldOver(n + 1, tp) case tp: TypeRef if tp.info.isAlias => - apply(n, tp.info.bounds.hi) + apply(n, tp.superType) case _ => foldOver(n, tp) } diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala index e024192c3..c60f4c1f2 100644 --- a/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/src/dotty/tools/dotc/typer/Inferencing.scala @@ -176,7 +176,7 @@ object Inferencing { /** Recursively widen and also follow type declarations and type aliases. */ def widenForMatchSelector(tp: Type)(implicit ctx: Context): Type = tp.widen match { case tp: TypeRef if !tp.symbol.isClass => - widenForMatchSelector(tp.info.bounds.hi) + widenForMatchSelector(tp.superType) case tp: HKApply => widenForMatchSelector(tp.superType) case tp: AnnotatedType => -- cgit v1.2.3 From beff8f857b53096326f62b615250e39386c579c6 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 2 Jul 2016 16:51:37 +0200 Subject: Avoid orphan parameters in Constraint#replace The previous implementation of `replace` made it possible that formerly constrained parameters would stay embedded in bounds of other constraint entries.
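To illustrate the issue, here is a minimal, self-contained Scala sketch; the names `Param`, `Bounds`, `Ref` and the Map-based constraint are hypothetical stand-ins, not dotty's actual Constraint API. The point is that removing a parameter's own entry is not enough: the parameter must also be substituted out of every remaining entry's bounds, which is what the `substParam` call in the hunk below does for the real constraint.

    // Toy model of a constraint: each parameter maps to its bounds.
    object OrphanParamSketch {
      sealed trait Tpe
      final case class Param(name: String) extends Tpe
      final case class Ref(name: String) extends Tpe
      final case class Bounds(lo: Tpe, hi: Tpe)

      // Substitute `param` by `replacement` inside a (toy) type.
      def subst(tp: Tpe, param: Param, replacement: Tpe): Tpe =
        if (tp == param) replacement else tp

      // Drop `param`'s entry and substitute it out of all other bounds,
      // so no orphan reference to the removed parameter survives.
      def replace(constraint: Map[Param, Bounds], param: Param, replacement: Tpe): Map[Param, Bounds] =
        (constraint - param).map { case (p, Bounds(lo, hi)) =>
          p -> Bounds(subst(lo, param, replacement), subst(hi, param, replacement))
        }

      def main(args: Array[String]): Unit = {
        val a = Param("A")
        val b = Param("B")
        val before = Map(a -> Bounds(Ref("Nothing"), Ref("Any")), b -> Bounds(a, Ref("Any")))
        // After replacing A by Int, B's lower bound refers to Int, not to the removed A.
        println(replace(before, a, Ref("Int")))
      }
    }
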
--- src/dotty/tools/dotc/core/OrderingConstraint.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/core/OrderingConstraint.scala b/src/dotty/tools/dotc/core/OrderingConstraint.scala index 8dbaee25f..1e284c341 100644 --- a/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -419,7 +419,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def replaceIn(tp: Type, isUpper: Boolean): Type = tp match { case `param` => normalize(replacement, isUpper) case tp: AndOrType if isUpper == tp.isAnd => recombine(tp, replaceIn, isUpper) - case _ => tp + case _ => tp.substParam(param, replacement) } bounds.derivedTypeBounds(replaceIn(lo, isUpper = false), replaceIn(hi, isUpper = true)) -- cgit v1.2.3 From eebb4b07bf3011de56f297e7d5357cbc1ee7d623 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sun, 3 Jul 2016 18:15:41 +0200 Subject: Fix bug in printing untyped New nodes. Printed as before. --- src/dotty/tools/dotc/printing/RefinedPrinter.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 34456d0b9..ca62827af 100644 --- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -299,7 +299,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { "new " ~ { tpt match { case tpt: Template => toTextTemplate(tpt, ofNew = true) - case _ => toTextLocal(tpt.typeOpt.underlyingClassRef(refinementOK = false)) + case _ => + if (tpt.hasType) + toTextLocal(tpt.typeOpt.underlyingClassRef(refinementOK = false)) + else + toTextLocal(tpt) } } case Pair(l, r) => -- cgit v1.2.3 From fd62c7b6dc6882f658ba2d614cb95a7141842929 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 7 Jul 2016 15:14:37 +0200 Subject: Disallow higher-kinded types that simulate general existential types We cannot handle such types in general. 
So we now check that a hk application C[args] where some of the arguments are wildcards does not have as a supertype a hk application ([X] -> B)[args] --- src/dotty/tools/dotc/core/Mode.scala | 5 + src/dotty/tools/dotc/core/TypeApplications.scala | 41 +++- src/dotty/tools/dotc/typer/Checking.scala | 22 ++ test/dotc/tests.scala | 1 - tests/neg/existentials.scala | 61 ++++++ tests/pos-scala2/GenTraversableFactory.scala | 252 +++++++++++++++++++++++ tests/pos-scala2/t6014.scala | 13 ++ tests/pos/GenTraversableFactory.scala | 252 ----------------------- tests/pos/t6014.scala | 13 -- 9 files changed, 389 insertions(+), 271 deletions(-) create mode 100644 tests/neg/existentials.scala create mode 100644 tests/pos-scala2/GenTraversableFactory.scala create mode 100644 tests/pos-scala2/t6014.scala delete mode 100644 tests/pos/GenTraversableFactory.scala delete mode 100644 tests/pos/t6014.scala diff --git a/src/dotty/tools/dotc/core/Mode.scala b/src/dotty/tools/dotc/core/Mode.scala index 0e188ace2..3e9b7effe 100644 --- a/src/dotty/tools/dotc/core/Mode.scala +++ b/src/dotty/tools/dotc/core/Mode.scala @@ -84,5 +84,10 @@ object Mode { /** Use Scala2 scheme for overloading and implicit resolution */ val OldOverloadingResolution = newMode(14, "OldOverloadingResolution") + /** Allow hk applications of type lambdas to wildcard arguments; + * used for checking that such applications do not normally arise + */ + val AllowLambdaWildcardApply = newMode(15, "AllowHKApplyToWildcards") + val PatternOrType = Pattern | Type } diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 580cd6569..be0c08d15 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -129,8 +129,10 @@ object TypeApplications { /** A type map that tries to reduce a (part of) the result type of the type lambda `tycon` * with the given `args`(some of which are wildcard arguments represented by type bounds). * Non-wildcard arguments are substituted everywhere as usual. A wildcard argument - * `>: L <: H` is substituted for a type lambda parameter `X` only if `X` appears - * in a toplevel refinement of the form + * `>: L <: H` is substituted for a type lambda parameter `X` only under certain conditions. + * + * 1. If Mode.AllowLambdaWildcardApply is set: + * The wildcard argument is substituted only if `X` appears in a toplevel refinement of the form * * { type A = X } * @@ -141,19 +143,48 @@ object TypeApplications { * * The `allReplaced` field indicates whether all occurrences of type lambda parameters * in the reduced type have been replaced with arguments. + * + * 2. If Mode.AllowLambdaWildcardApply is not set: + * All refinements of the form + * + * { type A = X } + * + * are replaced by: + * + * { type A >: L <: U } + * + * Any other occurrence of `X` in `tycon` is replaced by `U`, if the + * occurrence of `X` in `tycon` is covariant, or nonvariant, or by `L`, + * if the occurrence is contravariant. + * + * The idea is that the `AllowLambdaWildcardApply` mode is used to check whether + * a type can be soundly reduced, and to give an error or warning if that + * is not the case. By contrast, the default mode, with `AllowLambdaWildcardApply` + * not set, reduces all applications even if this yields a different type, so + * its postcondition is that no type parameters of `tycon` appear in the + * result type. 
Using this mode, we can guarantee that `appliedTo` will never + * produce a higher-kinded application with a type lambda as type constructor. */ class Reducer(tycon: TypeLambda, args: List[Type])(implicit ctx: Context) extends TypeMap { private var available = Set((0 until args.length): _*) var allReplaced = true def hasWildcardArg(p: PolyParam) = p.binder == tycon && args(p.paramNum).isInstanceOf[TypeBounds] + def canReduceWildcard(p: PolyParam) = + !ctx.mode.is(Mode.AllowLambdaWildcardApply) || available.contains(p.paramNum) def apply(t: Type) = t match { - case t @ TypeAlias(p: PolyParam) if hasWildcardArg(p) && available.contains(p.paramNum) => + case t @ TypeAlias(p: PolyParam) if hasWildcardArg(p) && canReduceWildcard(p) => available -= p.paramNum args(p.paramNum) case p: PolyParam if p.binder == tycon => - if (hasWildcardArg(p)) { allReplaced = false; p } - else args(p.paramNum) + args(p.paramNum) match { + case TypeBounds(lo, hi) => + if (ctx.mode.is(Mode.AllowLambdaWildcardApply)) { allReplaced = false; p } + else if (variance < 0) lo + else hi + case arg => + arg + } case _: TypeBounds | _: HKApply => val saved = available available = Set() diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index 3aa63aeeb..aa13bdc3d 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -55,6 +55,24 @@ object Checking { def checkBounds(args: List[tpd.Tree], poly: GenericType)(implicit ctx: Context): Unit = checkBounds(args, poly.paramBounds, _.substParams(poly, _)) + /** If type is a higher-kinded application with wildcard arguments, + * check that it or one of its supertypes can be reduced to a normal application. + * Unreducible applications correspond to general existentials, and we + * cannot handle those. + */ + def checkWildcardHKApply(tp: Type, pos: Position)(implicit ctx: Context): Unit = tp match { + case tp @ HKApply(tycon, args) if args.exists(_.isInstanceOf[TypeBounds]) => + tycon match { + case tycon: TypeLambda => + ctx.errorOrMigrationWarning( + d"unreducible application of higher-kinded type $tycon to wildcard arguments", + pos) + case _ => + checkWildcardHKApply(tp.superType, pos) + } + case _ => + } + /** Traverse type tree, performing the following checks: * 1. All arguments of applied type trees must conform to their bounds. * 2. Prefixes of type selections and singleton types must be realizable. 
@@ -74,6 +92,10 @@ object Checking { val bounds = tparams.map(tparam => tparam.info.asSeenFrom(tycon.tpe.normalizedPrefix, tparam.owner.owner).bounds) checkBounds(args, bounds, _.substDealias(tparams, _)) + + def checkValidIfHKApply(implicit ctx: Context): Unit = + checkWildcardHKApply(tycon.tpe.appliedTo(args.map(_.tpe)), tree.pos) + checkValidIfHKApply(ctx.addMode(Mode.AllowLambdaWildcardApply)) case Select(qual, name) if name.isTypeName => checkRealizable(qual.tpe, qual.pos) case SelectFromTypeTree(qual, name) if name.isTypeName => diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index f98b8114c..4361ccc13 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -29,7 +29,6 @@ class tests extends CompilerTest { else List("-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef") } - val testPickling = List("-Xprint-types", "-Ytest-pickler", "-Ystop-after:pickler") val twice = List("#runs", "2") diff --git a/tests/neg/existentials.scala b/tests/neg/existentials.scala new file mode 100644 index 000000000..4798504d9 --- /dev/null +++ b/tests/neg/existentials.scala @@ -0,0 +1,61 @@ +object TestList { + + var x: ([X] -> List[List[X]])[_] = List(List(1)) // error: unreducible + var y: ([X] -> List[Seq[X]])[_] = List(List(1)) // error: unreducible + + x = x + y = y + y = x + + val h = x.head + val x1: List[_] = h + + var z: List[_] = x + +} +object TestSet { + + var x: ([Y] -> Set[Set[Y]])[_] = Set(Set("a")) // error: unreducible + var y: ([Y] -> Set[Iterable[Y]])[_] = Set(Set("a")) // error: unreducible + + x = x + y = y + + val h = x.head + val h1: Set[_] = h + + // val p = x.+ // infinite loop in implicit search + + var z: Set[_] = x + +} +class TestX { + + class C[T](x: T) { + def get: T = x + def cmp: T => Boolean = (x == _) + } + + val x: ([Y] -> C[C[Y]])[_] = new C(new C("a")) // error: unreducible + + type CC[X] = C[C[X]] + val y: CC[_] = ??? // error: unreducible + + type D[X] <: C[X] + + type DD = [X] -> D[D[X]] + val z: DD[_] = ??? // error: unreducible + + val g = x.get + + val c = x.cmp +} + +object Test6014 { + case class CC[T](key: T) + type Alias[T] = Seq[CC[T]] + + def f(xs: Seq[CC[_]]) = xs map { case CC(x) => CC(x) } // ok + def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // error: unreducible application +} + diff --git a/tests/pos-scala2/GenTraversableFactory.scala b/tests/pos-scala2/GenTraversableFactory.scala new file mode 100644 index 000000000..2f93ab27b --- /dev/null +++ b/tests/pos-scala2/GenTraversableFactory.scala @@ -0,0 +1,252 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + +package scala +package collection +package generic + +import scala.language.higherKinds + +/** A template for companion objects of `Traversable` and subclasses thereof. + * This class provides a set of operations to create `$Coll` objects. + * It is typically inherited by companion objects of subclasses of `Traversable`. + * + * @since 2.8 + * + * @define coll collection + * @define Coll `Traversable` + * @define factoryInfo + * This object provides a set of operations to create `$Coll` values. + * @author Martin Odersky + * @version 2.8 + * @define canBuildFromInfo + * The standard `CanBuildFrom` instance for $Coll objects. + * @see CanBuildFrom + * @define genericCanBuildFromInfo + * The standard `CanBuildFrom` instance for $Coll objects. 
+ * The created value is an instance of class `GenericCanBuildFrom`, + * which forwards calls to create a new builder to the + * `genericBuilder` method of the requesting collection. + * @see CanBuildFrom + * @see GenericCanBuildFrom + */ +abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]] +extends GenericCompanion[CC] { + + private[this] val ReusableCBFInstance: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] { + override def apply() = newBuilder[Nothing] + } + def ReusableCBF: GenericCanBuildFrom[Nothing] = ReusableCBFInstance + + /** A generic implementation of the `CanBuildFrom` trait, which forwards + * all calls to `apply(from)` to the `genericBuilder` method of + * $coll `from`, and which forwards all calls of `apply()` to the + * `newBuilder` method of this factory. + */ + class GenericCanBuildFrom[A] extends CanBuildFrom[CC[_], A, CC[A]] { + /** Creates a new builder on request of a collection. + * @param from the collection requesting the builder to be created. + * @return the result of invoking the `genericBuilder` method on `from`. + */ + def apply(from: Coll) = from.genericBuilder[A] + + /** Creates a new builder from scratch + * @return the result of invoking the `newBuilder` method of this factory. + */ + def apply() = newBuilder[A] + } + + /** Concatenates all argument collections into a single $coll. + * + * @param xss the collections that are to be concatenated. + * @return the concatenation of all the collections. + */ + def concat[A](xss: Traversable[A]*): CC[A] = { + val b = newBuilder[A] + // At present we're using IndexedSeq as a proxy for "has a cheap size method". + if (xss forall (_.isInstanceOf[IndexedSeq[_]])) + b.sizeHint(xss.map(_.size).sum) + + for (xs <- xss.seq) b ++= xs + b.result() + } + + /** Produces a $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A]] = + tabulate(n1)(_ => fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]]] = + tabulate(n1)(_ => fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. 
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3nd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]]] = + tabulate(n1)(_ => fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3nd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]]] = + tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A]] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]]] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3nd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. 
+ */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3nd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Produces a $coll containing a sequence of increasing of integers. + * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[T: Integral](start: T, end: T): CC[T] = range(start, end, implicitly[Integral[T]].one) + + /** Produces a $coll containing equally spaced values in some integer interval. + * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[T: Integral](start: T, end: T, step: T): CC[T] = { + val num = implicitly[Integral[T]] + import num._ + + if (step == zero) throw new IllegalArgumentException("zero step") + val b = newBuilder[T] + b sizeHint immutable.NumericRange.count(start, end, step, isInclusive = false) + var i = start + while (if (/*num.mkOrderingOps*/(step) < zero) end < i else i < end) { + b += i + i += step + } + b.result() + } + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained inthe $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: A, len: Int)(f: A => A): CC[A] = { + val b = newBuilder[A] + if (len > 0) { + b.sizeHint(len) + var acc = start + var i = 1 + b += acc + + while (i < len) { + acc = f(acc) + i += 1 + b += acc + } + } + b.result() + } +} diff --git a/tests/pos-scala2/t6014.scala b/tests/pos-scala2/t6014.scala new file mode 100644 index 000000000..02535f377 --- /dev/null +++ b/tests/pos-scala2/t6014.scala @@ -0,0 +1,13 @@ +object Test { + case class CC[T](key: T) + type Alias[T] = Seq[CC[T]] + + def f(xs: Seq[CC[_]]) = xs map { case CC(x) => CC(x) } // ok + def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // migration warning: unreducible application + // ./a.scala:11: error: missing parameter type for expanded function + // The argument types of an anonymous function must be fully known. (SLS 8.5) + // Expected type was: ? 
+ // def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // fails + // ^ + // one error found +} diff --git a/tests/pos/GenTraversableFactory.scala b/tests/pos/GenTraversableFactory.scala deleted file mode 100644 index 2f93ab27b..000000000 --- a/tests/pos/GenTraversableFactory.scala +++ /dev/null @@ -1,252 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `Traversable` and subclasses thereof. - * This class provides a set of operations to create `$Coll` objects. - * It is typically inherited by companion objects of subclasses of `Traversable`. - * - * @since 2.8 - * - * @define coll collection - * @define Coll `Traversable` - * @define factoryInfo - * This object provides a set of operations to create `$Coll` values. - * @author Martin Odersky - * @version 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * @see CanBuildFrom - * @define genericCanBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * The created value is an instance of class `GenericCanBuildFrom`, - * which forwards calls to create a new builder to the - * `genericBuilder` method of the requesting collection. - * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]] -extends GenericCompanion[CC] { - - private[this] val ReusableCBFInstance: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] { - override def apply() = newBuilder[Nothing] - } - def ReusableCBF: GenericCanBuildFrom[Nothing] = ReusableCBFInstance - - /** A generic implementation of the `CanBuildFrom` trait, which forwards - * all calls to `apply(from)` to the `genericBuilder` method of - * $coll `from`, and which forwards all calls of `apply()` to the - * `newBuilder` method of this factory. - */ - class GenericCanBuildFrom[A] extends CanBuildFrom[CC[_], A, CC[A]] { - /** Creates a new builder on request of a collection. - * @param from the collection requesting the builder to be created. - * @return the result of invoking the `genericBuilder` method on `from`. - */ - def apply(from: Coll) = from.genericBuilder[A] - - /** Creates a new builder from scratch - * @return the result of invoking the `newBuilder` method of this factory. - */ - def apply() = newBuilder[A] - } - - /** Concatenates all argument collections into a single $coll. - * - * @param xss the collections that are to be concatenated. - * @return the concatenation of all the collections. - */ - def concat[A](xss: Traversable[A]*): CC[A] = { - val b = newBuilder[A] - // At present we're using IndexedSeq as a proxy for "has a cheap size method". - if (xss forall (_.isInstanceOf[IndexedSeq[_]])) - b.sizeHint(xss.map(_.size).sum) - - for (xs <- xss.seq) b ++= xs - b.result() - } - - /** Produces a $coll containing the results of some element computation a number of times. - * @param n the number of elements contained in the $coll. - * @param elem the element computation - * @return A $coll that contains the results of `n` evaluations of `elem`. 
- */ - def fill[A](n: Int)(elem: => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += elem - i += 1 - } - b.result() - } - - /** Produces a two-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A]] = - tabulate(n1)(_ => fill(n2)(elem)) - - /** Produces a three-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]]] = - tabulate(n1)(_ => fill(n2, n3)(elem)) - - /** Produces a four-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]]] = - tabulate(n1)(_ => fill(n2, n3, n4)(elem)) - - /** Produces a five-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]]] = - tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) - - /** Produces a $coll containing values of a given function over a range of integer values starting from 0. - * @param n The number of elements in the $coll - * @param f The function computing element values - * @return A $coll consisting of elements `f(0), ..., f(n -1)` - */ - def tabulate[A](n: Int)(f: Int => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += f(i) - i += 1 - } - b.result() - } - - /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2)` - * for `0 <= i1 < n1` and `0 <= i2 < n2`. - */ - def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A]] = - tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) - - /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
- * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]]] = - tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) - - /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) - - /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) - - /** Produces a $coll containing a sequence of increasing of integers. - * - * @param start the first element of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @return a $coll with values `start, start + 1, ..., end - 1` - */ - def range[T: Integral](start: T, end: T): CC[T] = range(start, end, implicitly[Integral[T]].one) - - /** Produces a $coll containing equally spaced values in some integer interval. - * @param start the start value of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @param step the difference between successive elements of the $coll (must be positive or negative) - * @return a $coll with values `start, start + step, ...` up to, but excluding `end` - */ - def range[T: Integral](start: T, end: T, step: T): CC[T] = { - val num = implicitly[Integral[T]] - import num._ - - if (step == zero) throw new IllegalArgumentException("zero step") - val b = newBuilder[T] - b sizeHint immutable.NumericRange.count(start, end, step, isInclusive = false) - var i = start - while (if (/*num.mkOrderingOps*/(step) < zero) end < i else i < end) { - b += i - i += step - } - b.result() - } - - /** Produces a $coll containing repeated applications of a function to a start value. 
- * - * @param start the start value of the $coll - * @param len the number of elements contained inthe $coll - * @param f the function that's repeatedly applied - * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: A, len: Int)(f: A => A): CC[A] = { - val b = newBuilder[A] - if (len > 0) { - b.sizeHint(len) - var acc = start - var i = 1 - b += acc - - while (i < len) { - acc = f(acc) - i += 1 - b += acc - } - } - b.result() - } -} diff --git a/tests/pos/t6014.scala b/tests/pos/t6014.scala deleted file mode 100644 index 26e258a27..000000000 --- a/tests/pos/t6014.scala +++ /dev/null @@ -1,13 +0,0 @@ -object Test { - case class CC[T](key: T) - type Alias[T] = Seq[CC[T]] - - def f(xs: Seq[CC[_]]) = xs map { case CC(x) => CC(x) } // ok - def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // fails - // ./a.scala:11: error: missing parameter type for expanded function - // The argument types of an anonymous function must be fully known. (SLS 8.5) - // Expected type was: ? - // def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // fails - // ^ - // one error found -} -- cgit v1.2.3 From 4693a78edf4bf52b9060a99ca48785d077e5599b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 7 Jul 2016 18:24:44 +0200 Subject: Less eager removal of type parameters from constraint Previously, a unified or otherwise instantiated type parameter would be removed from the constraint, i.e. if it was the last parameter of its polytype to be instantiated, the polytype would be dropped. This is a potential problem since it means that the alias `param = instance` is forgotten whereas we might still need it in the same subtype test sequence. The solution is to delay cleaning up polytypes until all associated type variables are fully instantiated. This change uncovered another bug, where we failed to follow an existing instantiation when adding to a constraint. This manifested itself in deep subtype errors for run/colltest1 and some others. --- src/dotty/tools/dotc/core/Constraint.scala | 9 ++++---- src/dotty/tools/dotc/core/ConstraintHandling.scala | 15 +++++++++---- src/dotty/tools/dotc/core/OrderingConstraint.scala | 26 +++++++++------------- 3 files changed, 26 insertions(+), 24 deletions(-) diff --git a/src/dotty/tools/dotc/core/Constraint.scala b/src/dotty/tools/dotc/core/Constraint.scala index 38f714131..e480d1bfe 100644 --- a/src/dotty/tools/dotc/core/Constraint.scala +++ b/src/dotty/tools/dotc/core/Constraint.scala @@ -117,12 +117,11 @@ abstract class Constraint extends Showable { */ def narrowBound(param: PolyParam, bound: Type, isUpper: Boolean)(implicit ctx: Context): This - /** Is entry associated with `pt` removable? - * @param removedParam The index of a parameter which is still present in the - * entry array, but is going to be removed at the same step, - * or -1 if no such parameter exists. + /** Is entry associated with `pt` removable? This is the case if + * all type parameters of the entry are associated with type variables + * which have their `inst` fields set. */ - def isRemovable(pt: GenericType, removedParam: Int = -1): Boolean + def isRemovable(pt: GenericType): Boolean /** A new constraint with all entries coming from `pt` removed.
*/ def remove(pt: GenericType)(implicit ctx: Context): This diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index ace441566..dfce9317b 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -164,6 +164,7 @@ trait ConstraintHandling { } } } + assert(constraint.contains(param)) val bound = if (fromBelow) constraint.fullLowerBound(param) else constraint.fullUpperBound(param) val inst = avoidParam(bound) typr.println(s"approx ${param.show}, from below = $fromBelow, bound = ${bound.show}, inst = ${inst.show}") @@ -282,10 +283,16 @@ trait ConstraintHandling { else NoType case bound: TypeVar if constraint contains bound.origin => prune(bound.underlying) - case bound: PolyParam if constraint contains bound => - if (!addParamBound(bound)) NoType - else if (fromBelow) defn.NothingType - else defn.AnyType + case bound: PolyParam => + constraint.entry(bound) match { + case NoType => bound + case _: TypeBounds => + if (!addParamBound(bound)) NoType + else if (fromBelow) defn.NothingType + else defn.AnyType + case inst => + prune(inst) + } case _ => bound } diff --git a/src/dotty/tools/dotc/core/OrderingConstraint.scala b/src/dotty/tools/dotc/core/OrderingConstraint.scala index 1e284c341..b0170b67c 100644 --- a/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -11,6 +11,7 @@ import config.Config import config.Printers._ import collection.immutable.BitSet import reflect.ClassTag +import annotation.tailrec object OrderingConstraint { @@ -151,7 +152,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def contains(param: PolyParam): Boolean = { val entries = boundsMap(param.binder) - entries != null && entries(param.paramNum).isInstanceOf[TypeBounds] + entries != null && isBounds(entries(param.paramNum)) } def contains(tvar: TypeVar): Boolean = { @@ -428,7 +429,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, } var current = - if (isRemovable(poly, idx)) remove(poly) else updateEntry(param, replacement) + if (isRemovable(poly)) remove(poly) else updateEntry(param, replacement) current.foreachParam {(p, i) => current = boundsLens.map(this, current, p, i, replaceParam(_, p, i)) current = lowerLens.map(this, current, p, i, removeParam) @@ -449,20 +450,15 @@ class OrderingConstraint(private val boundsMap: ParamBounds, newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap)) } - def isRemovable(pt: GenericType, removedParam: Int = -1): Boolean = { + def isRemovable(pt: GenericType): Boolean = { val entries = boundsMap(pt) - var noneLeft = true - var i = paramCount(entries) - while (noneLeft && i > 0) { - i -= 1 - if (i != removedParam && isBounds(entries(i))) noneLeft = false - else typeVar(entries, i) match { - case tv: TypeVar => - if (!tv.inst.exists) noneLeft = false // need to keep line around to compute instType - case _ => + @tailrec def allRemovable(last: Int): Boolean = + if (last < 0) true + else typeVar(entries, last) match { + case tv: TypeVar => tv.inst.exists && allRemovable(last - 1) + case _ => false } - } - noneLeft + allRemovable(paramCount(entries) - 1) } // ---------- Exploration -------------------------------------------------------- @@ -473,7 +469,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, for { (poly, entries) <- boundsMap.toList n <- 0 until paramCount(entries) - if isBounds(entries(n)) + if 
entries(n).exists } yield PolyParam(poly, n) def forallParams(p: PolyParam => Boolean): Boolean = { -- cgit v1.2.3 From 2ddb84947324ced70b26f536bd73d6092e62f0a2 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 4 Jul 2016 21:47:31 +0200 Subject: Unify aliases when taking a lub. --- src/dotty/tools/dotc/core/TypeComparer.scala | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index d310bd844..e3c05e55f 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -1241,7 +1241,23 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp1: RefinedType => tp2 match { case tp2: RefinedType if tp1.refinedName == tp2.refinedName => - tp1.derivedRefinedType(tp1.parent & tp2.parent, tp1.refinedName, tp1.refinedInfo & tp2.refinedInfo) + // Given two refinements `T1 { X = S1 }` and `T2 { X = S2 }`, if `S1 =:= S2` + // (possibly by instantiating type parameters), rewrite to `T1 & T2 { X = S1 }`. + // Otherwise rewrite to `T1 & T2 { X B }` where `B` is the conjunction of + // the bounds of `X` in `T1` and `T2`. + // The first rule above is contentious because it cuts the constraint set. + // But without it we would replace the two aliases by + // `T { X >: S1 | S2 <: S1 & S2 }`, which looks weird and is probably + // not what's intended. + val rinfo1 = tp1.refinedInfo + val rinfo2 = tp2.refinedInfo + val parent = tp1.parent & tp2.parent + val rinfo = + if (rinfo1.isAlias && rinfo2.isAlias && isSameType(rinfo1, rinfo2)) + rinfo1 + else + rinfo1 & rinfo2 + tp1.derivedRefinedType(parent, tp1.refinedName, rinfo) case _ => NoType } @@ -1505,7 +1521,7 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { } override def addConstraint(param: PolyParam, bound: Type, fromBelow: Boolean): Boolean = - traceIndented(i"add constraint $param ${if (fromBelow) ">:" else "<:"} $bound $frozenConstraint") { + traceIndented(i"add constraint $param ${if (fromBelow) ">:" else "<:"} $bound $frozenConstraint, constraint = ${ctx.typerState.constraint}") { super.addConstraint(param, bound, fromBelow) } -- cgit v1.2.3 From c541ef911b963a3d49613aecf76abc8f0fa00b67 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 7 Jul 2016 16:15:03 +0200 Subject: Do the right thing for NoDenotation.mapInfo --- src/dotty/tools/dotc/core/SymDenotations.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index e3cbf6669..3a1c589a9 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1824,6 +1824,7 @@ object SymDenotations { override def isType = false override def owner: Symbol = throw new AssertionError("NoDenotation.owner") override def computeAsSeenFrom(pre: Type)(implicit ctx: Context): SingleDenotation = this + override def mapInfo(f: Type => Type)(implicit ctx: Context): SingleDenotation = this validFor = Period.allInRun(NoRunId) // will be brought forward automatically } -- cgit v1.2.3 From 78b2672d7440979c36800cbd4f1d7dbf867a4a16 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 5 Jul 2016 17:40:41 +0200 Subject: Add a case where caching was missing. Gave several times improvement for linker. 
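As a rough illustration of the guard this commit extends (a sketch only, with invented names rather than the compiler's real Type classes), the idea is that a reference to a class symbol is always safe to cache, whereas other types are cachable only if all of their constituents are:

object CachingSketch {
  sealed trait SimpleType
  case class ClassRef(name: String)                  extends SimpleType // stand-in for a TypeRef whose symbol is a class
  case class Alias(underlying: SimpleType)           extends SimpleType // stand-in for a TypeProxy
  case class AndOr(tp1: SimpleType, tp2: SimpleType) extends SimpleType // stand-in for an AndOrType
  case object Uninstantiated                         extends SimpleType // stand-in for an open type variable

  // A member lookup on `tp` may be memoized only if this returns true.
  def isCachable(tp: SimpleType): Boolean = tp match {
    case ClassRef(_)     => true                      // the newly added fast case
    case Alias(under)    => isCachable(under)
    case AndOr(tp1, tp2) => isCachable(tp1) && isCachable(tp2)
    case Uninstantiated  => false                     // the answer could change once the variable is instantiated
  }

  def main(args: Array[String]): Unit = {
    println(isCachable(AndOr(ClassRef("List"), Alias(ClassRef("Int"))))) // true
    println(isCachable(AndOr(ClassRef("List"), Uninstantiated)))         // false
  }
}

The real check in SymDenotations.isCachable additionally rejects erased value types and trusts a type variable only once it is instantiated to something that is itself in the cache, as the diff below shows.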
--- src/dotty/tools/dotc/core/SymDenotations.scala | 1 + src/dotty/tools/dotc/core/TypeComparer.scala | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index 3a1c589a9..1417347bf 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1641,6 +1641,7 @@ object SymDenotations { */ def isCachable(tp: Type): Boolean = tp match { case _: TypeErasure.ErasedValueType => false + case tp: TypeRef if tp.symbol.isClass => true case tp: TypeVar => tp.inst.exists && inCache(tp.inst) case tp: TypeProxy => inCache(tp.underlying) case tp: AndOrType => inCache(tp.tp1) && inCache(tp.tp2) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index e3c05e55f..fff60f7d5 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -88,7 +88,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { assert(isSatisfiable, constraint.show) } - protected def isSubType(tp1: Type, tp2: Type): Boolean = ctx.traceIndented(s"isSubType ${traceInfo(tp1, tp2)}", subtyping) /*<|<*/ { + protected def isSubType(tp1: Type, tp2: Type): Boolean = ctx.traceIndented(s"isSubType ${traceInfo(tp1, tp2)}", subtyping) { if (tp2 eq NoType) false else if (tp1 eq tp2) true else { @@ -374,14 +374,15 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } compareRefined case tp2: RecType => - tp1.safeDealias match { + def compareRec = tp1.safeDealias match { case tp1: RecType => val rthis1 = RecThis(tp1) isSubType(tp1.parent, tp2.parent.substRecThis(tp2, rthis1)) case _ => val tp1stable = ensureStableSingleton(tp1) isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable)) - } + } + compareRec case tp2 @ HKApply(tycon2, args2) => compareHkApply2(tp1, tp2, tycon2, args2) case tp2 @ TypeLambda(tparams2, body2) => -- cgit v1.2.3 From 223705d88fdeee1ef337f8c1c8b3144a643a6268 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 7 Jul 2016 18:33:58 +0200 Subject: Refactor Typevar instantiation It seemed more cohesive to keep the concepts of "instanceType" and "approximation" side-by-side. --- src/dotty/tools/dotc/core/ConstraintHandling.scala | 51 ++++++++++++++++++++++ src/dotty/tools/dotc/core/Types.scala | 44 +------------------ 2 files changed, 52 insertions(+), 43 deletions(-) diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index dfce9317b..d21f62440 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -171,6 +171,57 @@ trait ConstraintHandling { inst } + /** The instance type of `param` in the current constraint (which contains `param`). + * If `fromBelow` is true, the instance type is the lub of the parameter's + * lower bounds; otherwise it is the glb of its upper bounds. However, + * a lower bound instantiation can be a singleton type only if the upper bound + * is also a singleton type. 
+ */ + def instanceType(param: PolyParam, fromBelow: Boolean): Type = { + def upperBound = constraint.fullUpperBound(param) + def isSingleton(tp: Type): Boolean = tp match { + case tp: SingletonType => true + case AndType(tp1, tp2) => isSingleton(tp1) | isSingleton(tp2) + case OrType(tp1, tp2) => isSingleton(tp1) & isSingleton(tp2) + case _ => false + } + def isFullyDefined(tp: Type): Boolean = tp match { + case tp: TypeVar => tp.isInstantiated && isFullyDefined(tp.instanceOpt) + case tp: TypeProxy => isFullyDefined(tp.underlying) + case tp: AndOrType => isFullyDefined(tp.tp1) && isFullyDefined(tp.tp2) + case _ => true + } + def isOrType(tp: Type): Boolean = tp.stripTypeVar.dealias match { + case tp: OrType => true + case tp: RefinedOrRecType => isOrType(tp.parent) + case AndType(tp1, tp2) => isOrType(tp1) | isOrType(tp2) + case WildcardType(bounds: TypeBounds) => isOrType(bounds.hi) + case _ => false + } + + // First, solve the constraint. + var inst = approximation(param, fromBelow) + + // Then, approximate by (1.) - (3.) and simplify as follows. + // 1. If instance is from below and is a singleton type, yet + // upper bound is not a singleton type, widen the instance. + if (fromBelow && isSingleton(inst) && !isSingleton(upperBound)) + inst = inst.widen + + inst = inst.simplified + + // 2. If instance is from below and is a fully-defined union type, yet upper bound + // is not a union type, approximate the union type from above by an intersection + // of all common base types. + if (fromBelow && isOrType(inst) && isFullyDefined(inst) && !isOrType(upperBound)) + inst = inst.approximateUnion + + // 3. If instance is from below, and upper bound has open named parameters + // make sure the instance has all named parameters of the bound. + if (fromBelow) inst = inst.widenToNamedTypeParams(param.namedTypeParams) + inst + } + /** Constraint `c1` subsumes constraint `c2`, if under `c2` as constraint we have * for all poly params `p` defined in `c2` as `p >: L2 <: U2`: * diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 786c74069..03fdcb957 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2878,53 +2878,11 @@ object Types { * is also a singleton type. */ def instantiate(fromBelow: Boolean)(implicit ctx: Context): Type = { - def upperBound = ctx.typerState.constraint.fullUpperBound(origin) - def isSingleton(tp: Type): Boolean = tp match { - case tp: SingletonType => true - case AndType(tp1, tp2) => isSingleton(tp1) | isSingleton(tp2) - case OrType(tp1, tp2) => isSingleton(tp1) & isSingleton(tp2) - case _ => false - } - def isFullyDefined(tp: Type): Boolean = tp match { - case tp: TypeVar => tp.isInstantiated && isFullyDefined(tp.instanceOpt) - case tp: TypeProxy => isFullyDefined(tp.underlying) - case tp: AndOrType => isFullyDefined(tp.tp1) && isFullyDefined(tp.tp2) - case _ => true - } - def isOrType(tp: Type): Boolean = tp.stripTypeVar.dealias match { - case tp: OrType => true - case tp: RefinedOrRecType => isOrType(tp.parent) - case AndType(tp1, tp2) => isOrType(tp1) | isOrType(tp2) - case WildcardType(bounds: TypeBounds) => isOrType(bounds.hi) - case _ => false - } - - // First, solve the constraint. - var inst = ctx.typeComparer.approximation(origin, fromBelow) - - // Then, approximate by (1.) - (3.) and simplify as follows. - // 1. If instance is from below and is a singleton type, yet - // upper bound is not a singleton type, widen the instance. 
- if (fromBelow && isSingleton(inst) && !isSingleton(upperBound)) - inst = inst.widen - - inst = inst.simplified - - // 2. If instance is from below and is a fully-defined union type, yet upper bound - // is not a union type, approximate the union type from above by an intersection - // of all common base types. - if (fromBelow && isOrType(inst) && isFullyDefined(inst) && !isOrType(upperBound)) - inst = inst.approximateUnion - - // 3. If instance is from below, and upper bound has open named parameters - // make sure the instance has all named parameters of the bound. - if (fromBelow) inst = inst.widenToNamedTypeParams(this.namedTypeParams) - + val inst = ctx.typeComparer.instanceType(origin, fromBelow) if (ctx.typerState.isGlobalCommittable) assert(!inst.isInstanceOf[PolyParam], i"bad inst $this := $inst, constr = ${ctx.typerState.constraint}") // If this fails, you might want to turn on Config.debugCheckConstraintsClosed // to help find the root of the problem. - instantiateWith(inst) } -- cgit v1.2.3 From 5e90215a01d74ef9c2b154727dea87ef2dcb3a1b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 8 Jul 2016 15:20:06 +0200 Subject: Drop compareAliasedRefined If there is a new named type param scheme it will almost certainly not need something like compareAliasedRefined. Also: harden printer to compute less and thereby avoid a possible NPE. --- src/dotty/tools/dotc/core/TypeComparer.scala | 39 ++---------------------- src/dotty/tools/dotc/printing/PlainPrinter.scala | 2 +- 2 files changed, 4 insertions(+), 37 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index fff60f7d5..3036cf878 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -364,9 +364,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { // This twist is needed to make collection/generic/ParFactory.scala compile fourthTry(tp1, tp2) || compareRefinedSlow case _ => - compareRefinedSlow || - fourthTry(tp1, tp2) || - compareAliasedRefined(tp2, tp1, inOrder = false) // @@@ still needed? + compareRefinedSlow || fourthTry(tp1, tp2) } else // fast path, in particular for refinements resulting from parameterization. isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) && @@ -525,8 +523,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths case tp1: RefinedType => - isNewSubType(tp1.parent, tp2) || - compareAliasedRefined(tp1, tp2, inOrder = true) + isNewSubType(tp1.parent, tp2) case tp1: RecType => isNewSubType(tp1.parent, tp2) case HKApply(tycon1, args1) => @@ -715,32 +712,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { recur(tp1.baseClasses) } - /** Say we are comparing a refined type `P{type M = U}` or `P{type M >: L <: U}`. - * If P#M refers to a BaseTypeArg aliased to some other typeref P#N, - * do the same comparison with `P{type N = U}` or `P{type N >: L <: U}`, respectively. 
- * This allows to handle situations involving named type params like this one: - * - * trait Lambda[type Elem] - * trait Lst[T] extends Lambda[T] - * - * compareAliasedRefined is necessary so we establish that - * - * Lst[Int] = Lst[Elem = Int] - */ - private def compareAliasedRefined(rt: RefinedType, other: Type, inOrder: Boolean) = { - val mbr = refinedSymbol(rt) - mbr.is(BaseTypeArg) && { - mbr.info match { - case TypeAlias(TypeRef(_, aliasName)) => - val rt1 = rt.derivedRefinedType(rt.parent, aliasName, rt.refinedInfo) - subtyping.println(i"rewiring $rt to $rt1 in comparison with $other") - if (inOrder) isSubType(rt1, other) else isSubType(other, rt1) - case _ => - false - } - } - } - /** Replace any top-level recursive type `{ z => T }` in `tp` with * `[z := anchor]T`. */ @@ -760,9 +731,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { fix(tp) } - /** The symbol referred to in the refinement of `rt` */ - private def refinedSymbol(rt: RefinedType) = rt.parent.member(rt.refinedName).symbol - /** Returns true iff the result of evaluating either `op1` or `op2` is true, * trying at the same time to keep the constraint as wide as possible. * E.g, if @@ -899,8 +867,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { private def isCovered(tp: Type): Boolean = tp.dealias.stripTypeVar match { case tp: TypeRef => tp.symbol.isClass && tp.symbol != NothingClass && tp.symbol != NullClass case tp: ProtoType => false - case tp: RefinedType => isCovered(tp.parent) && !refinedSymbol(tp).is(BaseTypeArg) - case tp: RecType => isCovered(tp.parent) + case tp: RefinedOrRecType => isCovered(tp.parent) case tp: AnnotatedType => isCovered(tp.underlying) case AndType(tp1, tp2) => isCovered(tp1) && isCovered(tp2) case _ => false diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala index 656650d91..acf4514ea 100644 --- a/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -228,7 +228,7 @@ class PlainPrinter(_ctx: Context) extends Printer { /** If -uniqid is set, the hashcode of the polytype, after a # */ protected def polyHash(pt: GenericType): Text = - "#" + pt.hashCode provided ctx.settings.uniqid.value + if (ctx.settings.uniqid.value) "#" + pt.hashCode else "" /** If -uniqid is set, the unique id of symbol, after a # */ protected def idString(sym: Symbol): String = -- cgit v1.2.3 From 540b38cbd769cee3e8bc72530829e65f7e87cddb Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 9 Jul 2016 19:48:39 +0200 Subject: More tests --- tests/neg/hk-variance.scala | 11 +++++++++++ tests/pos/hk-subtyping.scala | 13 +++++++++++++ 2 files changed, 24 insertions(+) create mode 100644 tests/neg/hk-variance.scala create mode 100644 tests/pos/hk-subtyping.scala diff --git a/tests/neg/hk-variance.scala b/tests/neg/hk-variance.scala new file mode 100644 index 000000000..fec5cc366 --- /dev/null +++ b/tests/neg/hk-variance.scala @@ -0,0 +1,11 @@ +object Test { + + def f[C[+X]] = () + + class D[X] {} + + f[D] // error + + def g[E[-Y]] = f[E] // error + +} diff --git a/tests/pos/hk-subtyping.scala b/tests/pos/hk-subtyping.scala new file mode 100644 index 000000000..a004c2618 --- /dev/null +++ b/tests/pos/hk-subtyping.scala @@ -0,0 +1,13 @@ +object Test { + + def compare[S <: T, T] = () + + compare[Int, Int] + compare[Int, Any] + + def f[C <: List] = { + compare[C[Int], List[Int]] + } + + +} -- cgit v1.2.3 From 
a200695677237922fdf6f995c690cb0108ec2fc4 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 11 Jul 2016 14:02:38 +0200 Subject: Fix SI-2712 Allows partially instantiated types as type constrictors when inferring higher-kinded types. --- src/dotty/tools/dotc/core/ConstraintHandling.scala | 37 ++++++++++++++- src/dotty/tools/dotc/core/Symbols.scala | 1 + src/dotty/tools/dotc/core/TypeApplications.scala | 6 +-- src/dotty/tools/dotc/core/TypeComparer.scala | 53 ++++++++++++++++------ src/dotty/tools/dotc/core/TypeParamInfo.scala | 3 ++ src/dotty/tools/dotc/core/Types.scala | 2 + tests/neg/i94-nada.scala | 11 +++++ tests/pickling/i94-nada.scala | 4 +- tests/pos/i94-nada.scala | 4 +- tests/pos/t2712-1.scala | 9 ++++ tests/pos/t2712-4.scala | 17 +++++++ tests/pos/t2712-7.scala | 15 ++++++ tests/pos/t5683.scala | 23 ++++++++++ 13 files changed, 163 insertions(+), 22 deletions(-) create mode 100644 tests/neg/i94-nada.scala create mode 100644 tests/pos/t2712-1.scala create mode 100644 tests/pos/t2712-4.scala create mode 100644 tests/pos/t2712-7.scala create mode 100644 tests/pos/t5683.scala diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index d21f62440..1c3bd7384 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -35,6 +35,11 @@ trait ConstraintHandling { /** If the constraint is frozen we cannot add new bounds to the constraint. */ protected var frozenConstraint = false + /** We are currently comparing lambdas. Used as a flag for + * optimization: when `false`, no need to do an expensive `pruneLambdaParams` + */ + protected var comparingLambdas = false + private def addOneBound(param: PolyParam, bound: Type, isUpper: Boolean): Boolean = !constraint.contains(param) || { def occursIn(bound: Type): Boolean = { @@ -289,6 +294,34 @@ trait ConstraintHandling { checkPropagated(s"added $description") { addConstraintInvocations += 1 + /** When comparing lambdas we might get constraints such as + * `A <: X0` or `A = List[X0]` where `A` is a constrained parameter + * and `X0` is a lambda parameter. The constraint for `A` is not allowed + * to refer to such a lambda parameter because the lambda parameter is + * not visible where `A` is defined. Consequently, we need to + * approximate the bound so that the lambda parameter does not appear in it. + * Test case in neg/i94-nada.scala. This test crashes with an illegal instance + * error when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is + * missing. 
+ */ + def pruneLambdaParams(tp: Type) = + if (comparingLambdas) { + val approx = new ApproximatingTypeMap { + def apply(t: Type): Type = t match { + case t @ PolyParam(tl: TypeLambda, n) => + val effectiveVariance = if (fromBelow) -variance else variance + val bounds = tl.paramBounds(n) + if (effectiveVariance > 0) bounds.hi + else if (effectiveVariance < 0 ) bounds.lo + else NoType + case _ => + mapOver(t) + } + } + approx(tp) + } + else tp + def addParamBound(bound: PolyParam) = if (fromBelow) addLess(bound, param) else addLess(param, bound) @@ -336,7 +369,7 @@ trait ConstraintHandling { prune(bound.underlying) case bound: PolyParam => constraint.entry(bound) match { - case NoType => bound + case NoType => pruneLambdaParams(bound) case _: TypeBounds => if (!addParamBound(bound)) NoType else if (fromBelow) defn.NothingType @@ -345,7 +378,7 @@ trait ConstraintHandling { prune(inst) } case _ => - bound + pruneLambdaParams(bound) } try bound match { diff --git a/src/dotty/tools/dotc/core/Symbols.scala b/src/dotty/tools/dotc/core/Symbols.scala index df8bc8116..ae88753d0 100644 --- a/src/dotty/tools/dotc/core/Symbols.scala +++ b/src/dotty/tools/dotc/core/Symbols.scala @@ -495,6 +495,7 @@ object Symbols { def paramBounds(implicit ctx: Context) = denot.info.bounds def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = pre.memberInfo(this).bounds def paramVariance(implicit ctx: Context) = denot.variance + def paramRef(implicit ctx: Context) = denot.typeRef // -------- Printing -------------------------------------------------------- diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index be0c08d15..6e0bf7786 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -341,11 +341,11 @@ class TypeApplications(val self: Type) extends AnyVal { * * TODO: Handle parameterized lower bounds */ - def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = { + def LambdaAbstract(tparams: List[TypeParamInfo])(implicit ctx: Context): Type = { def expand(tp: Type) = TypeLambda( - tpnme.syntheticLambdaParamNames(tparams.length), tparams.map(_.variance))( - tl => tparams.map(tparam => tl.lifted(tparams, tparam.info).bounds), + tpnme.syntheticLambdaParamNames(tparams.length), tparams.map(_.paramVariance))( + tl => tparams.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds), tl => tl.lifted(tparams, tp)) assert(!isHK, self) self match { diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 3036cf878..3a0311977 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -398,7 +398,12 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { // This wpuld mean that there is no convenient means anymore to express a kind // as a supertype. The fix is to delay the checking of bounds so that only // bounds of * types are checked. 
- variancesConform(tparams1, tparams2) && isSubType(body1, body2.subst(tp2, tp1)) + val saved = comparingLambdas + comparingLambdas = true + try + variancesConform(tparams1, tparams2) && + isSubType(body1, body2.subst(tp2, tp1)) + finally comparingLambdas = saved case _ => if (!tp1.isHK) { tp2 match { @@ -592,32 +597,54 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } - /** `param2` can be instantiated to the type constructor of the LHS - * or to the type constructor of one of the LHS base class instances + /** `param2` can be instantiated to a type application prefix of the LHS + * or to a type application prefix of one of the LHS base class instances * and the resulting type application is a supertype of `tp1`, * or fallback to fourthTry. */ def canInstantiate(param2: PolyParam): Boolean = { - /** `param2` can be instantiated to `tycon1a`. - * and the resulting type application is a supertype of `tp1`. + /** Let + * + * `tparams_1, ..., tparams_k-1` be the type parameters of the rhs + * `tparams1_1, ..., tparams1_n-1` be the type parameters of the constructor of the lhs + * `args1_1, ..., args1_n-1` be the type arguments of the lhs + * `d = n - k` + * + * Returns `true` iff `d >= 0` and `param2` can be instantiated to + * + * [tparams1_d, ... tparams1_n-1] -> tycon1a[args_1, ..., args_d-1, tparams_d, ... tparams_n-1] + * + * such that the resulting type application is a supertype of `tp1`. */ - def tyconOK(tycon1a: Type) = - variancesConform(tycon1a.typeParams, tparams) && { - (ctx.mode.is(Mode.TypevarsMissContext) || - tryInstantiate(param2, tycon1a.ensureHK)) && - isSubType(tp1, tycon1a.appliedTo(args2)) + def tyconOK(tycon1a: Type, args1: List[Type]) = { + var tycon1b = tycon1a + val tparams1a = tycon1a.typeParams + val lengthDiff = tparams1a.length - tparams.length + lengthDiff >= 0 && { + val tparams1 = tparams1a.drop(lengthDiff) + variancesConform(tparams1, tparams) && { + if (lengthDiff > 0) + tycon1b = tycon1a + .appliedTo(args1.take(lengthDiff) ++ tparams1.map(_.paramRef)) + .LambdaAbstract(tparams1) + (ctx.mode.is(Mode.TypevarsMissContext) || + tryInstantiate(param2, tycon1b.ensureHK)) && + isSubType(tp1, tycon1b.appliedTo(args2)) + } } + } tp1.widen match { - case tp1w @ HKApply(tycon1, _) => - tyconOK(tycon1) + case tp1w @ HKApply(tycon1, args1) => + tyconOK(tycon1, args1) case tp1w => tp1w.typeSymbol.isClass && { val classBounds = tycon2.classSymbols def liftToBase(bcs: List[ClassSymbol]): Boolean = bcs match { case bc :: bcs1 => - classBounds.exists(bc.derivesFrom) && tyconOK(tp1w.baseTypeRef(bc)) || + classBounds.exists(bc.derivesFrom) && + tyconOK(tp1w.baseTypeRef(bc), tp1w.baseArgInfos(bc)) || liftToBase(bcs1) case _ => false diff --git a/src/dotty/tools/dotc/core/TypeParamInfo.scala b/src/dotty/tools/dotc/core/TypeParamInfo.scala index ff3c8fca7..6bec2e0e0 100644 --- a/src/dotty/tools/dotc/core/TypeParamInfo.scala +++ b/src/dotty/tools/dotc/core/TypeParamInfo.scala @@ -26,4 +26,7 @@ trait TypeParamInfo { /** The variance of the type parameter */ def paramVariance(implicit ctx: Context): Int + + /** A type that refers to the parameter */ + def paramRef(implicit ctx: Context): Type } \ No newline at end of file diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 03fdcb957..728f7fc21 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2614,12 +2614,14 @@ object Types { def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds = 
paramBounds def paramVariance(implicit ctx: Context): Int = tl.variances(n) def toArg: Type = PolyParam(tl, n) + def paramRef(implicit ctx: Context): Type = PolyParam(tl, n) } object TypeLambda { def apply(paramNames: List[TypeName], variances: List[Int])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type)(implicit ctx: Context): TypeLambda = { unique(new TypeLambda(paramNames, variances)(paramBoundsExp, resultTypeExp)) } + def fromSymbols(tparams: List[Symbol], resultType: Type)(implicit ctx: Context) = if (tparams.isEmpty) resultType else apply(tparams map (_.name.asTypeName), tparams.map(_.variance))( diff --git a/tests/neg/i94-nada.scala b/tests/neg/i94-nada.scala new file mode 100644 index 000000000..8ca104e06 --- /dev/null +++ b/tests/neg/i94-nada.scala @@ -0,0 +1,11 @@ +trait Test1 { + trait Monad[MX] { + def x: MX + } + sealed abstract class Either[A,B] + case class Left[A,B](x: A) extends Either[A,B] with Monad[A] + case class Right[A,B](x: B) extends Either[A,B] with Monad[B] + def flatMap[FX,FY,M[FMX]<:Monad[FMX]](m: M[FX], f: FX => M[FY]): M[FY] = f(m.x) + println(flatMap(Left(1), {x: Int => Left(x)})) // error: Left does not conform to [X] -> Monad[X] + +} diff --git a/tests/pickling/i94-nada.scala b/tests/pickling/i94-nada.scala index ce8dc98ad..cf39ee2ae 100644 --- a/tests/pickling/i94-nada.scala +++ b/tests/pickling/i94-nada.scala @@ -27,7 +27,7 @@ trait Test1 { case class Left[A,B](x: A) extends Either[A,B] with Monad[A] case class Right[A,B](x: B) extends Either[A,B] with Monad[B] def flatMap[X,Y,M[X]<:Monad[X]](m: M[X], f: X => M[Y]): M[Y] = f(m.x) - println(flatMap(Left(1), {x: Int => Left(x)})) + println(flatMap(Right(1), {x: Int => Right(x)})) } trait Test2 { trait Monad[X] { @@ -37,7 +37,7 @@ trait Test2 { case class Left[A,B](x: A) extends Either[A,B] with Monad[A] case class Right[A,B](x: B) extends Either[A,B] with Monad[B] def flatMap[X,Y,M[X]](m: M[X], f: X => M[Y]): M[Y] - println(flatMap(Left(1), {x: Int => Left(x)})) + println(flatMap(Right(1), {x: Int => Right(x)})) } trait Test3 { def flatMap[X,Y,M[X]](m: M[X], f: X => M[Y]): M[Y] diff --git a/tests/pos/i94-nada.scala b/tests/pos/i94-nada.scala index f8263ccf2..1c7d88a10 100644 --- a/tests/pos/i94-nada.scala +++ b/tests/pos/i94-nada.scala @@ -25,7 +25,7 @@ trait Test1 { case class Left[A,B](x: A) extends Either[A,B] with Monad[A] case class Right[A,B](x: B) extends Either[A,B] with Monad[B] def flatMap[X,Y,M[X]<:Monad[X]](m: M[X], f: X => M[Y]): M[Y] = f(m.x) - println(flatMap(Left(1), {x: Int => Left(x)})) + println(flatMap(Right(1), {x: Int => Right(x)})) } trait Test2 { trait Monad[X] { @@ -35,7 +35,7 @@ trait Test2 { case class Left[A,B](x: A) extends Either[A,B] with Monad[A] case class Right[A,B](x: B) extends Either[A,B] with Monad[B] def flatMap[X,Y,M[X]](m: M[X], f: X => M[Y]): M[Y] - println(flatMap(Left(1), {x: Int => Left(x)})) + println(flatMap(Right(1), {x: Int => Right(x)})) } trait Test3 { def flatMap[X,Y,M[X]](m: M[X], f: X => M[Y]): M[Y] diff --git a/tests/pos/t2712-1.scala b/tests/pos/t2712-1.scala new file mode 100644 index 000000000..4f84c9df5 --- /dev/null +++ b/tests/pos/t2712-1.scala @@ -0,0 +1,9 @@ +package test + +// Original test case from, +// +// https://issues.scala-lang.org/browse/SI-2712 +object Test { + def meh[M[_], A](x: M[A]): M[A] = x + meh{(x: Int) => x} // solves ?M = [X] Int => X and ?A = Int ... 
+} diff --git a/tests/pos/t2712-4.scala b/tests/pos/t2712-4.scala new file mode 100644 index 000000000..3e2e5cdda --- /dev/null +++ b/tests/pos/t2712-4.scala @@ -0,0 +1,17 @@ +package test + +object Test1 { + trait X + trait Y extends X + class Foo[T, U <: X] + def meh[M[_ <: A], A](x: M[A]): M[A] = x + meh(new Foo[Int, Y]) +} + +object Test2 { + trait X + trait Y extends X + class Foo[T, U >: Y] + def meh[M[_ >: A], A](x: M[A]): M[A] = x + meh(new Foo[Int, X]) +} diff --git a/tests/pos/t2712-7.scala b/tests/pos/t2712-7.scala new file mode 100644 index 000000000..d9c5243f1 --- /dev/null +++ b/tests/pos/t2712-7.scala @@ -0,0 +1,15 @@ +package test + +// Cats Xor, Scalaz \/, scala.util.Either +sealed abstract class Xor[+A, +B] extends Product with Serializable +object Xor { + final case class Left[+A](a: A) extends (A Xor Nothing) + final case class Right[+B](b: B) extends (Nothing Xor B) +} + +object TestXor { + import Xor._ + def meh[F[_], A, B](fa: F[A])(f: A => B): F[B] = ??? + meh(new Right(23): Xor[Boolean, Int])(_ < 13) + meh(new Left(true): Xor[Boolean, Int])(_ < 13) +} diff --git a/tests/pos/t5683.scala b/tests/pos/t5683.scala new file mode 100644 index 000000000..05ab03579 --- /dev/null +++ b/tests/pos/t5683.scala @@ -0,0 +1,23 @@ +object Test { + trait NT[X] + trait W[W, A] extends NT[Int] + type StringW[T] = W[String, T] + trait K[M[_], A, B] + + def k[M[_], B](f: Int => M[B]): K[M, Int, B] = null + + val okay1: K[StringW,Int,Int] = k{ (y: Int) => null: StringW[Int] } + val okay2 = k[StringW,Int]{ (y: Int) => null: W[String, Int] } + + val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] } + + // remove `extends NT[Int]`, and the last line gives an inference error + // rather than a crash. + // test/files/pos/t5683.scala:12: error: no type parameters for method k: (f: Int => M[B])Test.K[M,Int,B] exist so that it can be applied to arguments (Int => Test.W[String,Int]) + // --- because --- + // argument expression's type is not compatible with formal parameter type; + // found : Int => Test.W[String,Int] + // required: Int => ?M[?B] + // val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] } + // ^ +} -- cgit v1.2.3 From 6d7bc4996d6ad2095442ebc43f59307448226fd7 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 11 Jul 2016 17:02:16 +0200 Subject: Address reviewers comments --- src/dotty/tools/dotc/core/Constraint.scala | 2 +- src/dotty/tools/dotc/core/ConstraintHandling.scala | 8 +------ src/dotty/tools/dotc/core/Signature.scala | 4 ++-- src/dotty/tools/dotc/core/SymDenotations.scala | 13 +++++------ src/dotty/tools/dotc/core/Symbols.scala | 1 + src/dotty/tools/dotc/core/TypeApplications.scala | 25 ++++++++++------------ src/dotty/tools/dotc/core/TypeComparer.scala | 19 +++++++++------- src/dotty/tools/dotc/core/TypeParamInfo.scala | 14 +++++++++--- src/dotty/tools/dotc/core/Types.scala | 11 +++++----- .../dotc/core/unpickleScala2/Scala2Unpickler.scala | 4 ++-- src/dotty/tools/dotc/sbt/ExtractAPI.scala | 3 ++- 11 files changed, 55 insertions(+), 49 deletions(-) diff --git a/src/dotty/tools/dotc/core/Constraint.scala b/src/dotty/tools/dotc/core/Constraint.scala index e480d1bfe..e10523753 100644 --- a/src/dotty/tools/dotc/core/Constraint.scala +++ b/src/dotty/tools/dotc/core/Constraint.scala @@ -119,7 +119,7 @@ abstract class Constraint extends Showable { /** Is entry associated with `pt` removable? This is the case if * all type parameters of the entry are associated with type variables - * which have its `inst` fields set. 
+ * which have their `inst` fields set. */ def isRemovable(pt: GenericType): Boolean diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index 1c3bd7384..c5e3bad40 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -385,13 +385,7 @@ trait ConstraintHandling { case bound: PolyParam if constraint contains bound => addParamBound(bound) case _ => - var pbound = prune(bound) - if (pbound.isHK && !param.isHK) { - param match { - case EtaExpansion(tycon) if tycon.symbol.isClass => pbound = tycon - case _ => - } - } + val pbound = prune(bound) pbound.exists && ( if (fromBelow) addLowerBound(param, pbound) else addUpperBound(param, pbound)) } diff --git a/src/dotty/tools/dotc/core/Signature.scala b/src/dotty/tools/dotc/core/Signature.scala index c0647ad1d..b2e627cbe 100644 --- a/src/dotty/tools/dotc/core/Signature.scala +++ b/src/dotty/tools/dotc/core/Signature.scala @@ -50,7 +50,7 @@ case class Signature(paramsSig: List[TypeName], resSig: TypeName) { /** The degree to which this signature matches `that`. * If parameter names are consistent and result types names match (i.e. they are the same * or one is a wildcard), the result is `FullMatch`. - * If only the parameter names are constistent, the result is `ParamMatch` before erasure and + * If only the parameter names are consistent, the result is `ParamMatch` before erasure and * `NoMatch` otherwise. * If the parameters are inconsistent, the result is always `NoMatch`. */ @@ -71,7 +71,7 @@ case class Signature(paramsSig: List[TypeName], resSig: TypeName) { Signature((params.map(sigName(_, isJava))) ++ paramsSig, resSig) /** A signature is under-defined if its paramsSig part contains at least one - * `tpnme.Uninstantited`. Under-defined signatures arise when taking a signature + * `tpnme.Uninstantiated`. Under-defined signatures arise when taking a signature * of a type that still contains uninstantiated type variables. They are eliminated * by `fixSignature` in `PostTyper`. */ diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index 1417347bf..16c77ac30 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1209,7 +1209,7 @@ object SymDenotations { private[this] var myNamedTypeParams: Set[TypeSymbol] = _ /** The type parameters in this class, in the order they appear in the current - * scope `decls`. This is might be temporarily the incorrect order when + * scope `decls`. This might be temporarily the incorrect order when * reading Scala2 pickled info. The problem is fixed by `updateTypeParams` * which is called once an unpickled symbol has been completed. */ @@ -1543,18 +1543,19 @@ object SymDenotations { } /** Make sure the type parameters of this class appear in the order given - * by `tparams` in the scope of the class. Reorder definitions in scope if necessary. - * @pre All type parameters in `tparams` are entered in class scope `info.decls`. + * by `typeParams` in the scope of the class. Reorder definitions in scope if necessary. 
*/ - def updateTypeParams(tparams: List[Symbol])(implicit ctx: Context): Unit = - if (!ctx.erasedTypes && !typeParamsFromDecls.corresponds(typeParams)(_.name == _.name)) { + def ensureTypeParamsInCorrectOrder()(implicit ctx: Context): Unit = { + val tparams = typeParams + if (!ctx.erasedTypes && !typeParamsFromDecls.corresponds(tparams)(_.name == _.name)) { val decls = info.decls val decls1 = newScope - for (tparam <- tparams) decls1.enter(decls.lookup(tparam.name)) + for (tparam <- typeParams) decls1.enter(decls.lookup(tparam.name)) for (sym <- decls) if (!tparams.contains(sym)) decls1.enter(sym) info = classInfo.derivedClassInfo(decls = decls1) myTypeParams = null } + } /** All members of this class that have the given name. * The elements of the returned pre-denotation all diff --git a/src/dotty/tools/dotc/core/Symbols.scala b/src/dotty/tools/dotc/core/Symbols.scala index ae88753d0..229df4576 100644 --- a/src/dotty/tools/dotc/core/Symbols.scala +++ b/src/dotty/tools/dotc/core/Symbols.scala @@ -494,6 +494,7 @@ object Symbols { def paramName(implicit ctx: Context): Name = name def paramBounds(implicit ctx: Context) = denot.info.bounds def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = pre.memberInfo(this).bounds + def paramBoundsOrCompleter(implicit ctx: Context): Type = denot.infoOrCompleter def paramVariance(implicit ctx: Context) = denot.variance def paramRef(implicit ctx: Context) = denot.typeRef diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 6e0bf7786..09f006a11 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -41,7 +41,7 @@ object TypeApplications { /** Does variance `v1` conform to variance `v2`? * This is the case if the variances are the same or `sym` is nonvariant. */ - def varianceConforms(v1: Int, v2: Int)(implicit ctx: Context): Boolean = + def varianceConforms(v1: Int, v2: Int): Boolean = v1 == v2 || v2 == 0 /** Does the variance of type parameter `tparam1` conform to the variance of type parameter `tparam2`? @@ -49,7 +49,7 @@ object TypeApplications { def varianceConforms(tparam1: TypeParamInfo, tparam2: TypeParamInfo)(implicit ctx: Context): Boolean = varianceConforms(tparam1.paramVariance, tparam2.paramVariance) - /** Doe the variances of type parameters `tparams1` conform to the variances + /** Do the variances of type parameters `tparams1` conform to the variances * of corresponding type parameters `tparams2`? * This is only the case of `tparams1` and `tparams2` have the same length. 
*/ @@ -116,17 +116,11 @@ object TypeApplications { /** Adapt all arguments to possible higher-kinded type parameters using etaExpandIfHK */ - def etaExpandIfHK(tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): List[Type] = + def EtaExpandIfHK(tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): List[Type] = if (tparams.isEmpty) args - else { - def bounds(tparam: TypeParamInfo) = tparam match { - case tparam: Symbol => tparam.infoOrCompleter - case tparam: LambdaParam => tparam.paramBounds - } - args.zipWithConserve(tparams)((arg, tparam) => arg.etaExpandIfHK(bounds(tparam))) - } + else args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfHK(tparam.paramBoundsOrCompleter)) - /** A type map that tries to reduce a (part of) the result type of the type lambda `tycon` + /** A type map that tries to reduce (part of) the result type of the type lambda `tycon` * with the given `args`(some of which are wildcard arguments represented by type bounds). * Non-wildcard arguments are substituted everywhere as usual. A wildcard argument * `>: L <: H` is substituted for a type lambda parameter `X` only under certain conditions. @@ -166,7 +160,7 @@ object TypeApplications { * produce a higher-kinded application with a type lambda as type constructor. */ class Reducer(tycon: TypeLambda, args: List[Type])(implicit ctx: Context) extends TypeMap { - private var available = Set((0 until args.length): _*) + private var available = (0 until args.length).toSet var allReplaced = true def hasWildcardArg(p: PolyParam) = p.binder == tycon && args(p.paramNum).isInstanceOf[TypeBounds] @@ -320,7 +314,10 @@ class TypeApplications(val self: Type) extends AnyVal { case self: TypeLambda => true case self: HKApply => false case self: SingletonType => false - case self: TypeVar => self.origin.isHK // discrepancy with typeParams, why? + case self: TypeVar => + // Using `origin` instead of `underlying`, as is done for typeParams, + // avoids having to set ephemeral in some cases. + self.origin.isHK case self: WildcardType => self.optBounds.isHK case self: TypeProxy => self.underlying.isHK case _ => false @@ -378,7 +375,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (isHK) self else EtaExpansion(self) /** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */ - def etaExpandIfHK(bound: Type)(implicit ctx: Context): Type = { + def EtaExpandIfHK(bound: Type)(implicit ctx: Context): Type = { val hkParams = bound.hkTypeParams if (hkParams.isEmpty) self else self match { diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 3a0311977..faa4e1b16 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -531,7 +531,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { isNewSubType(tp1.parent, tp2) case tp1: RecType => isNewSubType(tp1.parent, tp2) - case HKApply(tycon1, args1) => + case tp1 @ HKApply(tycon1, args1) => compareHkApply1(tp1, tycon1, args1, tp2) case EtaExpansion(tycon1) => isSubType(tycon1, tp2) @@ -567,10 +567,13 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { /** Subtype test for the hk application `tp2 = tycon2[args2]`. 
*/ - def compareHkApply2(tp1: Type, tp2: Type, tycon2: Type, args2: List[Type]): Boolean = { + def compareHkApply2(tp1: Type, tp2: HKApply, tycon2: Type, args2: List[Type]): Boolean = { val tparams = tycon2.typeParams assert(tparams.nonEmpty) + /** True if `tp1` and `tp2` have compatible type constructors and their + * corresponding arguments are subtypes relative to their variance (see `isSubArgs`). + */ def isMatchingApply(tp1: Type): Boolean = tp1 match { case HKApply(tycon1, args1) => tycon1.dealias match { @@ -602,7 +605,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * and the resulting type application is a supertype of `tp1`, * or fallback to fourthTry. */ - def canInstantiate(param2: PolyParam): Boolean = { + def canInstantiate(tycon2: PolyParam): Boolean = { /** Let * @@ -611,7 +614,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * `args1_1, ..., args1_n-1` be the type arguments of the lhs * `d = n - k` * - * Returns `true` iff `d >= 0` and `param2` can be instantiated to + * Returns `true` iff `d >= 0` and `tycon2` can be instantiated to * * [tparams1_d, ... tparams1_n-1] -> tycon1a[args_1, ..., args_d-1, tparams_d, ... tparams_n-1] * @@ -629,7 +632,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { .appliedTo(args1.take(lengthDiff) ++ tparams1.map(_.paramRef)) .LambdaAbstract(tparams1) (ctx.mode.is(Mode.TypevarsMissContext) || - tryInstantiate(param2, tycon1b.ensureHK)) && + tryInstantiate(tycon2, tycon1b.ensureHK)) && isSubType(tp1, tycon1b.appliedTo(args2)) } } @@ -690,7 +693,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { /** Subtype test for the hk application `tp1 = tycon1[args1]`. */ - def compareHkApply1(tp1: Type, tycon1: Type, args1: List[Type], tp2: Type): Boolean = + def compareHkApply1(tp1: HKApply, tycon1: Type, args1: List[Type], tp2: Type): Boolean = tycon1 match { case param1: PolyParam => def canInstantiate = tp2 match { @@ -716,7 +719,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { val v = tparams.head.paramVariance (v > 0 || isSubType(args2.head, args1.head)) && (v < 0 || isSubType(args1.head, args2.head)) - } + } && isSubArgs(args1.tail, args2.tail, tparams) /** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where * - `B` derives from one of the class symbols of `tp2`, @@ -1522,7 +1525,7 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx) - override def compareHkApply2(tp1: Type, tp2: Type, tycon2: Type, args2: List[Type]): Boolean = { + override def compareHkApply2(tp1: Type, tp2: HKApply, tycon2: Type, args2: List[Type]): Boolean = { def addendum = "" traceIndented(i"compareHkApply $tp1, $tp2$addendum") { super.compareHkApply2(tp1, tp2, tycon2, args2) diff --git a/src/dotty/tools/dotc/core/TypeParamInfo.scala b/src/dotty/tools/dotc/core/TypeParamInfo.scala index 6bec2e0e0..1d79e4204 100644 --- a/src/dotty/tools/dotc/core/TypeParamInfo.scala +++ b/src/dotty/tools/dotc/core/TypeParamInfo.scala @@ -9,7 +9,9 @@ import Types.{Type, TypeBounds} */ trait TypeParamInfo { - /** Is this the info of a type parameter? Might be wrong for symbols */ + /** Is this the info of a type parameter? Will return `false` for symbols + * that are not type parameters. 
+ */ def isTypeParam(implicit ctx: Context): Boolean /** The name of the type parameter */ @@ -19,10 +21,16 @@ trait TypeParamInfo { def paramBounds(implicit ctx: Context): TypeBounds /** The info of the type parameter as seen from a prefix type. - * This can be different from `memberInfo` if the binding - * is a type symbol of a class. + * For type parameter symbols, this is the `memberInfo` as seen from `prefix`. + * For type lambda parameters, it's the same as `paramBounds` as + * `asSeenFrom` has already been applied to the whole type lambda. */ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds + + /** The parameter bounds, or the completer if the type parameter + * is an as-yet uncompleted symbol. + */ + def paramBoundsOrCompleter(implicit ctx: Context): Type /** The variance of the type parameter */ def paramVariance(implicit ctx: Context): Int diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 728f7fc21..63f39637b 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -452,7 +452,7 @@ object Types { // We have to be careful because we might open the same (wrt eq) recursive type // twice during findMember which risks picking the wrong prefix in the `substRecThis(rt, pre)` // call below. To avoid this problem we do a defensive copy of the recursive - // type first. But if we do this always we risk being inefficient and we run into + // type first. But if we do this always we risk being inefficient and we ran into // stackoverflows when compiling pos/hk.scala under the refinement encoding // of hk-types. So we only do a copy if the type // is visited again in a recursive call to `findMember`, as tracked by `tp.opened`. @@ -470,7 +470,7 @@ object Types { // // fails (in fact it thinks the underlying type of the LHS is `Tree[Untyped]`.) // - // Without the without the `openedTwice` trick, Typer.scala fails to Ycheck + // Without the `openedTwice` trick, Typer.scala fails to Ycheck // at phase resolveSuper. 
val rt = if (tp.opened) { // defensive copy @@ -2510,7 +2510,7 @@ object Types { else duplicate(paramNames, paramBounds, resType) /** PolyParam references to all type parameters of this type */ - def paramRefs: List[PolyParam] = paramNames.indices.toList.map(PolyParam(this, _)) + lazy val paramRefs: List[PolyParam] = paramNames.indices.toList.map(PolyParam(this, _)) /** The type `[tparams := paramRefs] tp`, where `tparams` can be * either a list of type parameter symbols or a list of lambda parameters @@ -2518,7 +2518,7 @@ object Types { def lifted(tparams: List[TypeParamInfo], tp: Type)(implicit ctx: Context): Type = tparams match { case LambdaParam(poly, _) :: _ => tp.subst(poly, this) - case tparams: List[Symbol] => tp.subst(tparams, paramRefs) + case tparams: List[Symbol @unchecked] => tp.subst(tparams, paramRefs) } override def equals(other: Any) = other match { @@ -2612,6 +2612,7 @@ object Types { def paramName(implicit ctx: Context): TypeName = tl.paramNames(n) def paramBounds(implicit ctx: Context): TypeBounds = tl.paramBounds(n) def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds = paramBounds + def paramBoundsOrCompleter(implicit ctx: Context): Type = paramBounds def paramVariance(implicit ctx: Context): Int = tl.variances(n) def toArg: Type = PolyParam(tl, n) def paramRef(implicit ctx: Context): Type = PolyParam(tl, n) @@ -2643,7 +2644,7 @@ object Types { override def superType(implicit ctx: Context): Type = tycon match { case tp: TypeLambda => defn.AnyType - case tp: TypeProxy => tp.superType.appliedTo(args) + case tp: TypeProxy => tp.superType.applyIfParameterized(args) case _ => defn.AnyType } /* diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index e8f3d63a9..3dbeb4040 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -134,7 +134,7 @@ object Scala2Unpickler { denot.info = ClassInfo( // final info, except possibly for typeparams ordering denot.owner.thisType, denot.classSymbol, parentRefs, decls, ost) - denot.updateTypeParams(tparams) + denot.ensureTypeParamsInCorrectOrder() } } @@ -733,7 +733,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas else TypeRef(pre, sym.name.asTypeName) val args = until(end, readTypeRef) if (sym == defn.ByNameParamClass2x) ExprType(args.head) - else if (args.nonEmpty) tycon.safeAppliedTo(etaExpandIfHK(sym.typeParams, args)) + else if (args.nonEmpty) tycon.safeAppliedTo(EtaExpandIfHK(sym.typeParams, args)) else if (sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams) else tycon case TYPEBOUNDStpe => diff --git a/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/src/dotty/tools/dotc/sbt/ExtractAPI.scala index d4b38c66e..26611ef43 100644 --- a/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -404,13 +404,14 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder case tp: ThisType => apiThis(tp.cls) case RecThis(binder) => - apiThis(binder.typeSymbol) // !!! this is almost certainly wrong !!! + apiThis(binder.typeSymbol) // !!! this is almost certainly wrong: binder does not always have a typeSymbol !!! case tp: ParamType => new api.ParameterRef(tp.paramName.toString) case tp: LazyRef => apiType(tp.ref) case tp: TypeVar => apiType(tp.underlying) + // !!! 
missing cases: TypeLambda, HKApply case _ => { ctx.warning(i"sbt-api: Unhandled type ${tp.getClass} : $tp") Constants.emptyType -- cgit v1.2.3 From 055726e0cbbefa56ddbec35b0c58a7000fe97ebf Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 12 Jul 2016 04:04:01 +0100 Subject: ExtractAPI: Do not miss value parameters of PolyTypes This bug has been present since we merged this phase. In the new test `signature-change`, only "Case 1" did not pass before. --- .../source-dependencies/signature-change/B.scala | 3 +++ .../signature-change/changes/A0.scala | 3 +++ .../signature-change/changes/A1.scala | 3 +++ .../signature-change/changes/A2.scala | 3 +++ .../signature-change/changes/A3.scala | 3 +++ .../project/DottyInjectedPlugin.scala | 17 +++++++++++++++++ .../source-dependencies/signature-change/test | 20 ++++++++++++++++++++ src/dotty/tools/dotc/sbt/ExtractAPI.scala | 3 +++ 8 files changed, 55 insertions(+) create mode 100644 bridge/src/sbt-test/source-dependencies/signature-change/B.scala create mode 100644 bridge/src/sbt-test/source-dependencies/signature-change/changes/A0.scala create mode 100644 bridge/src/sbt-test/source-dependencies/signature-change/changes/A1.scala create mode 100644 bridge/src/sbt-test/source-dependencies/signature-change/changes/A2.scala create mode 100644 bridge/src/sbt-test/source-dependencies/signature-change/changes/A3.scala create mode 100644 bridge/src/sbt-test/source-dependencies/signature-change/project/DottyInjectedPlugin.scala create mode 100644 bridge/src/sbt-test/source-dependencies/signature-change/test diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/B.scala b/bridge/src/sbt-test/source-dependencies/signature-change/B.scala new file mode 100644 index 000000000..e049b23b0 --- /dev/null +++ b/bridge/src/sbt-test/source-dependencies/signature-change/B.scala @@ -0,0 +1,3 @@ +object B { + val x: Int = A.f1[Any](1) +} diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/changes/A0.scala b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A0.scala new file mode 100644 index 000000000..6cf6a62c2 --- /dev/null +++ b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A0.scala @@ -0,0 +1,3 @@ +object A { + def f1[T](x: Int): Int = 1 +} diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/changes/A1.scala b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A1.scala new file mode 100644 index 000000000..d10bcbadc --- /dev/null +++ b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A1.scala @@ -0,0 +1,3 @@ +object A { + def f1[T](x: String): Int = 1 +} diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/changes/A2.scala b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A2.scala new file mode 100644 index 000000000..029dc28d7 --- /dev/null +++ b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A2.scala @@ -0,0 +1,3 @@ +object A { + def f1[T](x: Int): String = "" +} diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/changes/A3.scala b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A3.scala new file mode 100644 index 000000000..69463721f --- /dev/null +++ b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A3.scala @@ -0,0 +1,3 @@ +object A { + def f1[T <: Int](x: Int): Int = 1 +} diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/project/DottyInjectedPlugin.scala 
b/bridge/src/sbt-test/source-dependencies/signature-change/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000..3433779b6 --- /dev/null +++ b/bridge/src/sbt-test/source-dependencies/signature-change/project/DottyInjectedPlugin.scala @@ -0,0 +1,17 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := "0.1-SNAPSHOT", + scalaOrganization := "ch.epfl.lamp", + scalacOptions += "-language:Scala2", + scalaBinaryVersion := "2.11", + autoScalaLibrary := false, + libraryDependencies ++= Seq("org.scala-lang" % "scala-library" % "2.11.5"), + scalaCompilerBridgeSource := ("ch.epfl.lamp" % "dotty-bridge" % "0.1.1-SNAPSHOT" % "component").sources() + ) +} diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/test b/bridge/src/sbt-test/source-dependencies/signature-change/test new file mode 100644 index 000000000..03ad663ab --- /dev/null +++ b/bridge/src/sbt-test/source-dependencies/signature-change/test @@ -0,0 +1,20 @@ +# Case 1: parameter type changed +$ copy-file changes/A0.scala A.scala +> compile +$ copy-file changes/A1.scala A.scala +# Compilation of B.scala should fail because the signature of f changed +-> compile + +# Case 2: return type changed +$ copy-file changes/A0.scala A.scala +> compile +$ copy-file changes/A2.scala A.scala +# Compilation of B.scala should fail because the signature of f changed +-> compile + +# Case 3: type parameter bounds changed +$ copy-file changes/A0.scala A.scala +> compile +$ copy-file changes/A3.scala A.scala +# Compilation of B.scala should fail because the signature of f changed +-> compile diff --git a/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 26611ef43..a9586879f 100644 --- a/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -272,6 +272,9 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder def apiDef(sym: TermSymbol): api.Def = { def paramLists(t: Type, start: Int = 0): List[api.ParameterList] = t match { + case pt: PolyType => + assert(start == 0) + paramLists(pt.resultType) case mt @ MethodType(pnames, ptypes) => // TODO: We shouldn't have to work so hard to find the default parameters // of a method, Dotty should expose a convenience method for that, see #1143 -- cgit v1.2.3 From a6a142e772b43fb44ba7086ba1005e983059285a Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 12 Jul 2016 04:48:47 +0100 Subject: ExtractAPI: Add support for TypeLambdas Before the new higher-kinded implementation this wasn't needed because lambdas were just RefinedTypes. 
--- src/dotty/tools/dotc/sbt/ExtractAPI.scala | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/src/dotty/tools/dotc/sbt/ExtractAPI.scala index a9586879f..caba96afa 100644 --- a/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -174,9 +174,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val name = if (sym.is(ModuleClass)) sym.fullName.sourceModuleName else sym.fullName - val tparams = sym.typeParams.map(tparam => apiTypeParameter( - tparam.name.toString, tparam.variance, - tparam.info.bounds.lo, tparam.info.bounds.lo)) + val tparams = sym.typeParams.map(apiTypeParameter) val structure = apiClassStructure(sym) @@ -364,6 +362,10 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder val apiTycon = simpleType(tycon) val apiArgs = args.map(processArg) new api.Parameterized(apiTycon, apiArgs.toArray) + case TypeLambda(tparams, res) => + val apiTparams = tparams.map(apiTypeParameter) + val apiRes = apiType(res) + new api.Polymorphic(apiRes, apiTparams.toArray) case rt: RefinedType => val name = rt.refinedName.toString val parent = apiType(rt.parent) @@ -409,12 +411,13 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder case RecThis(binder) => apiThis(binder.typeSymbol) // !!! this is almost certainly wrong: binder does not always have a typeSymbol !!! case tp: ParamType => + // TODO: Distinguishing parameters based on their names alone is not enough, + // the binder is also needed (at least for type lambdas). new api.ParameterRef(tp.paramName.toString) case tp: LazyRef => apiType(tp.ref) case tp: TypeVar => apiType(tp.underlying) - // !!! missing cases: TypeLambda, HKApply case _ => { ctx.warning(i"sbt-api: Unhandled type ${tp.getClass} : $tp") Constants.emptyType @@ -437,6 +440,10 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder new api.Singleton(new api.Path(pathComponents.toArray.reverse ++ Array(Constants.thisPath))) } + def apiTypeParameter(tparam: TypeParamInfo): api.TypeParameter = + apiTypeParameter(tparam.paramName.toString, tparam.paramVariance, + tparam.paramBounds.lo, tparam.paramBounds.hi) + def apiTypeParameter(name: String, variance: Int, lo: Type, hi: Type): api.TypeParameter = new api.TypeParameter(name, Array(), Array(), apiVariance(variance), apiType(lo), apiType(hi)) -- cgit v1.2.3 From 1792c9e9bcff1feba7b50a24a46e1e20d8a39d9b Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 12 Jul 2016 04:51:53 +0100 Subject: ExtractAPI: Add support for RecType --- src/dotty/tools/dotc/sbt/ExtractAPI.scala | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/src/dotty/tools/dotc/sbt/ExtractAPI.scala index caba96afa..437e36bb9 100644 --- a/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -387,6 +387,13 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder Array() } new api.Structure(strict2lzy(Array(parent)), strict2lzy(decl), strict2lzy(Array())) + case tp: RecType => + apiType(tp.parent) + case RecThis(recType) => + // `tp` must be present inside `recType`, so calling `apiType` on + // `recType` would lead to an infinite recursion, we avoid this by + // computing the representation of `recType` lazily. 
+ apiLazy(recType) case tp: AndOrType => val parents = List(apiType(tp.tp1), apiType(tp.tp2)) @@ -408,8 +415,6 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder apiType(tpe) case tp: ThisType => apiThis(tp.cls) - case RecThis(binder) => - apiThis(binder.typeSymbol) // !!! this is almost certainly wrong: binder does not always have a typeSymbol !!! case tp: ParamType => // TODO: Distinguishing parameters based on their names alone is not enough, // the binder is also needed (at least for type lambdas). @@ -434,6 +439,13 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder Constants.emptyType } + def apiLazy(tp: => Type): api.Type = { + // TODO: The sbt api needs a convenient way to make a lazy type. + // For now, we repurpose Structure for this. + val apiTp = lzy(Array(apiType(tp))) + new api.Structure(apiTp, strict2lzy(Array()), strict2lzy(Array())) + } + def apiThis(sym: Symbol): api.Singleton = { val pathComponents = sym.ownersIterator.takeWhile(!_.isEffectiveRoot) .map(s => new api.Id(s.name.toString)) -- cgit v1.2.3 From cdebd91712b36b048233d7cf9501cc7a5bb50b31 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 11 Jul 2016 18:05:36 +0200 Subject: Allow definition of new types in refinements Allow definition of types in refinements that do not appear in parent type. --- src/dotty/tools/dotc/typer/Typer.scala | 2 +- tests/neg/i39.scala | 2 +- tests/neg/i50-volatile.scala | 4 ++-- tests/neg/subtyping.scala | 2 +- tests/neg/zoo.scala | 12 ++++++------ tests/pos/t2712-2.scala | 25 +++++++++++++++++++++++++ tests/pos/t2712-5.scala | 29 +++++++++++++++++++++++++++++ 7 files changed, 65 insertions(+), 11 deletions(-) create mode 100644 tests/pos/t2712-2.scala create mode 100644 tests/pos/t2712-5.scala diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index da176427a..62d356e3d 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -913,7 +913,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit typr.println(s"adding refinement $refinement") checkRefinementNonCyclic(refinement, refineCls, seen) val rsym = refinement.symbol - if ((rsym.is(Method) || rsym.isType) && rsym.allOverriddenSymbols.isEmpty) + if (rsym.is(Method) && rsym.allOverriddenSymbols.isEmpty) ctx.error(i"refinement $rsym without matching type in parent $parent", refinement.pos) val rinfo = if (rsym is Accessor) rsym.info.resultType else rsym.info RefinedType(parent, rsym.name, rinfo) diff --git a/tests/neg/i39.scala b/tests/neg/i39.scala index df53d9816..8a13a7d06 100644 --- a/tests/neg/i39.scala +++ b/tests/neg/i39.scala @@ -1,7 +1,7 @@ object i39neg { trait B { - type D <: { type T } // error + type D <: { type T } def d: D } diff --git a/tests/neg/i50-volatile.scala b/tests/neg/i50-volatile.scala index f6fa3466d..fcfc9592b 100644 --- a/tests/neg/i50-volatile.scala +++ b/tests/neg/i50-volatile.scala @@ -3,10 +3,10 @@ class Test { class Inner } type A <: Base { - type X = String // error + type X = String // old-error } type B <: { - type X = Int // error + type X = Int // old-error } lazy val o: A & B = ??? diff --git a/tests/neg/subtyping.scala b/tests/neg/subtyping.scala index 27cc0568e..351fa0ecd 100644 --- a/tests/neg/subtyping.scala +++ b/tests/neg/subtyping.scala @@ -8,7 +8,7 @@ object Test { implicitly[B#X <:< A#X] // error: no implicit argument } def test2(): Unit = { - val a : { type T; type U } = ??? 
// error // error + val a : { type T; type U } = ??? implicitly[a.T <:< a.U] // error: no implicit argument } } diff --git a/tests/neg/zoo.scala b/tests/neg/zoo.scala index 3d9b77b72..19efcc1d7 100644 --- a/tests/neg/zoo.scala +++ b/tests/neg/zoo.scala @@ -1,23 +1,23 @@ object Test { type Meat = { - type IsMeat = Any // error + type IsMeat = Any } type Grass = { - type IsGrass = Any // error + type IsGrass = Any } type Animal = { - type Food // error + type Food def eats(food: Food): Unit // error def gets: Food // error } type Cow = { - type IsMeat = Any // error - type Food <: Grass // error + type IsMeat = Any + type Food <: Grass def eats(food: Grass): Unit // error def gets: Grass // error } type Lion = { - type Food = Meat // error + type Food = Meat def eats(food: Meat): Unit // error def gets: Meat // error } diff --git a/tests/pos/t2712-2.scala b/tests/pos/t2712-2.scala new file mode 100644 index 000000000..95172545d --- /dev/null +++ b/tests/pos/t2712-2.scala @@ -0,0 +1,25 @@ +package test + +// See: https://github.com/milessabin/si2712fix-demo/issues/3 +object Test { + trait A[T1, T2] { } + trait B[T1, T2] { } + class C[T] extends A[T, Long] with B[T, Double] + class CB extends A[Boolean, Long] with B[Boolean, Double] + + trait A2[T] + trait B2[T] + class C2[T] extends A2[T] with B2[T] + class CB2 extends A2[Boolean] with B2[Boolean] + + def meh[M[_], A](x: M[A]): M[A] = x + + val m0 = meh(new C[Boolean]) + m0: C[Boolean] + val m1 = meh(new CB) + m1: B[Boolean, Double] // note: different order in which parents are visited for hk type inference. Dotty picks libearization order. + val m2 = meh(new C2[Boolean]) + m2: C2[Boolean] + val m3 = meh(new CB2) + m3: B2[Boolean] // note: different order in which parents are visited for hk type inference. Dotty picks libearization order. +} diff --git a/tests/pos/t2712-5.scala b/tests/pos/t2712-5.scala new file mode 100644 index 000000000..ed96d4c06 --- /dev/null +++ b/tests/pos/t2712-5.scala @@ -0,0 +1,29 @@ +package test + +import scala.language.higherKinds + +trait Functor[F[_]] { + def map[A, B](f: A => B, fa: F[A]): F[B] +} + +object Functor { + implicit def function[A]: Functor[({ type l[B] = A => B })#l] = + new Functor[({ type l[B] = A => B })#l] { + def map[C, B](cb: C => B, ac: A => C): A => B = cb compose ac + } +} + +object FunctorSyntax { + implicit class FunctorOps[F[_], A](fa: F[A])(implicit F: Functor[F]) { + def map[B](f: A => B): F[B] = F.map(f, fa) + } +} + +object Test { + + val f: Int => String = _.toString + + import FunctorSyntax._ + + f.map((s: String) => s.reverse) +} -- cgit v1.2.3 From 84a1a7ae7b1e4931fe04a5a21a04bb858e8acebb Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 12 Jul 2016 17:12:59 +0200 Subject: Avoid dealiasing on type application When applying a type alias of a type lambda, keep the original application instead of reducing. But reduce anyway if - the reduced type is an application where the type constructor has the same kind as the original type constructor, or - some of the arguments are wildcards. 
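A minimal illustration (invented names, in the spirit of the i1181 test enabled later in this series) of the rule above: the alias application is kept when reducing would change the constructor's kind, and reduced when it would not.

    object AliasApplyDemo {
      def meh[M[_], A](x: M[A]): M[A] = x

      type Pair[A] = (A, A)   // reduct (Int, Int) is a Tuple2 application, so the kind
                              // changes: Pair[Int] is kept and inference can pick M = Pair
      meh((1, 2): Pair[Int])

      type L[A] = List[A]     // reduct List[Int] has a constructor of the same kind as L,
                              // so the application is reduced
      meh(Nil: L[Int])
    }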
--- src/dotty/tools/dotc/core/ConstraintHandling.scala | 12 ++-- src/dotty/tools/dotc/core/Symbols.scala | 2 +- src/dotty/tools/dotc/core/TypeApplications.scala | 81 +++++++++++++--------- src/dotty/tools/dotc/core/TypeComparer.scala | 7 +- src/dotty/tools/dotc/core/TypeParamInfo.scala | 10 +-- src/dotty/tools/dotc/core/Types.scala | 30 ++++++-- tests/neg/boundspropagation.scala | 2 +- tests/neg/i94-nada.scala | 11 --- tests/pos/i94-nada.scala | 2 +- tests/pos/t2712-6.scala | 12 ++++ 10 files changed, 103 insertions(+), 66 deletions(-) delete mode 100644 tests/neg/i94-nada.scala create mode 100644 tests/pos/t2712-6.scala diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala index c5e3bad40..18e47a7f2 100644 --- a/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -300,19 +300,21 @@ trait ConstraintHandling { * to refer to such a lambda parameter because the lambda parameter is * not visible where `A` is defined. Consequently, we need to * approximate the bound so that the lambda parameter does not appear in it. - * Test case in neg/i94-nada.scala. This test crashes with an illegal instance - * error when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is + * If `tp` is an upper bound, we need to approximate with something smaller, + * otherwise something larger. + * Test case in pos/i94-nada.scala. This test crashes with an illegal instance + * error in Test2 when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is * missing. */ def pruneLambdaParams(tp: Type) = - if (comparingLambdas) { + if (comparingLambdas && param.binder.isInstanceOf[PolyType]) { val approx = new ApproximatingTypeMap { def apply(t: Type): Type = t match { case t @ PolyParam(tl: TypeLambda, n) => val effectiveVariance = if (fromBelow) -variance else variance val bounds = tl.paramBounds(n) - if (effectiveVariance > 0) bounds.hi - else if (effectiveVariance < 0 ) bounds.lo + if (effectiveVariance > 0) bounds.lo + else if (effectiveVariance < 0) bounds.hi else NoType case _ => mapOver(t) diff --git a/src/dotty/tools/dotc/core/Symbols.scala b/src/dotty/tools/dotc/core/Symbols.scala index 229df4576..d46ea6b0f 100644 --- a/src/dotty/tools/dotc/core/Symbols.scala +++ b/src/dotty/tools/dotc/core/Symbols.scala @@ -491,7 +491,7 @@ object Symbols { // TypeParamInfo methods def isTypeParam(implicit ctx: Context) = denot.is(TypeParam) - def paramName(implicit ctx: Context): Name = name + def paramName(implicit ctx: Context) = name.asTypeName def paramBounds(implicit ctx: Context) = denot.info.bounds def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = pre.memberInfo(this).bounds def paramBoundsOrCompleter(implicit ctx: Context): Type = denot.infoOrCompleter diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index 09f006a11..b9957ccb2 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -338,10 +338,10 @@ class TypeApplications(val self: Type) extends AnyVal { * * TODO: Handle parameterized lower bounds */ - def LambdaAbstract(tparams: List[TypeParamInfo])(implicit ctx: Context): Type = { + def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = { def expand(tp: Type) = TypeLambda( - tpnme.syntheticLambdaParamNames(tparams.length), tparams.map(_.paramVariance))( + tpnme.syntheticLambdaParamNames(tparams.length), 
tparams.map(_.variance))( tl => tparams.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds), tl => tl.lifted(tparams, tp)) assert(!isHK, self) @@ -439,20 +439,13 @@ class TypeApplications(val self: Type) extends AnyVal { } } - /** Encode + /** The type representing * * T[U1, ..., Un] * * where * @param self = `T` * @param args = `U1,...,Un` - * performing the following simplifications - * - * 1. If `T` is an eta expansion `[X1,..,Xn] -> C[X1,...,Xn]` of class `C` compute - * `C[U1, ..., Un]` instead. - * 2. If `T` is some other type lambda `[X1,...,Xn] -> S` none of the arguments - * `U1,...,Un` is a wildcard, compute `[X1:=U1, ..., Xn:=Un]S` instead. - * 3. If `T` is a polytype, instantiate it to `U1,...,Un`. */ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ { val typParams = self.typeParams @@ -469,30 +462,52 @@ class TypeApplications(val self: Type) extends AnyVal { } case nil => t } + val stripped = self.stripTypeVar + val dealiased = stripped.safeDealias if (args.isEmpty || ctx.erasedTypes) self - else self.stripTypeVar.safeDealias match { - case self: TypeLambda => - if (!args.exists(_.isInstanceOf[TypeBounds])) self.instantiate(args) - else { - val reducer = new Reducer(self, args) - val reduced = reducer(self.resType) - if (reducer.allReplaced) reduced - else HKApply(self, args) - } - case self: PolyType => - self.instantiate(args) - case self: AndOrType => - self.derivedAndOrType(self.tp1.appliedTo(args), self.tp2.appliedTo(args)) - case self: TypeAlias => - self.derivedTypeAlias(self.alias.appliedTo(args)) - case self: TypeBounds => - self.derivedTypeBounds(self.lo, self.hi.appliedTo(args)) - case self: LazyRef => - LazyRef(() => self.ref.appliedTo(args)) - case self: WildcardType => - self - case self: TypeRef if self.symbol == defn.NothingClass => - self + else dealiased match { + case dealiased: TypeLambda => + def tryReduce = + if (!args.exists(_.isInstanceOf[TypeBounds])) { + val reduced = dealiased.instantiate(args) + if (dealiased eq stripped) reduced + else reduced match { + case AppliedType(tycon, args) if variancesConform(typParams, tycon.typeParams) => + // Reducing is safe for type inference, as kind of type constructor does not change + //println(i"reduced: $reduced instead of ${HKApply(self, args)}") + reduced + case _ => + // Reducing changes kind, keep hk application instead + //println(i"fallback: ${HKApply(self, args)} instead of $reduced") + HKApply(self, args) + } + } + else dealiased.resType match { + case AppliedType(tycon, args1) if tycon.safeDealias ne tycon => + dealiased + .derivedTypeLambda(resType = tycon.safeDealias.appliedTo(args1)) + .appliedTo(args) + case _ => + val reducer = new Reducer(dealiased, args) + val reduced = reducer(dealiased.resType) + if (reducer.allReplaced) reduced + else HKApply(dealiased, args) + } + tryReduce + case dealiased: PolyType => + dealiased.instantiate(args) + case dealiased: AndOrType => + dealiased.derivedAndOrType(dealiased.tp1.appliedTo(args), dealiased.tp2.appliedTo(args)) + case dealiased: TypeAlias => + dealiased.derivedTypeAlias(dealiased.alias.appliedTo(args)) + case dealiased: TypeBounds => + dealiased.derivedTypeBounds(dealiased.lo, dealiased.hi.appliedTo(args)) + case dealiased: LazyRef => + LazyRef(() => dealiased.ref.appliedTo(args)) + case dealiased: WildcardType => + dealiased + case dealiased: TypeRef if dealiased.symbol == defn.NothingClass => + dealiased case _ if typParams.isEmpty || typParams.head.isInstanceOf[LambdaParam] => 
HKApply(self, args) case dealiased => diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index faa4e1b16..a763e1de8 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -628,9 +628,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { val tparams1 = tparams1a.drop(lengthDiff) variancesConform(tparams1, tparams) && { if (lengthDiff > 0) - tycon1b = tycon1a - .appliedTo(args1.take(lengthDiff) ++ tparams1.map(_.paramRef)) - .LambdaAbstract(tparams1) + tycon1b = TypeLambda(tparams1.map(_.paramName), tparams1.map(_.paramVariance))( + tl => tparams1.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds), + tl => tycon1a.appliedTo(args1.take(lengthDiff) ++ + tparams1.indices.toList.map(PolyParam(tl, _)))) (ctx.mode.is(Mode.TypevarsMissContext) || tryInstantiate(tycon2, tycon1b.ensureHK)) && isSubType(tp1, tycon1b.appliedTo(args2)) diff --git a/src/dotty/tools/dotc/core/TypeParamInfo.scala b/src/dotty/tools/dotc/core/TypeParamInfo.scala index 1d79e4204..647c895db 100644 --- a/src/dotty/tools/dotc/core/TypeParamInfo.scala +++ b/src/dotty/tools/dotc/core/TypeParamInfo.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc.core -import Names.Name +import Names.TypeName import Contexts.Context import Types.{Type, TypeBounds} @@ -15,22 +15,22 @@ trait TypeParamInfo { def isTypeParam(implicit ctx: Context): Boolean /** The name of the type parameter */ - def paramName(implicit ctx: Context): Name + def paramName(implicit ctx: Context): TypeName /** The info of the type parameter */ def paramBounds(implicit ctx: Context): TypeBounds /** The info of the type parameter as seen from a prefix type. * For type parameter symbols, this is the `memberInfo` as seen from `prefix`. - * For type lambda parameters, it's the same as `paramBounds` as + * For type lambda parameters, it's the same as `paramBounds` as * `asSeenFrom` has already been applied to the whole type lambda. */ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds - + /** The parameter bounds, or the completer if the type parameter * is an as-yet uncompleted symbol. 
*/ - def paramBoundsOrCompleter(implicit ctx: Context): Type + def paramBoundsOrCompleter(implicit ctx: Context): Type /** The variance of the type parameter */ def paramVariance(implicit ctx: Context): Int diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 63f39637b..938e40128 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -117,6 +117,7 @@ object Types { case _ => this1.symbol eq sym } case this1: RefinedOrRecType => this1.parent.isRef(sym) + case this1: HKApply => this1.superType.isRef(sym) case _ => false } @@ -857,6 +858,10 @@ object Types { tp.derivedAnnotatedType(tp.tpe.dealias, tp.annot) case tp: LazyRef => tp.ref.dealias + case app @ HKApply(tycon, args) => + val tycon1 = tycon.dealias + if (tycon1 ne tycon) app.superType.dealias + else this case _ => this } @@ -2586,7 +2591,7 @@ object Types { lazy val typeParams: List[LambdaParam] = paramNames.indices.toList.map(new LambdaParam(this, _)) - def derivedTypeLambda(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): Type = + def derivedTypeLambda(paramNames: List[TypeName] = paramNames, paramBounds: List[TypeBounds] = paramBounds, resType: Type)(implicit ctx: Context): Type = resType match { case resType @ TypeAlias(alias) => resType.derivedTypeAlias(duplicate(paramNames, paramBounds, alias)) @@ -2640,12 +2645,21 @@ object Types { abstract case class HKApply(tycon: Type, args: List[Type]) extends CachedProxyType with ValueType { + private var validSuper: Period = Nowhere + private var cachedSuper: Type = _ + override def underlying(implicit ctx: Context): Type = tycon - override def superType(implicit ctx: Context): Type = tycon match { - case tp: TypeLambda => defn.AnyType - case tp: TypeProxy => tp.superType.applyIfParameterized(args) - case _ => defn.AnyType + override def superType(implicit ctx: Context): Type = { + if (ctx.period != validSuper) { + cachedSuper = tycon match { + case tp: TypeLambda => defn.AnyType + case tp: TypeProxy => tp.superType.applyIfParameterized(args) + case _ => defn.AnyType + } + validSuper = ctx.period + } + cachedSuper } /* def lowerBound(implicit ctx: Context): Type = tycon.stripTypeVar match { @@ -2760,7 +2774,11 @@ object Types { else bounds(paramNum) } // no customized hashCode/equals needed because cycle is broken in PolyType - override def toString = s"PolyParam($paramName)" + override def toString = + try s"PolyParam($paramName)" + catch { + case ex: IndexOutOfBoundsException => s"PolyParam()" + } override def computeHash = doHash(paramNum, binder.identityHash) diff --git a/tests/neg/boundspropagation.scala b/tests/neg/boundspropagation.scala index b545b09da..dd4ebf513 100644 --- a/tests/neg/boundspropagation.scala +++ b/tests/neg/boundspropagation.scala @@ -40,5 +40,5 @@ object test4 { } class Test5 { -"": ({ type U = this.type })#U // error // error +"": ({ type U = this.type })#U // error } diff --git a/tests/neg/i94-nada.scala b/tests/neg/i94-nada.scala deleted file mode 100644 index 8ca104e06..000000000 --- a/tests/neg/i94-nada.scala +++ /dev/null @@ -1,11 +0,0 @@ -trait Test1 { - trait Monad[MX] { - def x: MX - } - sealed abstract class Either[A,B] - case class Left[A,B](x: A) extends Either[A,B] with Monad[A] - case class Right[A,B](x: B) extends Either[A,B] with Monad[B] - def flatMap[FX,FY,M[FMX]<:Monad[FMX]](m: M[FX], f: FX => M[FY]): M[FY] = f(m.x) - println(flatMap(Left(1), {x: Int => Left(x)})) // error: Left does not conform to [X] -> 
Monad[X] - -} diff --git a/tests/pos/i94-nada.scala b/tests/pos/i94-nada.scala index 1c7d88a10..2c3cf895c 100644 --- a/tests/pos/i94-nada.scala +++ b/tests/pos/i94-nada.scala @@ -35,7 +35,7 @@ trait Test2 { case class Left[A,B](x: A) extends Either[A,B] with Monad[A] case class Right[A,B](x: B) extends Either[A,B] with Monad[B] def flatMap[X,Y,M[X]](m: M[X], f: X => M[Y]): M[Y] - println(flatMap(Right(1), {x: Int => Right(x)})) + println(flatMap(Left(1), {x: Int => Left(x)})) } trait Test3 { def flatMap[X,Y,M[X]](m: M[X], f: X => M[Y]): M[Y] diff --git a/tests/pos/t2712-6.scala b/tests/pos/t2712-6.scala new file mode 100644 index 000000000..dbba60472 --- /dev/null +++ b/tests/pos/t2712-6.scala @@ -0,0 +1,12 @@ +package test + +object Tags { + type Tagged[A, T] = {type Tag = T; type Self = A} + + type @@[T, Tag] = Tagged[T, Tag] + + trait Disjunction + + def meh[M[_], A](ma: M[A]): M[A] = ma + meh(null: Int @@ Disjunction)//.asInstanceOf[Int @@ Disjunction]) +} -- cgit v1.2.3 From c7f3b45abf221c432b3fbd7462741b00a9e10142 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 12 Jul 2016 18:12:27 +0200 Subject: Drop restriction to 2nd order hk types Allow hk type parameters to be higher kinded themselves. --- docs/SyntaxSummary.txt | 5 +++-- src/dotty/tools/dotc/parsing/Parsers.scala | 6 ++---- tests/disabled/not-representable/pos/t2066.scala | 6 +++--- tests/neg/t2994.scala | 6 +++--- tests/pos/t2066.scala | 25 ++++++++++++++++++++++++ tests/pos/t2712-3.scala | 24 +++++++++++++++++++++++ 6 files changed, 60 insertions(+), 12 deletions(-) create mode 100644 tests/pos/t2066.scala create mode 100644 tests/pos/t2712-3.scala diff --git a/docs/SyntaxSummary.txt b/docs/SyntaxSummary.txt index 45937fb54..6751c90e2 100644 --- a/docs/SyntaxSummary.txt +++ b/docs/SyntaxSummary.txt @@ -225,7 +225,8 @@ grammar. TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds HkTypeParamClause ::= `[' HkTypeParam {`,' HkTypeParam} `]' - HkTypeParam ::= {Annotation} ['+' | `-'] (Id | `_') TypeBounds + HkTypeParam ::= {Annotation} ['+' | `-'] (Id[HkTypeParamClause] | `_') + TypeBounds ClsParamClauses ::= {ClsParamClause} [[nl] `(' `implicit' ClsParams `)'] ClsParamClause ::= [nl] `(' [ClsParams] ')' @@ -280,7 +281,7 @@ grammar. 
DefDcl ::= DefSig [`:' Type] DefDef(_, name, tparams, vparamss, tpe, EmptyTree) DefSig ::= id [DefTypeParamClause] DefParamClauses TypeDcl ::= id [TypTypeParamClause] ['=' Type] TypeDefTree(_, name, tparams, tpt) - | id [HkParamClause] TypeBounds TypeDefTree(_, name, tparams, bounds) + | id [HkTypeParamClause] TypeBounds TypeDefTree(_, name, tparams, bounds) Def ::= `val' PatDef | `var' VarDef diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala index 51b681c0e..c535c4241 100644 --- a/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1551,7 +1551,7 @@ object Parsers { * TypTypeParam ::= {Annotation} Id [HkTypePamClause] TypeBounds * * HkTypeParamClause ::= `[' HkTypeParam {`,' HkTypeParam} `]' - * HkTypeParam ::= {Annotation} ['+' | `-'] (Id | _') TypeBounds + * HkTypeParam ::= {Annotation} ['+' | `-'] (Id[HkTypePamClause] | _') TypeBounds */ def typeParamClause(ownerKind: ParamOwner.Value): List[TypeDef] = inBrackets { def typeParam(): TypeDef = { @@ -1584,9 +1584,7 @@ object Parsers { in.nextToken() ctx.freshName(nme.USCORE_PARAM_PREFIX).toTypeName } - val hkparams = - if (ownerKind == ParamOwner.TypeParam) Nil - else typeParamClauseOpt(ParamOwner.TypeParam) + val hkparams = typeParamClauseOpt(ParamOwner.TypeParam) val bounds = if (isConcreteOwner) typeParamBounds(name) else typeBounds() diff --git a/tests/disabled/not-representable/pos/t2066.scala b/tests/disabled/not-representable/pos/t2066.scala index 30cb99d45..d175769fa 100644 --- a/tests/disabled/not-representable/pos/t2066.scala +++ b/tests/disabled/not-representable/pos/t2066.scala @@ -3,7 +3,7 @@ trait A1 { } trait B1 extends A1 { - override def f[T[_]] = () + override def f[T[+_]] = () } @@ -12,12 +12,12 @@ trait A2 { } trait B2 extends A2 { - override def f[T[_]] = () + override def f[T[-_]] = () } trait A3 { - def f[T[X[_]]] = () + def f[T[X[+_]]] = () } trait B3 extends A3 { diff --git a/tests/neg/t2994.scala b/tests/neg/t2994.scala index 23a3b6a8b..6964a080e 100644 --- a/tests/neg/t2994.scala +++ b/tests/neg/t2994.scala @@ -7,7 +7,7 @@ object Naturals { type a[s[_ <: NAT] <: NAT, z <: NAT] = z } final class SUCC[n <: NAT] extends NAT { - type a[s[_ <: NAT] <: NAT, z <: NAT] = s[n#a[s, z]] // old-error: not a legal path + type a[s[_ <: NAT] <: NAT, z <: NAT] = s[n#a[s, z]] // error: not a legal path } type _0 = ZERO type _1 = SUCC[_0] @@ -20,8 +20,8 @@ object Naturals { // crashes scala-2.8.0 beta1 trait MUL[n <: NAT, m <: NAT] extends NAT { - trait curry[n[_[_], _], s[_]] { type f[z <: NAT] = n[s, z] } // can't do double param lists: // error: `]' expected but `[` found. 
// error: wrong number of type arguments - type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z] // old-error: not a legal path // old-error: not a legal path + trait curry[n[_[_], _], s[_]] { type f[z <: NAT] = n[s, z] } + type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z] // error: not a legal path // error: not a legal path } } diff --git a/tests/pos/t2066.scala b/tests/pos/t2066.scala new file mode 100644 index 000000000..d175769fa --- /dev/null +++ b/tests/pos/t2066.scala @@ -0,0 +1,25 @@ +trait A1 { + def f[T[+_]] = () +} + +trait B1 extends A1 { + override def f[T[+_]] = () +} + + +trait A2 { + def f[T[-_]] = () +} + +trait B2 extends A2 { + override def f[T[-_]] = () +} + + +trait A3 { + def f[T[X[+_]]] = () +} + +trait B3 extends A3 { + override def f[T[X[+_]]] = () +} diff --git a/tests/pos/t2712-3.scala b/tests/pos/t2712-3.scala new file mode 100644 index 000000000..dd599f40f --- /dev/null +++ b/tests/pos/t2712-3.scala @@ -0,0 +1,24 @@ +package test + +object Test1 { + class Foo[T, F[_]] + def meh[M[_[_]], F[_]](x: M[F]): M[F] = x + meh(new Foo[Int, List]) // solves ?M = [X[_]]Foo[Int, X[_]] ?A = List ... +} + +object Test2 { + trait TC[T] + class Foo[F[_], G[_]] + def meh[GG[_[_]]](g: GG[TC]) = ??? + meh(new Foo[TC, TC]) // solves ?G = [X[_]]Foo[TC, X] +} + +object Test3 { + trait TC[F[_]] + trait TC2[F[_]] + class Foo[F[_[_]], G[_[_]]] + new Foo[TC, TC2] + + def meh[G[_[_[_]]]](g: G[TC2]) = ??? + meh(new Foo[TC, TC2]) // solves ?G = [X[_[_]]]Foo[TC, X] +} -- cgit v1.2.3 From 7df0fa52ade1e4cfe3d50a9ea7e5adf2d8c161c0 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 12 Jul 2016 18:16:53 +0100 Subject: Enable test for fixed #1181: Typer should preserve aliases in TypeTrees This was fixed two commits ago. --- tests/pending/pos/i1181.scala | 12 ------------ tests/pos/i1181.scala | 12 ++++++++++++ 2 files changed, 12 insertions(+), 12 deletions(-) delete mode 100644 tests/pending/pos/i1181.scala create mode 100644 tests/pos/i1181.scala diff --git a/tests/pending/pos/i1181.scala b/tests/pending/pos/i1181.scala deleted file mode 100644 index 057c938d3..000000000 --- a/tests/pending/pos/i1181.scala +++ /dev/null @@ -1,12 +0,0 @@ -object Test { - def foo[M[_]](x: M[Int]) = x - - type Alias[A] = (A, A) - val x: Alias[Int] = (1, 2) - - foo[Alias](x) // ok - foo(x) // ok in scalac but fails in dotty with: - // error: type mismatch: - // found : (Int, Int) - // required: M[Int] -} diff --git a/tests/pos/i1181.scala b/tests/pos/i1181.scala new file mode 100644 index 000000000..057c938d3 --- /dev/null +++ b/tests/pos/i1181.scala @@ -0,0 +1,12 @@ +object Test { + def foo[M[_]](x: M[Int]) = x + + type Alias[A] = (A, A) + val x: Alias[Int] = (1, 2) + + foo[Alias](x) // ok + foo(x) // ok in scalac but fails in dotty with: + // error: type mismatch: + // found : (Int, Int) + // required: M[Int] +} -- cgit v1.2.3 From 1443fd4c844c1c54e59479e156833d2cce9a349a Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Tue, 12 Jul 2016 19:15:19 +0200 Subject: Optimize hk comparisons Use (cached) superType where possible. 
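A stand-alone sketch (not the compiler's actual classes) of the period-keyed caching that makes the cached superType cheap to reuse: the computed value is memoized together with the period it was computed in, and recomputed only when the period changes.

    final class PeriodCached[A](compute: () => A, currentPeriod: () => Int) {
      private var validFor: Int = -1                  // sentinel: nothing cached yet
      private var cached: A = null.asInstanceOf[A]
      def get: A = {
        val p = currentPeriod()
        if (p != validFor) { cached = compute(); validFor = p }
        cached
      }
    }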
--- src/dotty/tools/dotc/core/TypeComparer.scala | 22 +++++++++++----------- src/dotty/tools/dotc/core/Types.scala | 13 +++++++++++++ 2 files changed, 24 insertions(+), 11 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index a763e1de8..31cc87b3a 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -668,25 +668,25 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { * tp1 <:< tp2 using fourthTry (this might instantiate params in tp1) * tp1 <:< app2 using isSubType (this might instantiate params in tp2) */ - def compareLower(tycon2bounds: TypeBounds): Boolean = { - val app2 = tycon2bounds.lo.applyIfParameterized(args2) - if (tycon2bounds.lo eq tycon2bounds.hi) isSubType(tp1, app2) - else either(fourthTry(tp1, tp2), isSubType(tp1, app2)) + def compareLower(tycon2bounds: TypeBounds, tyconIsTypeRef: Boolean): Boolean = { + def app2 = tycon2bounds.lo.applyIfParameterized(args2) + if (tycon2bounds.lo eq tycon2bounds.hi) + isSubType(tp1, if (tyconIsTypeRef) tp2.superType else app2) + else + either(fourthTry(tp1, tp2), isSubType(tp1, app2)) } tycon2 match { case param2: PolyParam => isMatchingApply(tp1) || { if (canConstrain(param2)) canInstantiate(param2) - else compareLower(bounds(param2)) + else compareLower(bounds(param2), tyconIsTypeRef = false) } case tycon2: TypeRef => isMatchingApply(tp1) || - compareLower(tycon2.info.bounds) - case tycon2: TypeVar => - isSubType(tp1, tycon2.underlying.appliedTo(args2)) - case tycon2: AnnotatedType => - compareHkApply2(tp1, tp2, tycon2.underlying, args2) + compareLower(tycon2.info.bounds, tyconIsTypeRef = true) + case _: TypeVar | _: AnnotatedType => + isSubType(tp1, tp2.superType) case _ => false } @@ -706,7 +706,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { canConstrain(param1) && canInstantiate || isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2) case tycon1: TypeProxy => - isSubType(tycon1.superType.applyIfParameterized(args1), tp2) + isSubType(tp1.superType, tp2) case _ => false } diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 938e40128..fc68740eb 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2661,6 +2661,19 @@ object Types { } cachedSuper } + + def lowerBound(implicit ctx: Context) = tycon.stripTypeVar match { + case tycon: TypeRef => + tycon.info match { + case TypeBounds(lo, hi) => + if (lo eq hi) superType // optimization, can profit from caching in this case + else lo.applyIfParameterized(args) + case _ => NoType + } + case _ => + NoType + } + /* def lowerBound(implicit ctx: Context): Type = tycon.stripTypeVar match { case tp: TypeRef => -- cgit v1.2.3 From f50cb2040494e622f17a1bcc323424a27129fc3a Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 13 Jul 2016 13:46:54 +0200 Subject: Make rewritings of hk applications configurable Beta-reduce only if `Config.simplifyApplications` is true. Turning off beta-reduction revealed two problems which are also fixed in this commit: 1. Bad treatement of higher-kinded argyments in cyclicity checking 2. 
Wrong variance for higher-kinded arguments in TypeAccumulator --- src/dotty/tools/dotc/config/Config.scala | 7 ++++++ src/dotty/tools/dotc/core/TypeApplications.scala | 28 +++++++++++------------- src/dotty/tools/dotc/core/Types.scala | 23 ++++++++++++++++--- src/dotty/tools/dotc/typer/Checking.scala | 2 ++ 4 files changed, 42 insertions(+), 18 deletions(-) diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index 3cc3091b5..0fad2e105 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -98,6 +98,13 @@ object Config { */ final val splitProjections = false + /** If this flag is on, always rewrite an application `S[Ts]` where `S` is an alias for + * `[Xs] -> U` to `[Xs := Ts]U`. If this flag is off, the rewriting is only done if `S` is a + * reference to an instantiated parameter. Turning this flag on was observed to + * give a ~6% speedup on the JUnit test suite. + */ + final val simplifyApplications = true + /** Initial size of superId table */ final val InitialSuperIdsSize = 4096 diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index b9957ccb2..db6020e54 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -469,18 +469,21 @@ class TypeApplications(val self: Type) extends AnyVal { case dealiased: TypeLambda => def tryReduce = if (!args.exists(_.isInstanceOf[TypeBounds])) { - val reduced = dealiased.instantiate(args) - if (dealiased eq stripped) reduced - else reduced match { - case AppliedType(tycon, args) if variancesConform(typParams, tycon.typeParams) => - // Reducing is safe for type inference, as kind of type constructor does not change - //println(i"reduced: $reduced instead of ${HKApply(self, args)}") - reduced + val followAlias = stripped match { + case stripped: TypeRef => + stripped.symbol.is(BaseTypeArg) case _ => - // Reducing changes kind, keep hk application instead - //println(i"fallback: ${HKApply(self, args)} instead of $reduced") - HKApply(self, args) + Config.simplifyApplications && { + dealiased.resType match { + case AppliedType(tyconBody, _) => + variancesConform(typParams, tyconBody.typeParams) + // Reducing is safe for type inference, as kind of type constructor does not change + case _ => false + } + } } + if ((dealiased eq stripped) || followAlias) dealiased.instantiate(args) + else HKApply(self, args) } else dealiased.resType match { case AppliedType(tycon, args1) if tycon.safeDealias ne tycon => @@ -665,11 +668,6 @@ class TypeApplications(val self: Type) extends AnyVal { } } - final def typeConstructor(implicit ctx: Context): Type = self.stripTypeVar match { - case AppliedType(tycon, _) => tycon - case self => self - } - /** If this is the image of a type argument; recover the type argument, * otherwise NoType. 
*/ diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index fc68740eb..d1e5ba47d 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -3422,8 +3422,9 @@ object Types { case tp: HKApply => def mapArg(arg: Type, tparam: TypeParamInfo): Type = { val saved = variance - if (tparam.paramVariance < 0) variance = -variance - else if (tparam.paramVariance == 0) variance = 0 + val pvariance = tparam.paramVariance + if (pvariance < 0) variance = -variance + else if (pvariance == 0) variance = 0 try this(arg) finally variance = saved } @@ -3629,7 +3630,23 @@ object Types { this(x, prefix) case tp @ HKApply(tycon, args) => - foldOver(this(x, tycon), args) + def foldArgs(x: T, tparams: List[TypeParamInfo], args: List[Type]): T = + if (args.isEmpty) { + assert(tparams.isEmpty) + x + } + else { + val tparam = tparams.head + val saved = variance + val pvariance = tparam.paramVariance + if (pvariance < 0) variance = -variance + else if (pvariance == 0) variance = 0 + val acc = + try this(x, args.head) + finally variance = saved + foldArgs(acc, tparams.tail, args.tail) + } + foldArgs(this(x, tycon), tp.typeParams, args) case tp: AndOrType => this(this(x, tp.tp1), tp.tp2) diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index aa13bdc3d..b37216e6e 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -204,6 +204,8 @@ object Checking { tp.derivedRefinedType(this(parent), name, this(rinfo, nestedCycleOK, nestedCycleOK)) case tp: RecType => tp.rebind(this(tp.parent)) + case tp @ HKApply(tycon, args) => + tp.derivedAppliedType(this(tycon), args.map(this(_, nestedCycleOK, nestedCycleOK))) case tp @ TypeRef(pre, name) => try { // A prefix is interesting if it might contain (transitively) a reference -- cgit v1.2.3 From 0ff5354e2a5e3f29234d6d761177f9228d8da522 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 13 Jul 2016 13:58:51 +0200 Subject: Simplify variance computations --- src/dotty/tools/dotc/core/Types.scala | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index d1e5ba47d..284e9535e 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -3422,9 +3422,7 @@ object Types { case tp: HKApply => def mapArg(arg: Type, tparam: TypeParamInfo): Type = { val saved = variance - val pvariance = tparam.paramVariance - if (pvariance < 0) variance = -variance - else if (pvariance == 0) variance = 0 + variance *= tparam.paramVariance try this(arg) finally variance = saved } @@ -3638,9 +3636,7 @@ object Types { else { val tparam = tparams.head val saved = variance - val pvariance = tparam.paramVariance - if (pvariance < 0) variance = -variance - else if (pvariance == 0) variance = 0 + variance *= tparam.paramVariance val acc = try this(x, args.head) finally variance = saved -- cgit v1.2.3 From 18b30803952cee83580eab28068bc773fdce780e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 13 Jul 2016 15:17:17 +0200 Subject: Allow hk parameters in lower bounds --- src/dotty/tools/dotc/core/TypeApplications.scala | 8 +++++--- src/dotty/tools/dotc/core/Types.scala | 4 +++- src/dotty/tools/dotc/typer/Checking.scala | 12 ------------ src/dotty/tools/dotc/typer/Typer.scala | 1 - tests/neg/hklower.scala | 11 ----------- tests/neg/hklower2.scala | 4 ++++ tests/pos/hklower.scala | 11 +++++++++++ 7 
files changed, 23 insertions(+), 28 deletions(-) delete mode 100644 tests/neg/hklower.scala create mode 100644 tests/neg/hklower2.scala create mode 100644 tests/pos/hklower.scala diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index db6020e54..ef241cb66 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -349,7 +349,9 @@ class TypeApplications(val self: Type) extends AnyVal { case self: TypeAlias => self.derivedTypeAlias(expand(self.alias)) case self @ TypeBounds(lo, hi) => - self.derivedTypeBounds(lo, expand(hi)) + self.derivedTypeBounds( + if (lo.isRef(defn.NothingClass)) lo else expand(lo), + expand(hi)) case _ => expand(self) } } @@ -431,7 +433,7 @@ class TypeApplications(val self: Type) extends AnyVal { case arg @ TypeAlias(alias) => arg.derivedTypeAlias(adaptArg(alias)) case arg @ TypeBounds(lo, hi) => - arg.derivedTypeBounds(lo, adaptArg(hi)) + arg.derivedTypeBounds(adaptArg(lo), adaptArg(hi)) case _ => arg } @@ -504,7 +506,7 @@ class TypeApplications(val self: Type) extends AnyVal { case dealiased: TypeAlias => dealiased.derivedTypeAlias(dealiased.alias.appliedTo(args)) case dealiased: TypeBounds => - dealiased.derivedTypeBounds(dealiased.lo, dealiased.hi.appliedTo(args)) + dealiased.derivedTypeBounds(dealiased.lo.appliedTo(args), dealiased.hi.appliedTo(args)) case dealiased: LazyRef => LazyRef(() => dealiased.ref.appliedTo(args)) case dealiased: WildcardType => diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 284e9535e..1bfe9cbd1 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -2596,7 +2596,9 @@ object Types { case resType @ TypeAlias(alias) => resType.derivedTypeAlias(duplicate(paramNames, paramBounds, alias)) case resType @ TypeBounds(lo, hi) => - resType.derivedTypeBounds(lo, duplicate(paramNames, paramBounds, hi)) + resType.derivedTypeBounds( + if (lo.isRef(defn.NothingClass)) lo else duplicate(paramNames, paramBounds, lo), + duplicate(paramNames, paramBounds, hi)) case _ => derivedGenericType(paramNames, paramBounds, resType) } diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index b37216e6e..9e55216c1 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -534,17 +534,6 @@ trait Checking { errorTree(tpt, d"missing type parameter for ${tpt.tpe}") } else tpt - - def checkLowerNotHK(sym: Symbol, tparams: List[Symbol], pos: Position)(implicit ctx: Context) = - if (tparams.nonEmpty) - sym.info match { - case info: TypeAlias => // ok - case TypeBounds(lo, _) => - for (tparam <- tparams) - if (tparam.typeRef.occursIn(lo)) - ctx.error(i"type parameter ${tparam.name} may not occur in lower bound $lo", pos) - case _ => - } } trait NoChecking extends Checking { @@ -558,5 +547,4 @@ trait NoChecking extends Checking { override def checkNoDoubleDefs(cls: Symbol)(implicit ctx: Context): Unit = () override def checkParentCall(call: Tree, caller: ClassSymbol)(implicit ctx: Context) = () override def checkSimpleKinded(tpt: Tree)(implicit ctx: Context): Tree = tpt - override def checkLowerNotHK(sym: Symbol, tparams: List[Symbol], pos: Position)(implicit ctx: Context) = () } diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 62d356e3d..4e2842da7 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ 
b/src/dotty/tools/dotc/typer/Typer.scala @@ -1064,7 +1064,6 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(implicit ctx: Context): Tree = track("typedTypeDef") { val TypeDef(name, rhs) = tdef - checkLowerNotHK(sym, tdef.tparams.map(symbolOfTree), tdef.pos) completeAnnotations(tdef, sym) assignType(cpy.TypeDef(tdef)(name, typedType(rhs), Nil), sym) } diff --git a/tests/neg/hklower.scala b/tests/neg/hklower.scala deleted file mode 100644 index e1e79070e..000000000 --- a/tests/neg/hklower.scala +++ /dev/null @@ -1,11 +0,0 @@ -class Test { // error: conflicting bounds - - type T[X] // OK - type U[X] = T[X] // OK - - type V[X] >: T[X] // error - type W[X] >: T[X] <: T[X] // error - - def f[C[X] >: T[X]]() = ??? // error - -} diff --git a/tests/neg/hklower2.scala b/tests/neg/hklower2.scala new file mode 100644 index 000000000..8268bf09f --- /dev/null +++ b/tests/neg/hklower2.scala @@ -0,0 +1,4 @@ +class Test { // error: conflicting bounds + trait T[X] + type Z[X] >: String <: T[X] +} diff --git a/tests/pos/hklower.scala b/tests/pos/hklower.scala new file mode 100644 index 000000000..033ee3a34 --- /dev/null +++ b/tests/pos/hklower.scala @@ -0,0 +1,11 @@ +class Test { // error: conflicting bounds + + type T[X] + type U[X] = T[X] + + type V[X] >: T[X] + type W[X] >: T[X] <: T[X] + + def f[C[X] >: T[X]]() = ??? + +} -- cgit v1.2.3 From 82fc27f0c2c800de786b54110cfd8627b043fe6d Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 14 Jul 2016 18:06:48 +0200 Subject: Fix bounds checking of hk applied typed Previous logic could only handle classes as constructors. Also, address other reviewers comments. --- src/dotty/tools/dotc/config/Config.scala | 5 ++- src/dotty/tools/dotc/core/TypeApplications.scala | 7 ++-- src/dotty/tools/dotc/core/Types.scala | 45 ++++++++---------------- src/dotty/tools/dotc/typer/Checking.scala | 19 +++++----- tests/neg/t7278.scala | 8 ++--- tests/pos-scala2/t2994.scala | 2 +- tests/pos/hklower.scala | 34 ++++++++++++++++-- 7 files changed, 67 insertions(+), 53 deletions(-) diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala index 0fad2e105..a50945108 100644 --- a/src/dotty/tools/dotc/config/Config.scala +++ b/src/dotty/tools/dotc/config/Config.scala @@ -72,10 +72,9 @@ object Config { /** If this flag is set, take the fast path when comparing same-named type-aliases and types */ final val fastPathForRefinedSubtype = true - /** If this flag is set, $apply projections are checked that they apply to a - * higher-kinded type. 
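A small illustration (types invented; the patch's own t7278 changes below show the real case) of an application that is now bounds-checked even though the constructor is a type alias rather than a class:

    object AliasBoundsDemo {
      class Upper
      class Good extends Upper
      class Bad

      type F[X <: Upper] = List[X]

      val ok: F[Good] = Nil
      // val ko: F[Bad] = Nil   // now reported: Bad does not conform to upper bound Upper
    }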
+ /** If this flag is set, higher-kinded applications are checked for validity */ - final val checkProjections = false + final val checkHKApplications = false /** The recursion depth for showing a summarized string */ final val summarizeDepth = 2 diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index ef241cb66..314233709 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -338,13 +338,12 @@ class TypeApplications(val self: Type) extends AnyVal { * * TODO: Handle parameterized lower bounds */ - def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = { + def LambdaAbstract(tparams: List[TypeParamInfo])(implicit ctx: Context): Type = { def expand(tp: Type) = TypeLambda( - tpnme.syntheticLambdaParamNames(tparams.length), tparams.map(_.variance))( + tpnme.syntheticLambdaParamNames(tparams.length), tparams.map(_.paramVariance))( tl => tparams.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds), tl => tl.lifted(tparams, tp)) - assert(!isHK, self) self match { case self: TypeAlias => self.derivedTypeAlias(expand(self.alias)) @@ -489,6 +488,8 @@ class TypeApplications(val self: Type) extends AnyVal { } else dealiased.resType match { case AppliedType(tycon, args1) if tycon.safeDealias ne tycon => + // In this case we should always dealias since we cannot handle + // higher-kinded applications to wildcard arguments. dealiased .derivedTypeLambda(resType = tycon.safeDealias.appliedTo(args1)) .appliedTo(args) diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 1bfe9cbd1..fa402f9fc 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -517,7 +517,7 @@ object Types { def goApply(tp: HKApply) = tp.tycon match { case tl: TypeLambda => go(tl.resType).mapInfo(info => - tl.derivedTypeLambda(tl.paramNames, tl.paramBounds, info).appliedTo(tp.args)) + tl.derivedLambdaAbstraction(tl.paramNames, tl.paramBounds, info).appliedTo(tp.args)) case _ => go(tp.superType) } @@ -879,12 +879,6 @@ object Types { case _ => this } - /** If this is a TypeAlias type, its alias, otherwise this type itself */ - final def followTypeAlias(implicit ctx: Context): Type = this match { - case TypeAlias(alias) => alias - case _ => this - } - /** If this is a (possibly aliased, annotated, and/or parameterized) reference to * a class, the class type ref, otherwise NoType. * @param refinementOK If `true` we also skip non-parameter refinements. @@ -1923,13 +1917,9 @@ object Types { } object TypeRef { - def checkProjection(prefix: Type, name: TypeName)(implicit ctx: Context) = () - /** Create type ref with given prefix and name */ - def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef = { - if (Config.checkProjections) checkProjection(prefix, name) + def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef = ctx.uniqueNamedTypes.enterIfNew(prefix, name).asInstanceOf[TypeRef] - } /** Create type ref to given symbol */ def apply(prefix: Type, sym: TypeSymbol)(implicit ctx: Context): TypeRef = @@ -1938,10 +1928,8 @@ object Types { /** Create a non-member type ref (which cannot be reloaded using `member`), * with given prefix, name, and symbol. 
*/ - def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef = { - if (Config.checkProjections) checkProjection(prefix, name) + def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef = unique(new TypeRefWithFixedSym(prefix, name, sym)) - } /** Create a type ref referring to given symbol with given name. * This is very similar to TypeRef(Type, Symbol), @@ -2057,9 +2045,7 @@ object Types { private def badInst = throw new AssertionError(s"bad instantiation: $this") - def checkInst(implicit ctx: Context): this.type = { - this - } + def checkInst(implicit ctx: Context): this.type = this // debug hook def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): Type = if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this @@ -2139,7 +2125,7 @@ object Types { override def computeHash = doHash(parent) override def toString = s"RecType($parent | $hashCode)" - private def checkInst(implicit ctx: Context): this.type = this + private def checkInst(implicit ctx: Context): this.type = this // debug hook } object RecType { @@ -2550,8 +2536,8 @@ object Types { case _ => false } - def derivedPolyType(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context) = - derivedGenericType(paramNames, paramBounds, resType) + def derivedPolyType(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): PolyType = + derivedGenericType(paramNames, paramBounds, resType).asInstanceOf[PolyType] def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): PolyType = PolyType(paramNames)( @@ -2591,7 +2577,7 @@ object Types { lazy val typeParams: List[LambdaParam] = paramNames.indices.toList.map(new LambdaParam(this, _)) - def derivedTypeLambda(paramNames: List[TypeName] = paramNames, paramBounds: List[TypeBounds] = paramBounds, resType: Type)(implicit ctx: Context): Type = + def derivedLambdaAbstraction(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): Type = resType match { case resType @ TypeAlias(alias) => resType.derivedTypeAlias(duplicate(paramNames, paramBounds, alias)) @@ -2600,9 +2586,12 @@ object Types { if (lo.isRef(defn.NothingClass)) lo else duplicate(paramNames, paramBounds, lo), duplicate(paramNames, paramBounds, hi)) case _ => - derivedGenericType(paramNames, paramBounds, resType) + derivedTypeLambda(paramNames, paramBounds, resType) } + def derivedTypeLambda(paramNames: List[TypeName] = paramNames, paramBounds: List[TypeBounds] = paramBounds, resType: Type)(implicit ctx: Context): TypeLambda = + derivedGenericType(paramNames, paramBounds, resType).asInstanceOf[TypeLambda] + def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): TypeLambda = TypeLambda(paramNames, variances)( x => paramBounds mapConserve (_.subst(this, x).bounds), @@ -2664,6 +2653,7 @@ object Types { cachedSuper } + /* (Not needed yet) */ def lowerBound(implicit ctx: Context) = tycon.stripTypeVar match { case tycon: TypeRef => tycon.info match { @@ -2676,13 +2666,6 @@ object Types { NoType } -/* - def lowerBound(implicit ctx: Context): Type = tycon.stripTypeVar match { - case tp: TypeRef => - val lb = tp.info.bounds.lo.typeParams.length == args.lengt - case _ => 
defn.NothingType - } -*/ def typeParams(implicit ctx: Context): List[TypeParamInfo] = { val tparams = tycon.typeParams if (tparams.isEmpty) TypeLambda.any(args.length).typeParams else tparams @@ -2705,7 +2688,7 @@ object Types { case _ => assert(false, s"illegal type constructor in $this") } - check(tycon) + if (Config.checkHKApplications) check(tycon) this } } diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index 9e55216c1..b1cceea88 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -83,15 +83,16 @@ object Checking { case AppliedTypeTree(tycon, args) => // If `args` is a list of named arguments, return corresponding type parameters, // otherwise return type parameters unchanged - def matchNamed(tparams: List[TypeSymbol], args: List[Tree]): List[Symbol] = - if (hasNamedArg(args)) - for (NamedArg(name, _) <- args) yield tycon.tpe.member(name).symbol - else - tparams - val tparams = matchNamed(tycon.tpe.typeSymbol.typeParams, args) - val bounds = tparams.map(tparam => - tparam.info.asSeenFrom(tycon.tpe.normalizedPrefix, tparam.owner.owner).bounds) - checkBounds(args, bounds, _.substDealias(tparams, _)) + val tparams = tycon.tpe.typeParams + def argNamed(tparam: TypeParamInfo) = args.find { + case NamedArg(name, _) => name == tparam.paramName + case _ => false + }.getOrElse(TypeTree(tparam.paramRef)) + val orderedArgs = if (hasNamedArg(args)) tparams.map(argNamed) else args + val bounds = tparams.map(_.paramBoundsAsSeenFrom(tycon.tpe)) + def instantiate(bound: Type, args: List[Type]) = + bound.LambdaAbstract(tparams).appliedTo(args) + checkBounds(orderedArgs, bounds, instantiate) def checkValidIfHKApply(implicit ctx: Context): Unit = checkWildcardHKApply(tycon.tpe.appliedTo(args.map(_.tpe)), tree.pos) diff --git a/tests/neg/t7278.scala b/tests/neg/t7278.scala index 7aafbb76f..643a3c858 100644 --- a/tests/neg/t7278.scala +++ b/tests/neg/t7278.scala @@ -13,8 +13,8 @@ object Test { def fail1(): Unit = { val b = new B - var x1: EE[A] = null - var x2: EE[B] = new b.E // old-error: found: B#E, required: A#E + var x1: EE[A] = null // error: Type argument A does not conform to upper bound EC + var x2: EE[B] = new b.E // error: Type argument B does not conform to upper bound EC // x1 = x2 // gives a prior type error: B#E, required: A#E, masked to get at the real thing. 
} @@ -27,8 +27,8 @@ object Test { } */ def fail3(): Unit = { - var x1: EE[C] = 5 - var x2: EE[C & D] = "" + var x1: EE[C] = 5 // error: Type argument C does not conform to upper bound EC + var x2: EE[C & D] = "" // error: Type argument C & D does not conform to upper bound EC x1 = x2 } diff --git a/tests/pos-scala2/t2994.scala b/tests/pos-scala2/t2994.scala index c7421c42a..f3009b12f 100644 --- a/tests/pos-scala2/t2994.scala +++ b/tests/pos-scala2/t2994.scala @@ -20,7 +20,7 @@ object Naturals { // crashes scala-2.8.0 beta1 trait MUL[n <: NAT, m <: NAT] extends NAT { - trait curry[n[_, _], s[_]] { type f[z <: NAT] = n[s, z] } + trait curry[n[_[_], _], s[_]] { type f[z <: NAT] = n[s, z] } type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z] } diff --git a/tests/pos/hklower.scala b/tests/pos/hklower.scala index 033ee3a34..90aa343ba 100644 --- a/tests/pos/hklower.scala +++ b/tests/pos/hklower.scala @@ -1,4 +1,4 @@ -class Test { // error: conflicting bounds +class Test { type T[X] type U[X] = T[X] @@ -6,6 +6,36 @@ class Test { // error: conflicting bounds type V[X] >: T[X] type W[X] >: T[X] <: T[X] - def f[C[X] >: T[X]]() = ??? + def f[C[X] >: T[X]](x: C[Int]) = ??? + + val v: V[Int] = ??? + val t: T[Int] = ??? + + f[V](v) + + f[V](t) + + +} +class Test2 { + + class T[X] + type U[X] = T[X] + + type V[X] >: T[X] + type W[X] >: T[X] <: T[X] + + def f[C[X] >: T[X]](x: C[Int]) = ??? + + val v: V[Int] = ??? + val t: T[Int] = ??? + + f[V](v) + + f[V](t) + + var x: V[Int] = _ + x = t + } -- cgit v1.2.3 From a737b47a92fe414a5e7f07bae171878c81bf9f45 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 14 Jul 2016 23:13:09 +0200 Subject: Add test case for hk bounds checking --- tests/neg/hk-bounds.scala | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 tests/neg/hk-bounds.scala diff --git a/tests/neg/hk-bounds.scala b/tests/neg/hk-bounds.scala new file mode 100644 index 000000000..80c8cfaa8 --- /dev/null +++ b/tests/neg/hk-bounds.scala @@ -0,0 +1,23 @@ +class Foo[A] +class Bar[B] +class Baz[C] extends Bar[C] + +object Test1 { + type Alias[F[X] <: Foo[X]] = F[Int] + + val x: Alias[Bar] = new Bar[Int] // error: Type argument [X0] -> Bar[X0] does not conform to upper bound [X0] -> Foo[X0] + + def foo[F[X] <: Foo[X]] = () + foo[Bar] // error: Type argument [X0] -> Bar[X0] does not conform to upper bound [X0] -> Foo[X0] + + def bar[B[X] >: Bar[X]] = () + bar[Bar] // ok + bar[Baz] // // error: Type argument [X0] -> Baz[X0] does not conform to lower bound [X0] -> Bar[X0] + bar[Foo] // error: Type argument [X0] -> Foo[X0] does not conform to lower bound [X0] -> Bar[X0] + + def baz[B[X] >: Baz[X]] = () + baz[Bar] //ok + baz[Baz] //ok + baz[Foo] // error: Type argument [X0] -> Foo[X0] does not conform to lower bound [X0] -> Baz[X0] + +} -- cgit v1.2.3 From 894c9fbf247765041fc32788c78b85f1b2b2a191 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 15 Jul 2016 10:30:30 +0200 Subject: Bounds of type lambdas compare contravariantly Enable checking of bounds when comparing type lambdas. This invalidates a pattern used in t2994 and potentially other code, where a bound [X] -> Any is used as a template that is a legal supertype of all other bounds. The old behavior is still available under language:Scala2. 
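A minimal illustration (invented names) of what contravariant comparison of bounds means for higher-kinded arguments: a constructor with a narrower parameter bound no longer satisfies an unbounded higher-kinded parameter, while a constructor with a wider bound still does.

    object KindBoundsDemo {
      class Num
      def g[F[X <: Num]]: Unit = ()   // expects a constructor defined on subtypes of Num
      def h[F[X]]: Unit = ()          // expects a constructor defined on all types

      class Box[X]
      class NumBox[X <: Num]

      g[NumBox]     // ok: bounds match exactly
      g[Box]        // ok: Box accepts more arguments than g requires
      h[Box]        // ok
      // h[NumBox]  // rejected: NumBox's bound is narrower than the unbounded parameter of h
    }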
--- src/dotty/tools/dotc/core/TypeComparer.scala | 30 +++++++++++++++++----------- tests/neg/hk-bounds.scala | 8 ++++++++ tests/neg/t2994.scala | 2 +- 3 files changed, 27 insertions(+), 13 deletions(-) diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 31cc87b3a..a895db178 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -386,22 +386,28 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case tp2 @ TypeLambda(tparams2, body2) => def compareHkLambda: Boolean = tp1.stripTypeVar match { case tp1 @ TypeLambda(tparams1, body1) => - // Don't compare bounds of lambdas, or t2994 will fail - // The issue is that, logically, bounds should compare contravariantly, - // so the bounds checking should look like this: - // - // tparams1.corresponds(tparams2)((tparam1, tparam2) => - // isSubType(tparam2.paramBounds.subst(tp2, tp1), tparam1.paramBounds)) - // - // But that would invalidate a pattern such as - // `[X0 <: Number] -> Number <:< [X0] -> Any` - // This wpuld mean that there is no convenient means anymore to express a kind - // as a supertype. The fix is to delay the checking of bounds so that only - // bounds of * types are checked. + /* Don't compare bounds of lambdas under language:Scala2, or t2994 will fail + * The issue is that, logically, bounds should compare contravariantly, + * but that would invalidate a pattern exploited in t2994: + * + * [X0 <: Number] -> Number <:< [X0] -> Any + * + * Under the new scheme, `[X0] -> Any` is NOT a kind that subsumes + * all other bounds. You'd have to write `[X0 >: Any <: Nothing] -> Any` instead. + * This might look weird, but is the only logically correct way to do it. + * + * Note: it would be nice if this could trigger a migration warning, but I + * am not sure how, since the code is buried so deep in subtyping logic. + */ + def boundsOK = + ctx.scala2Mode || + tparams1.corresponds(tparams2)((tparam1, tparam2) => + isSubType(tparam2.paramBounds.subst(tp2, tp1), tparam1.paramBounds)) val saved = comparingLambdas comparingLambdas = true try variancesConform(tparams1, tparams2) && + boundsOK && isSubType(body1, body2.subst(tp2, tp1)) finally comparingLambdas = saved case _ => diff --git a/tests/neg/hk-bounds.scala b/tests/neg/hk-bounds.scala index 80c8cfaa8..db6712d72 100644 --- a/tests/neg/hk-bounds.scala +++ b/tests/neg/hk-bounds.scala @@ -21,3 +21,11 @@ object Test1 { baz[Foo] // error: Type argument [X0] -> Foo[X0] does not conform to lower bound [X0] -> Baz[X0] } +object Test2 { + type Alias[F[X] <: Foo[X]] = F[Int] + + def foo[M[_[_]], A[_]]: M[A] = null.asInstanceOf[M[A]] + + val x = foo[Alias, Bar] // error: Type argument Test2.Alias does not conform to upper bound [X0 <: [X0] -> Any] -> Any + +} diff --git a/tests/neg/t2994.scala b/tests/neg/t2994.scala index 6964a080e..e19397a3d 100644 --- a/tests/neg/t2994.scala +++ b/tests/neg/t2994.scala @@ -21,7 +21,7 @@ object Naturals { // crashes scala-2.8.0 beta1 trait MUL[n <: NAT, m <: NAT] extends NAT { trait curry[n[_[_], _], s[_]] { type f[z <: NAT] = n[s, z] } - type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z] // error: not a legal path // error: not a legal path + type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z] // error: not a legal path // error: not a legal path // error: arg does not conform to bound // error: arg does not conform to bound } } -- cgit v1.2.3
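As the commit message above notes, the old lenient comparison remains available under language:Scala2; in an sbt build this is the same switch the injected test plugin earlier in this series already passes:

    scalacOptions += "-language:Scala2"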