Diffstat (limited to 'src/dotty')
-rw-r--r--  src/dotty/tools/dotc/core/TypeApplications.scala | 21
-rw-r--r--  src/dotty/tools/dotc/core/TypeComparer.scala     | 89
-rw-r--r--  src/dotty/tools/dotc/core/Types.scala            | 43
-rw-r--r--  src/dotty/tools/dotc/typer/Inferencing.scala     |  2
4 files changed, 86 insertions, 69 deletions
diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala
index c0728a8fb..0edc598dd 100644
--- a/src/dotty/tools/dotc/core/TypeApplications.scala
+++ b/src/dotty/tools/dotc/core/TypeApplications.scala
@@ -38,14 +38,23 @@ object TypeApplications {
case _ => tp
}
- /** Does the variance of `sym1` conform to the variance of `sym2`?
+ /** Does variance `v1` conform to variance `v2`?
* This is the case if the variances are the same or `v2` is nonvariant.
*/
- def varianceConforms(sym1: MemberBinding, sym2: MemberBinding)(implicit ctx: Context) =
- sym1.memberVariance == sym2.memberVariance || sym2.memberVariance == 0
+ def varianceConforms(v1: Int, v2: Int)(implicit ctx: Context): Boolean =
+ v1 == v2 || v2 == 0
- def variancesConform(syms1: List[MemberBinding], syms2: List[MemberBinding])(implicit ctx: Context) =
- syms1.corresponds(syms2)(varianceConforms)
+ /** Does the variance of type parameter `tparam1` conform to the variance of type parameter `tparam2`?
+ */
+ def varianceConforms(tparam1: MemberBinding, tparam2: MemberBinding)(implicit ctx: Context): Boolean =
+ varianceConforms(tparam1.memberVariance, tparam2.memberVariance)
+
+ /** Do the variances of type parameters `tparams1` conform to the variances
* of corresponding type parameters `tparams2`?
* This is only the case if `tparams1` and `tparams2` have the same length.
+ */
+ def variancesConform(tparams1: List[MemberBinding], tparams2: List[MemberBinding])(implicit ctx: Context): Boolean =
+ tparams1.corresponds(tparams2)(varianceConforms)
def fallbackTypeParams(variances: List[Int])(implicit ctx: Context): List[MemberBinding] = {
def memberBindings(vs: List[Int]): Type = vs match {
@@ -102,7 +111,7 @@ object TypeApplications {
def unapply(tp: Type)(implicit ctx: Context): Option[(/*List[Int], */List[TypeBounds], Type)] =
if (Config.newHK) {
def decompose(t: Type, acc: List[TypeBounds]): (List[TypeBounds], Type) = t match {
- case t @ RefinedType(p, rname, rinfo: TypeBounds) if rname.isHkArgName && rinfo.isBinding =>
+ case t @ RefinedType(p, rname, rinfo: TypeBounds) if t.isTypeParam =>
decompose(p, rinfo.bounds :: acc)
case t: RecType =>
decompose(t.parent, acc)
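
The overloads added above reduce variance checking to a comparison of integer variances. As a rough standalone sketch (not compiler code; the `VarianceSketch` object and the +1/0/-1 encoding for covariant/invariant/contravariant are assumptions for illustration), the rule behaves like this:

object VarianceSketch {
  // v1 conforms to v2 if they agree, or if v2 is invariant (accepts any variance).
  def varianceConforms(v1: Int, v2: Int): Boolean =
    v1 == v2 || v2 == 0

  // Lists conform pointwise; `corresponds` is also false on a length mismatch.
  def variancesConform(vs1: List[Int], vs2: List[Int]): Boolean =
    vs1.corresponds(vs2)(varianceConforms)

  def main(args: Array[String]): Unit = {
    assert(variancesConform(List(+1, -1), List(0, -1)))  // covariant conforms to invariant
    assert(!variancesConform(List(0), List(+1)))         // invariant does not conform to covariant
    assert(!variancesConform(List(+1), List(+1, +1)))    // different lengths never conform
  }
}
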
diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala
index 5f28d07eb..c1b275b70 100644
--- a/src/dotty/tools/dotc/core/TypeComparer.scala
+++ b/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -368,20 +368,31 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
// This twist is needed to make collection/generic/ParFactory.scala compile
fourthTry(tp1, tp2) || compareRefinedSlow
case _ =>
- compareHkApply(tp2, tp1, inOrder = false) ||
- compareHkLambda(tp2, tp1, inOrder = false) ||
- compareRefinedSlow ||
- fourthTry(tp1, tp2) ||
- compareAliasedRefined(tp2, tp1, inOrder = false)
+ if (tp2.isTypeParam) {
+ compareHkLambda(tp1, tp2) ||
+ fourthTry(tp1, tp2)
+ }
+ else {
+ compareHkApply(tp2, tp1, inOrder = false) ||
+ compareRefinedSlow ||
+ fourthTry(tp1, tp2) ||
+ compareAliasedRefined(tp2, tp1, inOrder = false)
+ }
}
else // fast path, in particular for refinements resulting from parameterization.
- isSubType(tp1, skipped2) && // TODO swap?
- isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2)
+ isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) &&
+ isSubType(tp1, skipped2) // TODO swap?
}
compareRefined
case tp2: RecType =>
- val tp1stable = ensureStableSingleton(tp1)
- isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable))
+ tp1.safeDealias match {
+ case tp1: RecType =>
+ val rthis1 = RecThis(tp1)
+ isSubType(tp1.parent, tp2.parent.substRecThis(tp2, rthis1))
+ case _ =>
+ val tp1stable = ensureStableSingleton(tp1)
+ isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable))
+ }
case OrType(tp21, tp22) =>
// Rewrite T1 <: (T211 & T212) | T22 to T1 <: (T211 | T22) and T1 <: (T212 | T22)
// and analogously for T1 <: T21 | (T221 & T222)
@@ -496,7 +507,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths
case tp1: RefinedType =>
compareHkApply(tp1, tp2, inOrder = true) ||
- compareHkLambda(tp1, tp2, inOrder = true) ||
isNewSubType(tp1.parent, tp2) ||
compareAliasedRefined(tp1, tp2, inOrder = true)
case tp1: RecType =>
@@ -609,15 +619,25 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
tryInfer(projection.prefix.typeConstructor.dealias)
}
- /** If `projection` is a hk projection T#$apply with a constrainable poly param
- * as type constructor and `other` is not a hk projection, then perform the following
- * steps:
+ /** Handle subtype tests
+ *
+ * app <:< other if inOrder = true
+ * other <:< app if inOrder = false
+ *
+ * where `app` is an hk application but `other` is not.
+ *
+ * As a first step, if `app` appears on the right, try to normalize it using
+ * `normalizeHkApply`; if that gives a different type, proceed with a regular subtype
+ * test using that type instead of `app`.
+ *
+ * Otherwise, if `app` has a constrainable poly param as type constructor,
+ * perform the following steps:
*
* (1) If not `inOrder` then perform the next steps until they all succeed
* for each base type of other which
- * - derives from a class bound of `projection`,
- * - has the same number of type parameters than `projection`
- * - has type parameter variances which conform to those of `projection`.
+ * - derives from a class bound of `app`,
+ * - has the same number of type parameters as `app`
+ * - has type parameter variances which conform to those of `app`.
* If `inOrder` then perform the same steps on the original `other` type.
*
* (2) Try to eta expand the constructor of `other`.
@@ -627,7 +647,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
* (3b) In normal mode, try to unify the projection's hk constructor parameter with
* the eta expansion of step(2)
*
- * (4) If `inOrder`, test `projection <: other` else test `other <: projection`.
+ * (4) If `inOrder`, test `app <: other` else test `other <: app`.
*/
def compareHkApply(app: RefinedType, other: Type, inOrder: Boolean): Boolean = {
def tryInfer(tp: Type): Boolean = ctx.traceIndented(i"compareHK($app, $other, inOrder = $inOrder, constr = $tp)", subtyping) {
@@ -676,7 +696,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
}
}
Config.newHK && app.isHKApply && !other.isHKApply && {
- val reduced = app.normalizeHkApply
+ val reduced = if (inOrder) app else app.normalizeHkApply
if (reduced ne app)
if (inOrder) isSubType(reduced, other) else isSubType(other, reduced)
else tryInfer(app.typeConstructor.dealias)
@@ -684,16 +704,20 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
}
/** Compare type lambda with non-lambda type. */
- def compareHkLambda(rt: RefinedType, other: Type, inOrder: Boolean) = rt match {
- case TypeLambda(args, body) =>
- args.length == other.typeParams.length && {
- val applied = other.appliedTo(argRefs(rt, args.length))
- if (inOrder) isSubType(body, applied)
- else body match {
- case body: TypeBounds => body.contains(applied) // Can be dropped?
- case _ => isSubType(applied, body)
- }
+ def compareHkLambda(tp1: Type, tp2: RefinedType): Boolean = tp1.stripTypeVar match {
+ case TypeLambda(args1, body1) =>
+ //println(i"comparing $tp1 <:< $tp2")
+ tp2 match {
+ case TypeLambda(args2, body2) =>
+ args1.corresponds(args2)((arg1, arg2) =>
+ varianceConforms(BindingKind.toVariance(arg1.bindingKind),
+ BindingKind.toVariance(arg2.bindingKind))) &&
+ // don't compare bounds; it would go in the wrong sense anyway.
+ isSubType(body1, body2)
+ case _ => false
}
+ case RefinedType(parent1, _, _) =>
+ compareHkLambda(parent1, tp2)
case _ =>
false
}
@@ -1523,13 +1547,12 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) {
}
else super.compareHkApply(app, other, inOrder)
- override def compareHkLambda(rt: RefinedType, other: Type, inOrder: Boolean) =
- if (!Config.newHK && rt.refinedName == tpnme.hkApplyOBS ||
- Config.newHK && rt.isTypeParam)
- traceIndented(i"compareHkLambda $rt, $other, $inOrder") {
- super.compareHkLambda(rt, other, inOrder)
+ override def compareHkLambda(tp1: Type, tp2: RefinedType): Boolean =
+ if (tp2.isTypeParam)
+ traceIndented(i"compareHkLambda $tp1, $tp2") {
+ super.compareHkLambda(tp1, tp2)
}
- else super.compareHkLambda(rt, other, inOrder)
+ else super.compareHkLambda(tp1, tp2)
override def toString = "Subtype trace:" + { try b.toString finally b.clear() }
}
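
The reworked `compareHkLambda` above relates two type lambdas by requiring that corresponding parameter variances conform and that the bodies are in the subtype relation, while skipping parameter bounds. A toy standalone model of that rule (the `Ty`, `Lam` and `Ref` types below are made up for illustration and are not the compiler's `Type` hierarchy):

object HkLambdaSketch {
  sealed trait Ty
  // A type lambda: parameter variances plus a body.
  case class Lam(variances: List[Int], body: Ty) extends Ty
  // A named type with its declared parents, for a crude nominal subtype check.
  case class Ref(name: String, parents: Set[String] = Set.empty) extends Ty

  def varianceConforms(v1: Int, v2: Int): Boolean = v1 == v2 || v2 == 0

  def isSub(t1: Ty, t2: Ty): Boolean = (t1, t2) match {
    case (Lam(vs1, b1), Lam(vs2, b2)) =>
      // Variances must conform pointwise; bodies are compared, bounds are not.
      vs1.corresponds(vs2)(varianceConforms) && isSub(b1, b2)
    case (Ref(n1, ps1), Ref(n2, _)) =>
      n1 == n2 || ps1.contains(n2)   // crude nominal subtyping via declared parents
    case _ =>
      false                          // a lambda never relates to a non-lambda in this sketch
  }

  def main(args: Array[String]): Unit = {
    // Roughly: ([+X] => List[X]) <: ([X] => Seq[X]), given List <: Seq.
    val covList = Lam(List(+1), Ref("List", Set("Seq")))
    val invSeq  = Lam(List(0), Ref("Seq"))
    assert(isSub(covList, invSeq))
    assert(!isSub(invSeq, covList))  // invariant does not conform to covariant
  }
}
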
diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala
index 3d4ec6601..2120706f6 100644
--- a/src/dotty/tools/dotc/core/Types.scala
+++ b/src/dotty/tools/dotc/core/Types.scala
@@ -2151,15 +2151,6 @@ object Types {
throw new AssertionError(s"bad instantiation: $this")
def checkInst(implicit ctx: Context): this.type = {
- if (false && Config.newHK && refinedName.isHkArgName && refinedInfo.isInstanceOf[TypeAlias]) {
- parent.stripTypeVar match {
- case TypeApplications.TypeLambda(_, _) =>
- println(i"fshy: $this")
- println(s"fshy: $this")
- new Error().printStackTrace()
- case _ =>
- }
- }
if (refinedName == tpnme.hkApplyOBS)
parent.stripTypeVar match {
case RefinedType(_, name, _) if name.isHkArgName => // ok
@@ -2190,12 +2181,16 @@ object Types {
case _ =>
tp
}
- val reduced = substAlias(parent)
- if (reduced ne parent) {
- hk.println(i"REDUCE $this ----> ${reduced}")
- reduced
+ parent match {
+ case parent: LazyRef =>
+ LazyRef(() => derivedRefinedType(parent.ref, refinedName, refinedInfo))
+ case _ =>
+ val reduced = substAlias(parent)
+ if (reduced ne parent) {
+ hk.println(i"REDUCE $this ----> ${reduced}")
+ reduced
+ } else this
}
- else this
case _ =>
this
}
@@ -2304,7 +2299,6 @@ object Types {
case tp: TypeRef => apply(x, tp.prefix)
case tp: RecThis => RecType.this eq tp.binder
case tp: LazyRef => true // Assume a reference to be safe.
- // TODO: Check that all accumulators handle LazyRefs correctly
case _ => foldOver(x, tp)
}
}
@@ -2315,22 +2309,13 @@ object Types {
override def computeHash = doHash(parent)
override def toString = s"RecType($parent | $hashCode)"
+
+ private def checkInst(implicit ctx: Context): this.type = {
+ this
+ }
}
object RecType {
- /* Note: this might well fail for nested Recs.
- * Failing scenario: Rebind a nest rec, creates a new rec
- * but it still has RecThis references to the outer rec.
- def checkInst(tp: Type)(implicit ctx: Context): tp.type = {
- var binders: List[RecType] = Nil
- tp.foreachPart {
- case rt: RecType => binders = rt :: binders
- case rt: RecThis => assert(binders contains rt.binder)
- case _ =>
- }
- tp
- }
- */
/** Create a RecType, normalizing its contents. This means:
*
@@ -2356,7 +2341,7 @@ object Types {
case tp =>
tp
}
- unique(rt.derivedRecType(normalize(rt.parent)))
+ unique(rt.derivedRecType(normalize(rt.parent))).checkInst
}
def closeOver(parentExp: RecType => Type)(implicit ctx: Context) = {
val rt = this(parentExp)
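
One of the Types.scala changes above makes `normalizeHkApply` defer its rewrite when the parent is a `LazyRef`: it wraps the derived refinement in a new `LazyRef` instead of forcing the possibly cyclic parent. A minimal sketch of that deferral pattern outside the compiler (the `Lazy` class and `rewriteLater` helper are hypothetical, for illustration only):

object LazyDeferralSketch {
  final class Lazy[A](thunk: () => A) { lazy val ref: A = thunk() }
  object Lazy { def apply[A](thunk: () => A): Lazy[A] = new Lazy(thunk) }

  // Build the rewritten value only when someone actually demands `.ref`.
  def rewriteLater[A](parent: Lazy[A])(rewrite: A => A): Lazy[A] =
    Lazy(() => rewrite(parent.ref))

  def main(args: Array[String]): Unit = {
    var forced = false
    val parent  = Lazy { () => forced = true; 21 }
    val derived = rewriteLater(parent)(_ * 2)
    assert(!forced)            // nothing has been evaluated yet
    assert(derived.ref == 42)  // forcing the derived value forces the parent once
    assert(forced)
  }
}
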
diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala
index 3b79d7c4c..2b37fa36c 100644
--- a/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -218,7 +218,7 @@ object Inferencing {
val qualifies = (tvar: TypeVar) =>
(tree contains tvar.owningTree) || ownedBy.exists && tvar.owner == ownedBy
def interpolate() = Stats.track("interpolateUndetVars") {
- val tp = tree.tpe.widen // TODO add `.BetaReduce` ?
+ val tp = tree.tpe.widen
constr.println(s"interpolate undet vars in ${tp.show}, pos = ${tree.pos}, mode = ${ctx.mode}, undets = ${constraint.uninstVars map (tvar => s"${tvar.show}@${tvar.owningTree.pos}")}")
constr.println(s"qualifying undet vars: ${constraint.uninstVars filter qualifies map (tvar => s"$tvar / ${tvar.show}")}, constraint: ${constraint.show}")