author    odersky <odersky@gmail.com>  2016-07-15 13:11:52 +0200
committer GitHub <noreply@github.com>  2016-07-15 13:11:52 +0200
commit    409c6c30c8496529aace68967acccf88850145da (patch)
tree      56fd30bbb5d108b895982da72943e649a58fbd40
parent    1c02c56213cf22010c0aef1dc1446300fe8005fe (diff)
parent    894c9fbf247765041fc32788c78b85f1b2b2a191 (diff)
Merge pull request #1343 from dotty-staging/change-hk-direct2
Direct representation of higher-kinded types
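This merge replaces the encoded higher-kinded representation (the synthetic LambdaXYZ traits with their $hk parameters and $Apply member, removed below from Definitions.scala, NameOps.scala and StdNames.scala) with a direct representation based on type lambdas, including a new `HkTypeParamClause '->' Type` production in docs/SyntaxSummary.txt. As a rough illustration of the surface syntax this enables, here is a hypothetical sketch that is not taken from the commit (the names TypeLambdaDemo and Pair are invented):

    object TypeLambdaDemo {
      // A type lambda written directly with the new `->` syntax.
      type Pair = [X] -> (X, X)
      // Applying the alias substitutes the parameter: Pair[Int] becomes (Int, Int),
      // per the simplifyApplications note added to config/Config.scala.
      val p: Pair[Int] = (1, 2)
    }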
-rw-r--r--  bridge/src/sbt-test/source-dependencies/signature-change/B.scala | 3
-rw-r--r--  bridge/src/sbt-test/source-dependencies/signature-change/changes/A0.scala | 3
-rw-r--r--  bridge/src/sbt-test/source-dependencies/signature-change/changes/A1.scala | 3
-rw-r--r--  bridge/src/sbt-test/source-dependencies/signature-change/changes/A2.scala | 3
-rw-r--r--  bridge/src/sbt-test/source-dependencies/signature-change/changes/A3.scala | 3
-rw-r--r--  bridge/src/sbt-test/source-dependencies/signature-change/project/DottyInjectedPlugin.scala | 17
-rw-r--r--  bridge/src/sbt-test/source-dependencies/signature-change/test | 20
-rw-r--r--  docs/SyntaxSummary.txt | 7
-rw-r--r--  src/dotty/tools/dotc/ast/Desugar.scala | 2
-rw-r--r--  src/dotty/tools/dotc/ast/Trees.scala | 16
-rw-r--r--  src/dotty/tools/dotc/ast/tpd.scala | 2
-rw-r--r--  src/dotty/tools/dotc/ast/untpd.scala | 1
-rw-r--r--  src/dotty/tools/dotc/config/Config.scala | 12
-rw-r--r--  src/dotty/tools/dotc/core/Constraint.scala | 17
-rw-r--r--  src/dotty/tools/dotc/core/ConstraintHandling.scala | 110
-rw-r--r--  src/dotty/tools/dotc/core/Definitions.scala | 73
-rw-r--r--  src/dotty/tools/dotc/core/Denotations.scala | 13
-rw-r--r--  src/dotty/tools/dotc/core/Flags.scala | 3
-rw-r--r--  src/dotty/tools/dotc/core/Mode.scala | 5
-rw-r--r--  src/dotty/tools/dotc/core/NameOps.scala | 23
-rw-r--r--  src/dotty/tools/dotc/core/OrderingConstraint.scala | 72
-rw-r--r--  src/dotty/tools/dotc/core/Signature.scala | 37
-rw-r--r--  src/dotty/tools/dotc/core/StdNames.scala | 18
-rw-r--r--  src/dotty/tools/dotc/core/Substituters.scala | 16
-rw-r--r--  src/dotty/tools/dotc/core/SymDenotations.scala | 59
-rw-r--r--  src/dotty/tools/dotc/core/Symbols.scala | 11
-rw-r--r--  src/dotty/tools/dotc/core/TypeApplications.scala | 616
-rw-r--r--  src/dotty/tools/dotc/core/TypeComparer.scala | 499
-rw-r--r--  src/dotty/tools/dotc/core/TypeErasure.scala | 5
-rw-r--r--  src/dotty/tools/dotc/core/TypeOps.scala | 86
-rw-r--r--  src/dotty/tools/dotc/core/TypeParamInfo.scala | 40
-rw-r--r--  src/dotty/tools/dotc/core/TyperState.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/Types.scala | 801
-rw-r--r--  src/dotty/tools/dotc/core/Uniques.scala | 4
-rw-r--r--  src/dotty/tools/dotc/core/classfile/ClassfileParser.scala | 6
-rw-r--r--  src/dotty/tools/dotc/core/tasty/TastyFormat.scala | 17
-rw-r--r--  src/dotty/tools/dotc/core/tasty/TreePickler.scala | 18
-rw-r--r--  src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala | 37
-rw-r--r--  src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala | 83
-rw-r--r--  src/dotty/tools/dotc/parsing/Parsers.scala | 30
-rw-r--r--  src/dotty/tools/dotc/printing/PlainPrinter.scala | 61
-rw-r--r--  src/dotty/tools/dotc/printing/RefinedPrinter.scala | 74
-rw-r--r--  src/dotty/tools/dotc/sbt/ExtractAPI.scala | 33
-rw-r--r--  src/dotty/tools/dotc/sbt/ExtractDependencies.scala | 3
-rw-r--r--  src/dotty/tools/dotc/transform/ElimRepeated.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/ElimStaticThis.scala | 4
-rw-r--r--  src/dotty/tools/dotc/transform/FullParameterization.scala | 10
-rw-r--r--  src/dotty/tools/dotc/transform/PostTyper.scala | 20
-rw-r--r--  src/dotty/tools/dotc/transform/SuperAccessors.scala | 6
-rw-r--r--  src/dotty/tools/dotc/typer/Applications.scala | 11
-rw-r--r--  src/dotty/tools/dotc/typer/Checking.scala | 81
-rw-r--r--  src/dotty/tools/dotc/typer/Implicits.scala | 18
-rw-r--r--  src/dotty/tools/dotc/typer/Inferencing.scala | 8
-rw-r--r--  src/dotty/tools/dotc/typer/Namer.scala | 86
-rw-r--r--  src/dotty/tools/dotc/typer/ProtoTypes.scala | 10
-rw-r--r--  src/dotty/tools/dotc/typer/TypeAssigner.scala | 22
-rw-r--r--  src/dotty/tools/dotc/typer/Typer.scala | 49
-rw-r--r--  src/dotty/tools/dotc/typer/Variances.scala | 19
-rw-r--r--  test/dotc/tests.scala | 2
-rw-r--r--  tests/disabled/neg/named-params.scala (renamed from tests/neg/named-params.scala) | 0
-rw-r--r--  tests/disabled/not-representable/pos/t2066.scala | 6
-rw-r--r--  tests/disabled/pos/CollectionStrawMan3.scala (renamed from tests/pos/CollectionStrawMan3.scala) | 0
-rw-r--r--  tests/disabled/pos/flowops.scala (renamed from tests/pos/flowops.scala) | 0
-rw-r--r--  tests/disabled/pos/flowops1.scala (renamed from tests/pos/flowops1.scala) | 0
-rw-r--r--  tests/disabled/pos/hk-named.scala (renamed from tests/pos/hk-named.scala) | 0
-rw-r--r--  tests/disabled/pos/named-params.scala (renamed from tests/pos/named-params.scala) | 0
-rw-r--r--  tests/neg/boundspropagation.scala | 2
-rw-r--r--  tests/neg/existentials.scala | 61
-rw-r--r--  tests/neg/hk-bounds.scala | 31
-rw-r--r--  tests/neg/hk-variance.scala | 11
-rw-r--r--  tests/neg/hklower.scala | 11
-rw-r--r--  tests/neg/hklower2.scala | 4
-rw-r--r--  tests/neg/i39.scala | 2
-rw-r--r--  tests/neg/i50-volatile.scala | 4
-rw-r--r--  tests/neg/kinds.scala | 18
-rw-r--r--  tests/neg/ski.scala | 23
-rw-r--r--  tests/neg/subtyping.scala | 2
-rw-r--r--  tests/neg/t2994.scala | 6
-rw-r--r--  tests/neg/t7278.scala | 8
-rw-r--r--  tests/neg/zoo.scala | 12
-rw-r--r--  tests/pending/pos/apply-equiv.scala (renamed from tests/pos/apply-equiv.scala) | 0
-rw-r--r--  tests/pickling/i94-nada.scala | 4
-rw-r--r--  tests/pos-scala2/GenTraversableFactory.scala (renamed from tests/pos/GenTraversableFactory.scala) | 0
-rw-r--r--  tests/pos-scala2/t2994.scala | 2
-rw-r--r--  tests/pos-scala2/t6014.scala (renamed from tests/pos/t6014.scala) | 2
-rw-r--r--  tests/pos/hk-subtyping.scala | 13
-rw-r--r--  tests/pos/hklower.scala | 41
-rw-r--r--  tests/pos/i1181.scala (renamed from tests/pending/pos/i1181.scala) | 0
-rw-r--r--  tests/pos/i94-nada.scala | 2
-rw-r--r--  tests/pos/jon.scala | 2
-rw-r--r--  tests/pos/lookuprefined.scala | 6
-rw-r--r--  tests/pos/range.scala | 4
-rw-r--r--  tests/pos/t2066.scala | 25
-rw-r--r--  tests/pos/t2613.scala | 2
-rw-r--r--  tests/pos/t2712-1.scala | 9
-rw-r--r--  tests/pos/t2712-2.scala | 25
-rw-r--r--  tests/pos/t2712-3.scala | 24
-rw-r--r--  tests/pos/t2712-4.scala | 17
-rw-r--r--  tests/pos/t2712-5.scala | 29
-rw-r--r--  tests/pos/t2712-6.scala | 12
-rw-r--r--  tests/pos/t2712-7.scala | 15
-rw-r--r--  tests/pos/t5683.scala | 23
-rw-r--r--  tests/pos/tycons.scala | 22
103 files changed, 2375 insertions, 1417 deletions
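Several of the added test files (tests/pos/t2712-1.scala through t2712-7.scala and tests/pos/i94-nada.scala) exercise the SI-2712-style higher-kinded inference that the ConstraintHandling changes below support (see the pruneLambdaParams comment). A hypothetical sketch of that pattern, not copied from the tests (the names PartialUnificationSketch and firstArg are invented):

    object PartialUnificationSketch {
      // For the call below F[A] must match Either[String, Int], so the compiler
      // is expected to infer F as the partially applied constructor
      // [X] -> Either[String, X] and A as Int.
      def firstArg[F[_], A](fa: F[A]): F[A] = fa
      val e: Either[String, Int] = firstArg(Right(1): Either[String, Int])
    }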
diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/B.scala b/bridge/src/sbt-test/source-dependencies/signature-change/B.scala
new file mode 100644
index 000000000..e049b23b0
--- /dev/null
+++ b/bridge/src/sbt-test/source-dependencies/signature-change/B.scala
@@ -0,0 +1,3 @@
+object B {
+ val x: Int = A.f1[Any](1)
+}
diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/changes/A0.scala b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A0.scala
new file mode 100644
index 000000000..6cf6a62c2
--- /dev/null
+++ b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A0.scala
@@ -0,0 +1,3 @@
+object A {
+ def f1[T](x: Int): Int = 1
+}
diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/changes/A1.scala b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A1.scala
new file mode 100644
index 000000000..d10bcbadc
--- /dev/null
+++ b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A1.scala
@@ -0,0 +1,3 @@
+object A {
+ def f1[T](x: String): Int = 1
+}
diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/changes/A2.scala b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A2.scala
new file mode 100644
index 000000000..029dc28d7
--- /dev/null
+++ b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A2.scala
@@ -0,0 +1,3 @@
+object A {
+ def f1[T](x: Int): String = ""
+}
diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/changes/A3.scala b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A3.scala
new file mode 100644
index 000000000..69463721f
--- /dev/null
+++ b/bridge/src/sbt-test/source-dependencies/signature-change/changes/A3.scala
@@ -0,0 +1,3 @@
+object A {
+ def f1[T <: Int](x: Int): Int = 1
+}
diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/project/DottyInjectedPlugin.scala b/bridge/src/sbt-test/source-dependencies/signature-change/project/DottyInjectedPlugin.scala
new file mode 100644
index 000000000..3433779b6
--- /dev/null
+++ b/bridge/src/sbt-test/source-dependencies/signature-change/project/DottyInjectedPlugin.scala
@@ -0,0 +1,17 @@
+import sbt._
+import Keys._
+
+object DottyInjectedPlugin extends AutoPlugin {
+ override def requires = plugins.JvmPlugin
+ override def trigger = allRequirements
+
+ override val projectSettings = Seq(
+ scalaVersion := "0.1-SNAPSHOT",
+ scalaOrganization := "ch.epfl.lamp",
+ scalacOptions += "-language:Scala2",
+ scalaBinaryVersion := "2.11",
+ autoScalaLibrary := false,
+ libraryDependencies ++= Seq("org.scala-lang" % "scala-library" % "2.11.5"),
+ scalaCompilerBridgeSource := ("ch.epfl.lamp" % "dotty-bridge" % "0.1.1-SNAPSHOT" % "component").sources()
+ )
+}
diff --git a/bridge/src/sbt-test/source-dependencies/signature-change/test b/bridge/src/sbt-test/source-dependencies/signature-change/test
new file mode 100644
index 000000000..03ad663ab
--- /dev/null
+++ b/bridge/src/sbt-test/source-dependencies/signature-change/test
@@ -0,0 +1,20 @@
+# Case 1: parameter type changed
+$ copy-file changes/A0.scala A.scala
+> compile
+$ copy-file changes/A1.scala A.scala
+# Compilation of B.scala should fail because the signature of f changed
+-> compile
+
+# Case 2: return type changed
+$ copy-file changes/A0.scala A.scala
+> compile
+$ copy-file changes/A2.scala A.scala
+# Compilation of B.scala should fail because the signature of f changed
+-> compile
+
+# Case 3: type parameter bounds changed
+$ copy-file changes/A0.scala A.scala
+> compile
+$ copy-file changes/A3.scala A.scala
+# Compilation of B.scala should fail because the signature of f changed
+-> compile
diff --git a/docs/SyntaxSummary.txt b/docs/SyntaxSummary.txt
index d4f7ceade..6751c90e2 100644
--- a/docs/SyntaxSummary.txt
+++ b/docs/SyntaxSummary.txt
@@ -96,6 +96,7 @@ grammar.
ClassQualifier ::= `[' id `]'
Type ::= FunArgTypes `=>' Type Function(ts, t)
+ | HkTypeParamClause `->' Type TypeLambda(ps, t)
| InfixType
FunArgTypes ::= InfixType
| `(' [ FunArgType {`,' FunArgType } ] `)'
@@ -125,7 +126,6 @@ grammar.
TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type} ContextBounds(typeBounds, tps)
Expr ::= FunParams `=>' Expr Function(args, expr), Function(ValDef([implicit], id, TypeTree(), EmptyTree), expr)
- | Expr1
FunParams ::= Bindings
| [`implicit'] id
| `_'
@@ -225,7 +225,8 @@ grammar.
TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds
HkTypeParamClause ::= `[' HkTypeParam {`,' HkTypeParam} `]'
- HkTypeParam ::= {Annotation} ['+' | `-'] (Id | `_') TypeBounds
+ HkTypeParam ::= {Annotation} ['+' | `-'] (Id[HkTypeParamClause] | `_')
+ TypeBounds
ClsParamClauses ::= {ClsParamClause} [[nl] `(' `implicit' ClsParams `)']
ClsParamClause ::= [nl] `(' [ClsParams] ')'
@@ -280,7 +281,7 @@ grammar.
DefDcl ::= DefSig [`:' Type] DefDef(_, name, tparams, vparamss, tpe, EmptyTree)
DefSig ::= id [DefTypeParamClause] DefParamClauses
TypeDcl ::= id [TypTypeParamClause] ['=' Type] TypeDefTree(_, name, tparams, tpt)
- | id [HkParamClause] TypeBounds TypeDefTree(_, name, tparams, bounds)
+ | id [HkTypeParamClause] TypeBounds TypeDefTree(_, name, tparams, bounds)
Def ::= `val' PatDef
| `var' VarDef
diff --git a/src/dotty/tools/dotc/ast/Desugar.scala b/src/dotty/tools/dotc/ast/Desugar.scala
index f603f6817..a9705e209 100644
--- a/src/dotty/tools/dotc/ast/Desugar.scala
+++ b/src/dotty/tools/dotc/ast/Desugar.scala
@@ -66,7 +66,7 @@ object desugar {
val relocate = new TypeMap {
val originalOwner = sym.owner
def apply(tp: Type) = tp match {
- case tp: NamedType if tp.symbol.owner eq originalOwner =>
+ case tp: NamedType if tp.symbol.exists && (tp.symbol.owner eq originalOwner) =>
val defctx = ctx.outersIterator.dropWhile(_.scope eq ctx.scope).next
var local = defctx.denotNamed(tp.name).suchThat(_ is ParamOrAccessor).symbol
if (local.exists) (defctx.owner.thisType select local).dealias
diff --git a/src/dotty/tools/dotc/ast/Trees.scala b/src/dotty/tools/dotc/ast/Trees.scala
index 7463449c5..20ae02994 100644
--- a/src/dotty/tools/dotc/ast/Trees.scala
+++ b/src/dotty/tools/dotc/ast/Trees.scala
@@ -594,6 +594,12 @@ object Trees {
def forwardTo = tpt
}
+ /** [typeparams] -> tpt */
+ case class TypeLambdaTree[-T >: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])
+ extends TypTree[T] {
+ type ThisTree[-T >: Untyped] = TypeLambdaTree[T]
+ }
+
/** => T */
case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T])
extends TypTree[T] {
@@ -851,6 +857,7 @@ object Trees {
type OrTypeTree = Trees.OrTypeTree[T]
type RefinedTypeTree = Trees.RefinedTypeTree[T]
type AppliedTypeTree = Trees.AppliedTypeTree[T]
+ type TypeLambdaTree = Trees.TypeLambdaTree[T]
type ByNameTypeTree = Trees.ByNameTypeTree[T]
type TypeBoundsTree = Trees.TypeBoundsTree[T]
type Bind = Trees.Bind[T]
@@ -1028,6 +1035,10 @@ object Trees {
case tree: AppliedTypeTree if (tpt eq tree.tpt) && (args eq tree.args) => tree
case _ => finalize(tree, untpd.AppliedTypeTree(tpt, args))
}
+ def TypeLambdaTree(tree: Tree)(tparams: List[TypeDef], body: Tree): TypeLambdaTree = tree match {
+ case tree: TypeLambdaTree if (tparams eq tree.tparams) && (body eq tree.body) => tree
+ case _ => finalize(tree, untpd.TypeLambdaTree(tparams, body))
+ }
def ByNameTypeTree(tree: Tree)(result: Tree): ByNameTypeTree = tree match {
case tree: ByNameTypeTree if result eq tree.result => tree
case _ => finalize(tree, untpd.ByNameTypeTree(result))
@@ -1160,6 +1171,8 @@ object Trees {
cpy.RefinedTypeTree(tree)(transform(tpt), transformSub(refinements))
case AppliedTypeTree(tpt, args) =>
cpy.AppliedTypeTree(tree)(transform(tpt), transform(args))
+ case TypeLambdaTree(tparams, body) =>
+ cpy.TypeLambdaTree(tree)(transformSub(tparams), transform(body))
case ByNameTypeTree(result) =>
cpy.ByNameTypeTree(tree)(transform(result))
case TypeBoundsTree(lo, hi) =>
@@ -1264,6 +1277,9 @@ object Trees {
this(this(x, tpt), refinements)
case AppliedTypeTree(tpt, args) =>
this(this(x, tpt), args)
+ case TypeLambdaTree(tparams, body) =>
+ implicit val ctx: Context = localCtx
+ this(this(x, tparams), body)
case ByNameTypeTree(result) =>
this(x, result)
case TypeBoundsTree(lo, hi) =>
diff --git a/src/dotty/tools/dotc/ast/tpd.scala b/src/dotty/tools/dotc/ast/tpd.scala
index eff054030..4593b9554 100644
--- a/src/dotty/tools/dotc/ast/tpd.scala
+++ b/src/dotty/tools/dotc/ast/tpd.scala
@@ -21,7 +21,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
private def ta(implicit ctx: Context) = ctx.typeAssigner
def Modifiers(sym: Symbol)(implicit ctx: Context): Modifiers = Modifiers(
- sym.flags & ModifierFlags,
+ sym.flags & (if (sym.isType) ModifierFlags | VarianceFlags else ModifierFlags),
if (sym.privateWithin.exists) sym.privateWithin.asType.name else tpnme.EMPTY,
sym.annotations map (_.tree))
diff --git a/src/dotty/tools/dotc/ast/untpd.scala b/src/dotty/tools/dotc/ast/untpd.scala
index c7a7036c3..b3f8747dc 100644
--- a/src/dotty/tools/dotc/ast/untpd.scala
+++ b/src/dotty/tools/dotc/ast/untpd.scala
@@ -137,6 +137,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
def OrTypeTree(left: Tree, right: Tree): OrTypeTree = new OrTypeTree(left, right)
def RefinedTypeTree(tpt: Tree, refinements: List[Tree]): RefinedTypeTree = new RefinedTypeTree(tpt, refinements)
def AppliedTypeTree(tpt: Tree, args: List[Tree]): AppliedTypeTree = new AppliedTypeTree(tpt, args)
+ def TypeLambdaTree(tparams: List[TypeDef], body: Tree): TypeLambdaTree = new TypeLambdaTree(tparams, body)
def ByNameTypeTree(result: Tree): ByNameTypeTree = new ByNameTypeTree(result)
def TypeBoundsTree(lo: Tree, hi: Tree): TypeBoundsTree = new TypeBoundsTree(lo, hi)
def Bind(name: Name, body: Tree): Bind = new Bind(name, body)
diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala
index 3cc3091b5..a50945108 100644
--- a/src/dotty/tools/dotc/config/Config.scala
+++ b/src/dotty/tools/dotc/config/Config.scala
@@ -72,10 +72,9 @@ object Config {
/** If this flag is set, take the fast path when comparing same-named type-aliases and types */
final val fastPathForRefinedSubtype = true
- /** If this flag is set, $apply projections are checked that they apply to a
- * higher-kinded type.
+ /** If this flag is set, higher-kinded applications are checked for validity
*/
- final val checkProjections = false
+ final val checkHKApplications = false
/** The recursion depth for showing a summarized string */
final val summarizeDepth = 2
@@ -98,6 +97,13 @@ object Config {
*/
final val splitProjections = false
+ /** If this flag is on, always rewrite an application `S[Ts]` where `S` is an alias for
+ * `[Xs] -> U` to `[Xs := Ts]U`. If this flag is off, the rewriting is only done if `S` is a
+ * reference to an instantiated parameter. Turning this flag on was observed to
+ * give a ~6% speedup on the JUnit test suite.
+ */
+ final val simplifyApplications = true
+
/** Initial size of superId table */
final val InitialSuperIdsSize = 4096
diff --git a/src/dotty/tools/dotc/core/Constraint.scala b/src/dotty/tools/dotc/core/Constraint.scala
index 19f93ce47..e10523753 100644
--- a/src/dotty/tools/dotc/core/Constraint.scala
+++ b/src/dotty/tools/dotc/core/Constraint.scala
@@ -23,7 +23,7 @@ abstract class Constraint extends Showable {
type This <: Constraint
/** Does the constraint's domain contain the type parameters of `pt`? */
- def contains(pt: PolyType): Boolean
+ def contains(pt: GenericType): Boolean
/** Does the constraint's domain contain the type parameter `param`? */
def contains(param: PolyParam): Boolean
@@ -79,7 +79,7 @@ abstract class Constraint extends Showable {
* satisfiability but will solved to give instances of
* type variables.
*/
- def add(poly: PolyType, tvars: List[TypeVar])(implicit ctx: Context): This
+ def add(poly: GenericType, tvars: List[TypeVar])(implicit ctx: Context): This
/** A new constraint which is derived from this constraint by updating
* the entry for parameter `param` to `tp`.
@@ -117,18 +117,17 @@ abstract class Constraint extends Showable {
*/
def narrowBound(param: PolyParam, bound: Type, isUpper: Boolean)(implicit ctx: Context): This
- /** Is entry associated with `pt` removable?
- * @param removedParam The index of a parameter which is still present in the
- * entry array, but is going to be removed at the same step,
- * or -1 if no such parameter exists.
+ /** Is entry associated with `pt` removable? This is the case if
+ * all type parameters of the entry are associated with type variables
+ * which have their `inst` fields set.
*/
- def isRemovable(pt: PolyType, removedParam: Int = -1): Boolean
+ def isRemovable(pt: GenericType): Boolean
/** A new constraint with all entries coming from `pt` removed. */
- def remove(pt: PolyType)(implicit ctx: Context): This
+ def remove(pt: GenericType)(implicit ctx: Context): This
/** The polytypes constrained by this constraint */
- def domainPolys: List[PolyType]
+ def domainPolys: List[GenericType]
/** The polytype parameters constrained by this constraint */
def domainParams: List[PolyParam]
diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala
index f8eae186a..18e47a7f2 100644
--- a/src/dotty/tools/dotc/core/ConstraintHandling.scala
+++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -6,6 +6,7 @@ import Types._, Contexts._, Symbols._
import Decorators._
import config.Config
import config.Printers._
+import TypeApplications.EtaExpansion
import collection.mutable
/** Methods for adding constraints and solving them.
@@ -34,6 +35,11 @@ trait ConstraintHandling {
/** If the constraint is frozen we cannot add new bounds to the constraint. */
protected var frozenConstraint = false
+ /** We are currently comparing lambdas. Used as a flag for
+ * optimization: when `false`, no need to do an expensive `pruneLambdaParams`
+ */
+ protected var comparingLambdas = false
+
private def addOneBound(param: PolyParam, bound: Type, isUpper: Boolean): Boolean =
!constraint.contains(param) || {
def occursIn(bound: Type): Boolean = {
@@ -163,12 +169,64 @@ trait ConstraintHandling {
}
}
}
+ assert(constraint.contains(param))
val bound = if (fromBelow) constraint.fullLowerBound(param) else constraint.fullUpperBound(param)
val inst = avoidParam(bound)
typr.println(s"approx ${param.show}, from below = $fromBelow, bound = ${bound.show}, inst = ${inst.show}")
inst
}
+ /** The instance type of `param` in the current constraint (which contains `param`).
+ * If `fromBelow` is true, the instance type is the lub of the parameter's
+ * lower bounds; otherwise it is the glb of its upper bounds. However,
+ * a lower bound instantiation can be a singleton type only if the upper bound
+ * is also a singleton type.
+ */
+ def instanceType(param: PolyParam, fromBelow: Boolean): Type = {
+ def upperBound = constraint.fullUpperBound(param)
+ def isSingleton(tp: Type): Boolean = tp match {
+ case tp: SingletonType => true
+ case AndType(tp1, tp2) => isSingleton(tp1) | isSingleton(tp2)
+ case OrType(tp1, tp2) => isSingleton(tp1) & isSingleton(tp2)
+ case _ => false
+ }
+ def isFullyDefined(tp: Type): Boolean = tp match {
+ case tp: TypeVar => tp.isInstantiated && isFullyDefined(tp.instanceOpt)
+ case tp: TypeProxy => isFullyDefined(tp.underlying)
+ case tp: AndOrType => isFullyDefined(tp.tp1) && isFullyDefined(tp.tp2)
+ case _ => true
+ }
+ def isOrType(tp: Type): Boolean = tp.stripTypeVar.dealias match {
+ case tp: OrType => true
+ case tp: RefinedOrRecType => isOrType(tp.parent)
+ case AndType(tp1, tp2) => isOrType(tp1) | isOrType(tp2)
+ case WildcardType(bounds: TypeBounds) => isOrType(bounds.hi)
+ case _ => false
+ }
+
+ // First, solve the constraint.
+ var inst = approximation(param, fromBelow)
+
+ // Then, approximate by (1.) - (3.) and simplify as follows.
+ // 1. If instance is from below and is a singleton type, yet
+ // upper bound is not a singleton type, widen the instance.
+ if (fromBelow && isSingleton(inst) && !isSingleton(upperBound))
+ inst = inst.widen
+
+ inst = inst.simplified
+
+ // 2. If instance is from below and is a fully-defined union type, yet upper bound
+ // is not a union type, approximate the union type from above by an intersection
+ // of all common base types.
+ if (fromBelow && isOrType(inst) && isFullyDefined(inst) && !isOrType(upperBound))
+ inst = inst.approximateUnion
+
+ // 3. If instance is from below, and upper bound has open named parameters
+ // make sure the instance has all named parameters of the bound.
+ if (fromBelow) inst = inst.widenToNamedTypeParams(param.namedTypeParams)
+ inst
+ }
+
/** Constraint `c1` subsumes constraint `c2`, if under `c2` as constraint we have
* for all poly params `p` defined in `c2` as `p >: L2 <: U2`:
*
@@ -193,9 +251,9 @@ trait ConstraintHandling {
}
/** The current bounds of type parameter `param` */
- final def bounds(param: PolyParam): TypeBounds = constraint.entry(param) match {
- case bounds: TypeBounds => bounds
- case _ => param.binder.paramBounds(param.paramNum)
+ final def bounds(param: PolyParam): TypeBounds = {
+ val e = constraint.entry(param)
+ if (e.exists) e.bounds else param.binder.paramBounds(param.paramNum)
}
/** Add polytype `pt`, possibly with type variables `tvars`, to current constraint
@@ -236,6 +294,36 @@ trait ConstraintHandling {
checkPropagated(s"added $description") {
addConstraintInvocations += 1
+ /** When comparing lambdas we might get constraints such as
+ * `A <: X0` or `A = List[X0]` where `A` is a constrained parameter
+ * and `X0` is a lambda parameter. The constraint for `A` is not allowed
+ * to refer to such a lambda parameter because the lambda parameter is
+ * not visible where `A` is defined. Consequently, we need to
+ * approximate the bound so that the lambda parameter does not appear in it.
+ * If `tp` is an upper bound, we need to approximate with something smaller,
+ * otherwise something larger.
+ * Test case in pos/i94-nada.scala. This test crashes with an illegal instance
+ * error in Test2 when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is
+ * missing.
+ */
+ def pruneLambdaParams(tp: Type) =
+ if (comparingLambdas && param.binder.isInstanceOf[PolyType]) {
+ val approx = new ApproximatingTypeMap {
+ def apply(t: Type): Type = t match {
+ case t @ PolyParam(tl: TypeLambda, n) =>
+ val effectiveVariance = if (fromBelow) -variance else variance
+ val bounds = tl.paramBounds(n)
+ if (effectiveVariance > 0) bounds.lo
+ else if (effectiveVariance < 0) bounds.hi
+ else NoType
+ case _ =>
+ mapOver(t)
+ }
+ }
+ approx(tp)
+ }
+ else tp
+
def addParamBound(bound: PolyParam) =
if (fromBelow) addLess(bound, param) else addLess(param, bound)
@@ -281,12 +369,18 @@ trait ConstraintHandling {
else NoType
case bound: TypeVar if constraint contains bound.origin =>
prune(bound.underlying)
- case bound: PolyParam if constraint contains bound =>
- if (!addParamBound(bound)) NoType
- else if (fromBelow) defn.NothingType
- else defn.AnyType
+ case bound: PolyParam =>
+ constraint.entry(bound) match {
+ case NoType => pruneLambdaParams(bound)
+ case _: TypeBounds =>
+ if (!addParamBound(bound)) NoType
+ else if (fromBelow) defn.NothingType
+ else defn.AnyType
+ case inst =>
+ prune(inst)
+ }
case _ =>
- bound
+ pruneLambdaParams(bound)
}
try bound match {
diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala
index 5cb373cfd..8d020a428 100644
--- a/src/dotty/tools/dotc/core/Definitions.scala
+++ b/src/dotty/tools/dotc/core/Definitions.scala
@@ -86,17 +86,17 @@ class Definitions {
}
private def newPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int,
- resultTypeFn: PolyType => Type, flags: FlagSet = EmptyFlags) = {
+ resultTypeFn: GenericType => Type, flags: FlagSet = EmptyFlags) = {
val tparamNames = tpnme.syntheticTypeParamNames(typeParamCount)
val tparamBounds = tparamNames map (_ => TypeBounds.empty)
val ptype = PolyType(tparamNames)(_ => tparamBounds, resultTypeFn)
newMethod(cls, name, ptype, flags)
}
- private def newT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) =
+ private def newT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: GenericType => Type, flags: FlagSet) =
newPolyMethod(cls, name, 1, resultTypeFn, flags)
- private def newT1EmptyParamsMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) =
+ private def newT1EmptyParamsMethod(cls: ClassSymbol, name: TermName, resultTypeFn: GenericType => Type, flags: FlagSet) =
newPolyMethod(cls, name, 1, pt => MethodType(Nil, resultTypeFn(pt)), flags)
private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef] = {
@@ -167,7 +167,7 @@ class Definitions {
lazy val Any_hashCode = newMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType))
lazy val Any_toString = newMethod(AnyClass, nme.toString_, MethodType(Nil, StringType))
lazy val Any_## = newMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final)
- lazy val Any_getClass = newMethod(AnyClass, nme.getClass_, MethodType(Nil, ClassClass.typeRef), Final)
+ lazy val Any_getClass = newMethod(AnyClass, nme.getClass_, MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.empty)), Final)
lazy val Any_isInstanceOf = newT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final)
lazy val Any_asInstanceOf = newT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, PolyParam(_, 0), Final)
@@ -663,71 +663,6 @@ class Definitions {
def functionArity(tp: Type)(implicit ctx: Context) = tp.dealias.argInfos.length - 1
- // ----- LambdaXYZ traits ------------------------------------------
-
- private var myLambdaTraits: Set[Symbol] = Set()
-
- /** The set of HigherKindedXYZ traits encountered so far */
- def lambdaTraits: Set[Symbol] = myLambdaTraits
-
- private var LambdaTraitForVariances = mutable.Map[List[Int], ClassSymbol]()
-
- /** The HigherKinded trait corresponding to symbols `boundSyms` (which are assumed
- * to be the type parameters of a higher-kided type). This is a class symbol that
- * would be generated by the following schema.
- *
- * trait LambdaXYZ extends Object with P1 with ... with Pn {
- * type v_1 hk$0; ...; type v_N hk$N;
- * type +$Apply
- * }
- *
- * Here:
- *
- * - v_i are the variances of the bound symbols (i.e. +, -, or empty).
- * - XYZ is a string of length N with one letter for each variant of a bound symbol,
- * using `P` (positive variance), `N` (negative variance), `I` (invariant).
- * - for each positive or negative variance v_i there is a parent trait Pj which
- * is the same as LambdaXYZ except that it has `I` in i-th position.
- */
- def LambdaTrait(vcs: List[Int]): ClassSymbol = {
- assert(vcs.nonEmpty)
-
- def varianceFlags(v: Int) = v match {
- case -1 => Contravariant
- case 0 => EmptyFlags
- case 1 => Covariant
- }
-
- val completer = new LazyType {
- def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
- val cls = denot.asClass.classSymbol
- val paramDecls = newScope
- for (i <- 0 until vcs.length)
- newTypeParam(cls, tpnme.hkArg(i), varianceFlags(vcs(i)), paramDecls)
- newTypeField(cls, tpnme.hkApply, Covariant, paramDecls)
- val parentTraitRefs =
- for (i <- 0 until vcs.length if vcs(i) != 0)
- yield LambdaTrait(vcs.updated(i, 0)).typeRef
- denot.info = ClassInfo(
- ScalaPackageClass.thisType, cls, ObjectClass.typeRef :: parentTraitRefs.toList, paramDecls)
- }
- }
-
- val traitName = tpnme.hkLambda(vcs)
-
- def createTrait = {
- val cls = newClassSymbol(
- ScalaPackageClass,
- traitName,
- PureInterfaceCreationFlags | Synthetic,
- completer)
- myLambdaTraits += cls
- cls
- }
-
- LambdaTraitForVariances.getOrElseUpdate(vcs, createTrait)
- }
-
// ----- primitive value class machinery ------------------------------------------
/** This class would also be obviated by the implicit function type design */
diff --git a/src/dotty/tools/dotc/core/Denotations.scala b/src/dotty/tools/dotc/core/Denotations.scala
index 5ce8cbcd8..09971d1d1 100644
--- a/src/dotty/tools/dotc/core/Denotations.scala
+++ b/src/dotty/tools/dotc/core/Denotations.scala
@@ -146,6 +146,9 @@ object Denotations {
/** Is this denotation different from NoDenotation or an ErrorDenotation? */
def exists: Boolean = true
+ /** A denotation with the info of this denotation transformed using `f` */
+ def mapInfo(f: Type => Type)(implicit ctx: Context): Denotation
+
/** If this denotation does not exist, fallback to alternative */
final def orElse(that: => Denotation) = if (this.exists) this else that
@@ -242,7 +245,7 @@ object Denotations {
}
else if (exists && !qualifies(symbol)) NoDenotation
else asSingleDenotation
- }
+ }
/** Form a denotation by conjoining with denotation `that`.
*
@@ -456,6 +459,8 @@ object Denotations {
else if (!d2.exists) d1
else derivedMultiDenotation(d1, d2)
}
+ def mapInfo(f: Type => Type)(implicit ctx: Context): Denotation =
+ derivedMultiDenotation(denot1.mapInfo(f), denot2.mapInfo(f))
def derivedMultiDenotation(d1: Denotation, d2: Denotation) =
if ((d1 eq denot1) && (d2 eq denot2)) this else MultiDenotation(d1, d2)
override def toString = alternatives.mkString(" <and> ")
@@ -488,6 +493,9 @@ object Denotations {
if ((symbol eq this.symbol) && (info eq this.info)) this
else newLikeThis(symbol, info)
+ def mapInfo(f: Type => Type)(implicit ctx: Context): SingleDenotation =
+ derivedSingleDenotation(symbol, f(info))
+
def orElse(that: => SingleDenotation) = if (this.exists) this else that
def altsWith(p: Symbol => Boolean): List[SingleDenotation] =
@@ -593,7 +601,8 @@ object Denotations {
*/
private def bringForward()(implicit ctx: Context): SingleDenotation = this match {
case denot: SymDenotation if ctx.stillValid(denot) =>
- assert(ctx.runId > validFor.runId, s"denotation $denot invalid in run ${ctx.runId}. ValidFor: $validFor")
+ assert(ctx.runId > validFor.runId || ctx.settings.YtestPickler.value, // mixing test pickler with debug printing can travel back in time
+ s"denotation $denot invalid in run ${ctx.runId}. ValidFor: $validFor")
var d: SingleDenotation = denot
do {
d.validFor = Period(ctx.period.runId, d.validFor.firstPhaseId, d.validFor.lastPhaseId)
diff --git a/src/dotty/tools/dotc/core/Flags.scala b/src/dotty/tools/dotc/core/Flags.scala
index cd660aa46..bdd6bbdcf 100644
--- a/src/dotty/tools/dotc/core/Flags.scala
+++ b/src/dotty/tools/dotc/core/Flags.scala
@@ -436,7 +436,8 @@ object Flags {
/** Flags representing modifiers that can appear in trees */
final val ModifierFlags =
- SourceModifierFlags | Module | Param | Synthetic | Package | Local | commonFlags(Mutable)
+ SourceModifierFlags | Module | Param | Synthetic | Package | Local |
+ commonFlags(Mutable)
// | Trait is subsumed by commonFlags(Lazy) from SourceModifierFlags
assert(ModifierFlags.isTermFlags && ModifierFlags.isTypeFlags)
diff --git a/src/dotty/tools/dotc/core/Mode.scala b/src/dotty/tools/dotc/core/Mode.scala
index 0e188ace2..3e9b7effe 100644
--- a/src/dotty/tools/dotc/core/Mode.scala
+++ b/src/dotty/tools/dotc/core/Mode.scala
@@ -84,5 +84,10 @@ object Mode {
/** Use Scala2 scheme for overloading and implicit resolution */
val OldOverloadingResolution = newMode(14, "OldOverloadingResolution")
+ /** Allow hk applications of type lambdas to wildcard arguments;
+ * used for checking that such applications do not normally arise
+ */
+ val AllowLambdaWildcardApply = newMode(15, "AllowHKApplyToWildcards")
+
val PatternOrType = Pattern | Type
}
diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala
index 17af899e9..f5e0eb8cd 100644
--- a/src/dotty/tools/dotc/core/NameOps.scala
+++ b/src/dotty/tools/dotc/core/NameOps.scala
@@ -101,29 +101,6 @@ object NameOps {
name.length > 0 && name.last == '=' && name.head != '=' && isOperatorPart(name.head)
}
- /** Is this the name of a higher-kinded type parameter of a Lambda? */
- def isHkArgName =
- name.length > 0 &&
- name.head == tpnme.hkArgPrefixHead &&
- name.startsWith(tpnme.hkArgPrefix) && {
- val digits = name.drop(tpnme.hkArgPrefixLength)
- digits.length <= 4 && digits.forall(_.isDigit)
- }
-
- /** The index of the higher-kinded type parameter with this name.
- * Pre: isLambdaArgName.
- */
- def hkArgIndex: Int =
- name.drop(tpnme.hkArgPrefixLength).toString.toInt
-
- def isLambdaTraitName(implicit ctx: Context): Boolean =
- name.isTypeName && name.startsWith(tpnme.hkLambdaPrefix)
-
- def lambdaTraitVariances(implicit ctx: Context): List[Int] = {
- val vs = name.drop(tpnme.hkLambdaPrefix.length)
- vs.toList.map(c => tpnme.varianceSuffixes.indexOf(c) - 1)
- }
-
/** If the name ends with $nn where nn are
* all digits, strip the $ and the digits.
* Otherwise return the argument.
diff --git a/src/dotty/tools/dotc/core/OrderingConstraint.scala b/src/dotty/tools/dotc/core/OrderingConstraint.scala
index e818862cb..b0170b67c 100644
--- a/src/dotty/tools/dotc/core/OrderingConstraint.scala
+++ b/src/dotty/tools/dotc/core/OrderingConstraint.scala
@@ -11,14 +11,15 @@ import config.Config
import config.Printers._
import collection.immutable.BitSet
import reflect.ClassTag
+import annotation.tailrec
object OrderingConstraint {
/** The type of `OrderingConstraint#boundsMap` */
- type ParamBounds = SimpleMap[PolyType, Array[Type]]
+ type ParamBounds = SimpleMap[GenericType, Array[Type]]
/** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */
- type ParamOrdering = SimpleMap[PolyType, Array[List[PolyParam]]]
+ type ParamOrdering = SimpleMap[GenericType, Array[List[PolyParam]]]
/** A new constraint with given maps */
private def newConstraint(boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering)(implicit ctx: Context) : OrderingConstraint = {
@@ -30,11 +31,11 @@ object OrderingConstraint {
/** A lens for updating a single entry array in one of the three constraint maps */
abstract class ConstraintLens[T <: AnyRef: ClassTag] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[T]
- def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[T])(implicit ctx: Context): OrderingConstraint
+ def entries(c: OrderingConstraint, poly: GenericType): Array[T]
+ def updateEntries(c: OrderingConstraint, poly: GenericType, entries: Array[T])(implicit ctx: Context): OrderingConstraint
def initial: T
- def apply(c: OrderingConstraint, poly: PolyType, idx: Int) = {
+ def apply(c: OrderingConstraint, poly: GenericType, idx: Int) = {
val es = entries(c, poly)
if (es == null) initial else es(idx)
}
@@ -45,7 +46,7 @@ object OrderingConstraint {
* parts of `current` which are not shared by `prev`.
*/
def update(prev: OrderingConstraint, current: OrderingConstraint,
- poly: PolyType, idx: Int, entry: T)(implicit ctx: Context): OrderingConstraint = {
+ poly: GenericType, idx: Int, entry: T)(implicit ctx: Context): OrderingConstraint = {
var es = entries(current, poly)
if (es != null && (es(idx) eq entry)) current
else {
@@ -70,7 +71,7 @@ object OrderingConstraint {
update(prev, current, param.binder, param.paramNum, entry)
def map(prev: OrderingConstraint, current: OrderingConstraint,
- poly: PolyType, idx: Int, f: T => T)(implicit ctx: Context): OrderingConstraint =
+ poly: GenericType, idx: Int, f: T => T)(implicit ctx: Context): OrderingConstraint =
update(prev, current, poly, idx, f(apply(current, poly, idx)))
def map(prev: OrderingConstraint, current: OrderingConstraint,
@@ -79,25 +80,25 @@ object OrderingConstraint {
}
val boundsLens = new ConstraintLens[Type] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[Type] =
+ def entries(c: OrderingConstraint, poly: GenericType): Array[Type] =
c.boundsMap(poly)
- def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[Type])(implicit ctx: Context): OrderingConstraint =
+ def updateEntries(c: OrderingConstraint, poly: GenericType, entries: Array[Type])(implicit ctx: Context): OrderingConstraint =
newConstraint(c.boundsMap.updated(poly, entries), c.lowerMap, c.upperMap)
def initial = NoType
}
val lowerLens = new ConstraintLens[List[PolyParam]] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
+ def entries(c: OrderingConstraint, poly: GenericType): Array[List[PolyParam]] =
c.lowerMap(poly)
- def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
+ def updateEntries(c: OrderingConstraint, poly: GenericType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
newConstraint(c.boundsMap, c.lowerMap.updated(poly, entries), c.upperMap)
def initial = Nil
}
val upperLens = new ConstraintLens[List[PolyParam]] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
+ def entries(c: OrderingConstraint, poly: GenericType): Array[List[PolyParam]] =
c.upperMap(poly)
- def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
+ def updateEntries(c: OrderingConstraint, poly: GenericType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
newConstraint(c.boundsMap, c.lowerMap, c.upperMap.updated(poly, entries))
def initial = Nil
}
@@ -147,11 +148,11 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
// ----------- Contains tests --------------------------------------------------
- def contains(pt: PolyType): Boolean = boundsMap(pt) != null
+ def contains(pt: GenericType): Boolean = boundsMap(pt) != null
def contains(param: PolyParam): Boolean = {
val entries = boundsMap(param.binder)
- entries != null && entries(param.paramNum).isInstanceOf[TypeBounds]
+ entries != null && isBounds(entries(param.paramNum))
}
def contains(tvar: TypeVar): Boolean = {
@@ -278,7 +279,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
stripParams(tp, paramBuf, isUpper)
.orElse(if (isUpper) defn.AnyType else defn.NothingType)
- def add(poly: PolyType, tvars: List[TypeVar])(implicit ctx: Context): This = {
+ def add(poly: GenericType, tvars: List[TypeVar])(implicit ctx: Context): This = {
assert(!contains(poly))
val nparams = poly.paramNames.length
val entries1 = new Array[Type](nparams * 2)
@@ -291,7 +292,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
* Update all bounds to be normalized and update ordering to account for
* dependent parameters.
*/
- private def init(poly: PolyType)(implicit ctx: Context): This = {
+ private def init(poly: GenericType)(implicit ctx: Context): This = {
var current = this
val loBuf, hiBuf = new mutable.ListBuffer[PolyParam]
var i = 0
@@ -398,7 +399,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
def removeParam(ps: List[PolyParam]) =
ps.filterNot(p => p.binder.eq(poly) && p.paramNum == idx)
- def replaceParam(tp: Type, atPoly: PolyType, atIdx: Int): Type = tp match {
+ def replaceParam(tp: Type, atPoly: GenericType, atIdx: Int): Type = tp match {
case bounds @ TypeBounds(lo, hi) =>
def recombine(andor: AndOrType, op: (Type, Boolean) => Type, isUpper: Boolean): Type = {
@@ -419,7 +420,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
def replaceIn(tp: Type, isUpper: Boolean): Type = tp match {
case `param` => normalize(replacement, isUpper)
case tp: AndOrType if isUpper == tp.isAnd => recombine(tp, replaceIn, isUpper)
- case _ => tp
+ case _ => tp.substParam(param, replacement)
}
bounds.derivedTypeBounds(replaceIn(lo, isUpper = false), replaceIn(hi, isUpper = true))
@@ -428,7 +429,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
}
var current =
- if (isRemovable(poly, idx)) remove(poly) else updateEntry(param, replacement)
+ if (isRemovable(poly)) remove(poly) else updateEntry(param, replacement)
current.foreachParam {(p, i) =>
current = boundsLens.map(this, current, p, i, replaceParam(_, p, i))
current = lowerLens.map(this, current, p, i, removeParam)
@@ -438,9 +439,9 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
}
}
- def remove(pt: PolyType)(implicit ctx: Context): This = {
+ def remove(pt: GenericType)(implicit ctx: Context): This = {
def removeFromOrdering(po: ParamOrdering) = {
- def removeFromBoundss(key: PolyType, bndss: Array[List[PolyParam]]): Array[List[PolyParam]] = {
+ def removeFromBoundss(key: GenericType, bndss: Array[List[PolyParam]]): Array[List[PolyParam]] = {
val bndss1 = bndss.map(_.filterConserve(_.binder ne pt))
if (bndss.corresponds(bndss1)(_ eq _)) bndss else bndss1
}
@@ -449,31 +450,26 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap))
}
- def isRemovable(pt: PolyType, removedParam: Int = -1): Boolean = {
+ def isRemovable(pt: GenericType): Boolean = {
val entries = boundsMap(pt)
- var noneLeft = true
- var i = paramCount(entries)
- while (noneLeft && i > 0) {
- i -= 1
- if (i != removedParam && isBounds(entries(i))) noneLeft = false
- else typeVar(entries, i) match {
- case tv: TypeVar =>
- if (!tv.inst.exists) noneLeft = false // need to keep line around to compute instType
- case _ =>
+ @tailrec def allRemovable(last: Int): Boolean =
+ if (last < 0) true
+ else typeVar(entries, last) match {
+ case tv: TypeVar => tv.inst.exists && allRemovable(last - 1)
+ case _ => false
}
- }
- noneLeft
+ allRemovable(paramCount(entries) - 1)
}
// ---------- Exploration --------------------------------------------------------
- def domainPolys: List[PolyType] = boundsMap.keys
+ def domainPolys: List[GenericType] = boundsMap.keys
def domainParams: List[PolyParam] =
for {
(poly, entries) <- boundsMap.toList
n <- 0 until paramCount(entries)
- if isBounds(entries(n))
+ if entries(n).exists
} yield PolyParam(poly, n)
def forallParams(p: PolyParam => Boolean): Boolean = {
@@ -484,7 +480,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
true
}
- def foreachParam(p: (PolyType, Int) => Unit): Unit =
+ def foreachParam(p: (GenericType, Int) => Unit): Unit =
boundsMap.foreachBinding { (poly, entries) =>
0.until(poly.paramNames.length).foreach(p(poly, _))
}
@@ -501,7 +497,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
override def checkClosed()(implicit ctx: Context): Unit = {
def isFreePolyParam(tp: Type) = tp match {
- case PolyParam(binder, _) => !contains(binder)
+ case PolyParam(binder: GenericType, _) => !contains(binder)
case _ => false
}
def checkClosedType(tp: Type, where: String) =
diff --git a/src/dotty/tools/dotc/core/Signature.scala b/src/dotty/tools/dotc/core/Signature.scala
index 54771bae5..b2e627cbe 100644
--- a/src/dotty/tools/dotc/core/Signature.scala
+++ b/src/dotty/tools/dotc/core/Signature.scala
@@ -22,22 +22,40 @@ import TypeErasure.sigName
* "scala.String".toTypeName)
*
* The signatures of non-method types are always `NotAMethod`.
+ *
+ * There are three kinds of "missing" parts of signatures:
+ *
+ * - tpnme.EMPTY Result type marker for NotAMethod and OverloadedSignature
+ * - tpnme.WILDCARD Arises from a Wildcard or error type
+ * - tpnme.Uninstantiated Arises from an uninstantiated type variable
*/
case class Signature(paramsSig: List[TypeName], resSig: TypeName) {
import Signature._
- /** Does this signature coincide with that signature on their parameter parts? */
- final def sameParams(that: Signature): Boolean = this.paramsSig == that.paramsSig
+ /** Two names are consistent if they are the same or one of them is tpnme.Uninstantiated */
+ private def consistent(name1: TypeName, name2: TypeName) =
+ name1 == name2 || name1 == tpnme.Uninstantiated || name2 == tpnme.Uninstantiated
+
+ /** Does this signature coincide with that signature on their parameter parts?
+ * This is the case if all parameter names are _consistent_, i.e. they are either
+ * equal or one of them is tpnme.Uninstantiated.
+ */
+ final def consistentParams(that: Signature): Boolean = {
+ def loop(names1: List[TypeName], names2: List[TypeName]): Boolean =
+ if (names1.isEmpty) names2.isEmpty
+ else names2.nonEmpty && consistent(names1.head, names2.head) && loop(names1.tail, names2.tail)
+ loop(this.paramsSig, that.paramsSig)
+ }
/** The degree to which this signature matches `that`.
- * If both parameter and result type names match (i.e. they are the same
+ * If parameter names are consistent and result type names match (i.e. they are the same
* or one is a wildcard), the result is `FullMatch`.
- * If only the parameter names match, the result is `ParamMatch` before erasure and
+ * If only the parameter names are consistent, the result is `ParamMatch` before erasure and
* `NoMatch` otherwise.
- * If the parameters do not match, the result is always `NoMatch`.
+ * If the parameters are inconsistent, the result is always `NoMatch`.
*/
final def matchDegree(that: Signature)(implicit ctx: Context): MatchDegree =
- if (sameParams(that))
+ if (consistentParams(that))
if (resSig == that.resSig || isWildcard(resSig) || isWildcard(that.resSig)) FullMatch
else if (!ctx.erasedTypes) ParamMatch
else NoMatch
@@ -52,6 +70,13 @@ case class Signature(paramsSig: List[TypeName], resSig: TypeName) {
def prepend(params: List[Type], isJava: Boolean)(implicit ctx: Context) =
Signature((params.map(sigName(_, isJava))) ++ paramsSig, resSig)
+ /** A signature is under-defined if its paramsSig part contains at least one
+ * `tpnme.Uninstantiated`. Under-defined signatures arise when taking a signature
+ * of a type that still contains uninstantiated type variables. They are eliminated
+ * by `fixSignature` in `PostTyper`.
+ */
+ def isUnderDefined(implicit ctx: Context) =
+ paramsSig.contains(tpnme.Uninstantiated) || resSig == tpnme.Uninstantiated
}
object Signature {
diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala
index 81f6da0e2..f9ede23c5 100644
--- a/src/dotty/tools/dotc/core/StdNames.scala
+++ b/src/dotty/tools/dotc/core/StdNames.scala
@@ -529,12 +529,6 @@ object StdNames {
val synthSwitch: N = "$synthSwitch"
- val hkApply: N = "$Apply"
- val hkArgPrefix: N = "$hk"
- val hkLambdaPrefix: N = "Lambda$"
- val hkArgPrefixHead: Char = hkArgPrefix.head
- val hkArgPrefixLength: Int = hkArgPrefix.length
-
// unencoded operators
object raw {
final val AMP : N = "&"
@@ -739,18 +733,18 @@ object StdNames {
class ScalaTypeNames extends ScalaNames[TypeName] {
protected implicit def fromString(s: String): TypeName = typeName(s)
- @switch def syntheticTypeParamName(i: Int): TypeName = "T" + i
+ def syntheticTypeParamName(i: Int): TypeName = "T" + i
+ def syntheticLambdaParamName(i: Int): TypeName = "X" + i
def syntheticTypeParamNames(num: Int): List[TypeName] =
(0 until num).map(syntheticTypeParamName)(breakOut)
- def hkLambda(vcs: List[Int]): TypeName = hkLambdaPrefix ++ vcs.map(varianceSuffix).mkString
- def hkArg(n: Int): TypeName = hkArgPrefix ++ n.toString
-
- def varianceSuffix(v: Int): Char = varianceSuffixes.charAt(v + 1)
- val varianceSuffixes = "NIP"
+ def syntheticLambdaParamNames(num: Int): List[TypeName] =
+ (0 until num).map(syntheticLambdaParamName)(breakOut)
final val Conforms = encode("<:<")
+
+ final val Uninstantiated: TypeName = "?$"
}
abstract class JavaNames[N <: Name] extends DefinedNames[N] {
diff --git a/src/dotty/tools/dotc/core/Substituters.scala b/src/dotty/tools/dotc/core/Substituters.scala
index 0083ac626..0d1c78e2f 100644
--- a/src/dotty/tools/dotc/core/Substituters.scala
+++ b/src/dotty/tools/dotc/core/Substituters.scala
@@ -179,21 +179,21 @@ trait Substituters { this: Context =>
.mapOver(tp)
}
- final def substRefinedThis(tp: Type, from: Type, to: Type, theMap: SubstRefinedThisMap): Type =
+ final def substRecThis(tp: Type, from: Type, to: Type, theMap: SubstRecThisMap): Type =
tp match {
- case tp @ RefinedThis(binder) =>
+ case tp @ RecThis(binder) =>
if (binder eq from) to else tp
case tp: NamedType =>
if (tp.currentSymbol.isStatic) tp
- else tp.derivedSelect(substRefinedThis(tp.prefix, from, to, theMap))
+ else tp.derivedSelect(substRecThis(tp.prefix, from, to, theMap))
case _: ThisType | _: BoundType | NoPrefix =>
tp
case tp: RefinedType =>
- tp.derivedRefinedType(substRefinedThis(tp.parent, from, to, theMap), tp.refinedName, substRefinedThis(tp.refinedInfo, from, to, theMap))
+ tp.derivedRefinedType(substRecThis(tp.parent, from, to, theMap), tp.refinedName, substRecThis(tp.refinedInfo, from, to, theMap))
case tp: TypeAlias =>
- tp.derivedTypeAlias(substRefinedThis(tp.alias, from, to, theMap))
+ tp.derivedTypeAlias(substRecThis(tp.alias, from, to, theMap))
case _ =>
- (if (theMap != null) theMap else new SubstRefinedThisMap(from, to))
+ (if (theMap != null) theMap else new SubstRecThisMap(from, to))
.mapOver(tp)
}
@@ -266,8 +266,8 @@ trait Substituters { this: Context =>
def apply(tp: Type): Type = substThis(tp, from, to, this)
}
- final class SubstRefinedThisMap(from: Type, to: Type) extends DeepTypeMap {
- def apply(tp: Type): Type = substRefinedThis(tp, from, to, this)
+ final class SubstRecThisMap(from: Type, to: Type) extends DeepTypeMap {
+ def apply(tp: Type): Type = substRecThis(tp, from, to, this)
}
final class SubstParamMap(from: ParamType, to: Type) extends DeepTypeMap {
diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala
index 5c4e120a8..16c77ac30 100644
--- a/src/dotty/tools/dotc/core/SymDenotations.scala
+++ b/src/dotty/tools/dotc/core/SymDenotations.scala
@@ -478,10 +478,6 @@ object SymDenotations {
final def isRefinementClass(implicit ctx: Context): Boolean =
name.decode == tpnme.REFINE_CLASS
- /** is this symbol a trait representing a type lambda? */
- final def isLambdaTrait(implicit ctx: Context): Boolean =
- isClass && name.startsWith(tpnme.hkLambdaPrefix) && owner == defn.ScalaPackageClass
-
/** Is this symbol a package object or its module class? */
def isPackageObject(implicit ctx: Context): Boolean = {
val poName = if (isType) nme.PACKAGE_CLS else nme.PACKAGE
@@ -1121,13 +1117,15 @@ object SymDenotations {
def debugString = toString + "#" + symbol.id // !!! DEBUG
- def hasSkolems(tp: Type): Boolean = tp match {
+ def hasSkolems(tp: Type): Boolean = tp match {
case tp: SkolemType => true
case tp: NamedType => hasSkolems(tp.prefix)
case tp: RefinedType => hasSkolems(tp.parent) || hasSkolems(tp.refinedInfo)
- case tp: PolyType => tp.paramBounds.exists(hasSkolems) || hasSkolems(tp.resType)
+ case tp: RecType => hasSkolems(tp.parent)
+ case tp: GenericType => tp.paramBounds.exists(hasSkolems) || hasSkolems(tp.resType)
case tp: MethodType => tp.paramTypes.exists(hasSkolems) || hasSkolems(tp.resType)
case tp: ExprType => hasSkolems(tp.resType)
+ case tp: HKApply => hasSkolems(tp.tycon) || tp.args.exists(hasSkolems)
case tp: AndOrType => hasSkolems(tp.tp1) || hasSkolems(tp.tp2)
case tp: TypeBounds => hasSkolems(tp.lo) || hasSkolems(tp.hi)
case tp: AnnotatedType => hasSkolems(tp.tpe)
@@ -1210,15 +1208,25 @@ object SymDenotations {
private[this] var myNamedTypeParams: Set[TypeSymbol] = _
+ /** The type parameters in this class, in the order they appear in the current
+ * scope `decls`. This might be temporarily the incorrect order when
+ * reading Scala2 pickled info. The problem is fixed by `updateTypeParams`
+ * which is called once an unpickled symbol has been completed.
+ */
+ private def typeParamsFromDecls(implicit ctx: Context) =
+ unforcedDecls.filter(sym =>
+ (sym is TypeParam) && sym.owner == symbol).asInstanceOf[List[TypeSymbol]]
+
/** The type parameters of this class */
override final def typeParams(implicit ctx: Context): List[TypeSymbol] = {
- def computeTypeParams = {
- if (ctx.erasedTypes || is(Module)) Nil // fast return for modules to avoid scanning package decls
- else if (this ne initial) initial.asSymDenotation.typeParams
- else unforcedDecls.filter(sym =>
- (sym is TypeParam) && sym.owner == symbol).asInstanceOf[List[TypeSymbol]]
- }
- if (myTypeParams == null) myTypeParams = computeTypeParams
+ if (myTypeParams == null)
+ myTypeParams =
+ if (ctx.erasedTypes || is(Module)) Nil // fast return for modules to avoid scanning package decls
+ else if (this ne initial) initial.asSymDenotation.typeParams
+ else infoOrCompleter match {
+ case info: TypeParamsCompleter => info.completerTypeParams(symbol)
+ case _ => typeParamsFromDecls
+ }
myTypeParams
}
@@ -1534,19 +1542,20 @@ object SymDenotations {
if (myMemberCache != null) myMemberCache invalidate sym.name
}
- /** Make sure the type parameters of this class are `tparams`, reorder definitions
- * in scope if necessary.
- * @pre All type parameters in `tparams` are entered in class scope `info.decls`.
+ /** Make sure the type parameters of this class appear in the order given
+ * by `typeParams` in the scope of the class. Reorder definitions in scope if necessary.
*/
- def updateTypeParams(tparams: List[Symbol])(implicit ctx: Context): Unit =
- if (!typeParams.corresponds(tparams)(_.name == _.name)) {
+ def ensureTypeParamsInCorrectOrder()(implicit ctx: Context): Unit = {
+ val tparams = typeParams
+ if (!ctx.erasedTypes && !typeParamsFromDecls.corresponds(tparams)(_.name == _.name)) {
val decls = info.decls
val decls1 = newScope
- for (tparam <- tparams) decls1.enter(decls.lookup(tparam.name))
- for (sym <- decls) if (!typeParams.contains(sym)) decls1.enter(sym)
+ for (tparam <- typeParams) decls1.enter(decls.lookup(tparam.name))
+ for (sym <- decls) if (!tparams.contains(sym)) decls1.enter(sym)
info = classInfo.derivedClassInfo(decls = decls1)
myTypeParams = null
}
+ }
/** All members of this class that have the given name.
* The elements of the returned pre-denotation all
@@ -1633,6 +1642,7 @@ object SymDenotations {
*/
def isCachable(tp: Type): Boolean = tp match {
case _: TypeErasure.ErasedValueType => false
+ case tp: TypeRef if tp.symbol.isClass => true
case tp: TypeVar => tp.inst.exists && inCache(tp.inst)
case tp: TypeProxy => inCache(tp.underlying)
case tp: AndOrType => inCache(tp.tp1) && inCache(tp.tp2)
@@ -1653,10 +1663,10 @@ object SymDenotations {
if (cdenot.superClassBits contains symbol.superId) foldGlb(NoType, tp.parents)
else NoType
case _ =>
- baseTypeRefOf(tp.underlying)
+ baseTypeRefOf(tp.superType)
}
case tp: TypeProxy =>
- baseTypeRefOf(tp.underlying)
+ baseTypeRefOf(tp.superType)
case AndType(tp1, tp2) =>
baseTypeRefOf(tp1) & baseTypeRefOf(tp2)
case OrType(tp1, tp2) =>
@@ -1816,6 +1826,7 @@ object SymDenotations {
override def isType = false
override def owner: Symbol = throw new AssertionError("NoDenotation.owner")
override def computeAsSeenFrom(pre: Type)(implicit ctx: Context): SingleDenotation = this
+ override def mapInfo(f: Type => Type)(implicit ctx: Context): SingleDenotation = this
validFor = Period.allInRun(NoRunId) // will be brought forward automatically
}
@@ -1863,9 +1874,9 @@ object SymDenotations {
/** A subclass of LazyTypes where type parameters can be completed independently of
* the info.
*/
- abstract class TypeParamsCompleter extends LazyType {
+ trait TypeParamsCompleter extends LazyType {
/** The type parameters computed by the completer before completion has finished */
- def completerTypeParams(sym: Symbol): List[TypeSymbol]
+ def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol]
}
val NoSymbolFn = (ctx: Context) => NoSymbol
diff --git a/src/dotty/tools/dotc/core/Symbols.scala b/src/dotty/tools/dotc/core/Symbols.scala
index 1b605e24f..d46ea6b0f 100644
--- a/src/dotty/tools/dotc/core/Symbols.scala
+++ b/src/dotty/tools/dotc/core/Symbols.scala
@@ -367,7 +367,7 @@ object Symbols {
* @param coord The coordinates of the symbol (a position or an index)
* @param id A unique identifier of the symbol (unique per ContextBase)
*/
- class Symbol private[Symbols] (val coord: Coord, val id: Int) extends DotClass with printing.Showable {
+ class Symbol private[Symbols] (val coord: Coord, val id: Int) extends DotClass with TypeParamInfo with printing.Showable {
type ThisName <: Name
@@ -489,6 +489,15 @@ object Symbols {
*/
def pos: Position = if (coord.isPosition) coord.toPosition else NoPosition
+ // TypeParamInfo methods
+ def isTypeParam(implicit ctx: Context) = denot.is(TypeParam)
+ def paramName(implicit ctx: Context) = name.asTypeName
+ def paramBounds(implicit ctx: Context) = denot.info.bounds
+ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = pre.memberInfo(this).bounds
+ def paramBoundsOrCompleter(implicit ctx: Context): Type = denot.infoOrCompleter
+ def paramVariance(implicit ctx: Context) = denot.variance
+ def paramRef(implicit ctx: Context) = denot.typeRef
+
// -------- Printing --------------------------------------------------------
/** The prefix string to be used when displaying this symbol without denotation */
diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala
index 3ed1798ed..314233709 100644
--- a/src/dotty/tools/dotc/core/TypeApplications.scala
+++ b/src/dotty/tools/dotc/core/TypeApplications.scala
@@ -4,7 +4,7 @@ package core
import Types._
import Contexts._
import Symbols._
-import SymDenotations.TypeParamsCompleter
+import SymDenotations.{LazyType, TypeParamsCompleter}
import Decorators._
import util.Stats._
import util.common._
@@ -15,6 +15,7 @@ import StdNames.tpnme
import util.Positions.Position
import config.Printers._
import collection.mutable
+import dotty.tools.dotc.config.Config
import java.util.NoSuchElementException
object TypeApplications {
@@ -37,56 +38,23 @@ object TypeApplications {
case _ => tp
}
- /** Does the variance of `sym1` conform to the variance of `sym2`?
+ /** Does variance `v1` conform to variance `v2`?
 * This is the case if the variances are the same or `v2` is nonvariant.
*/
- def varianceConforms(sym1: TypeSymbol, sym2: TypeSymbol)(implicit ctx: Context) =
- sym1.variance == sym2.variance || sym2.variance == 0
+ def varianceConforms(v1: Int, v2: Int): Boolean =
+ v1 == v2 || v2 == 0
- def variancesConform(syms1: List[TypeSymbol], syms2: List[TypeSymbol])(implicit ctx: Context) =
- syms1.corresponds(syms2)(varianceConforms)
-
- /** Extractor for
- *
- * [v1 X1: B1, ..., vn Xn: Bn] -> T
- * ==>
- * Lambda$_v1...vn { type $hk_i: B_i, type $Apply = [X_i := this.$Arg_i] T }
+ /** Does the variance of type parameter `tparam1` conform to the variance of type parameter `tparam2`?
*/
- object TypeLambda {
- def apply(variances: List[Int],
- argBoundsFns: List[RefinedType => TypeBounds],
- bodyFn: RefinedType => Type)(implicit ctx: Context): Type = {
- def argRefinements(parent: Type, i: Int, bs: List[RefinedType => TypeBounds]): Type = bs match {
- case b :: bs1 =>
- argRefinements(RefinedType(parent, tpnme.hkArg(i), b), i + 1, bs1)
- case nil =>
- parent
- }
- assert(variances.nonEmpty)
- assert(argBoundsFns.length == variances.length)
- RefinedType(
- argRefinements(defn.LambdaTrait(variances).typeRef, 0, argBoundsFns),
- tpnme.hkApply, bodyFn(_).bounds.withVariance(1))
- }
+ def varianceConforms(tparam1: TypeParamInfo, tparam2: TypeParamInfo)(implicit ctx: Context): Boolean =
+ varianceConforms(tparam1.paramVariance, tparam2.paramVariance)
- def unapply(tp: Type)(implicit ctx: Context): Option[(List[Int], List[TypeBounds], Type)] = tp match {
- case app @ RefinedType(parent, tpnme.hkApply) =>
- val cls = parent.typeSymbol
- val variances = cls.typeParams.map(_.variance)
- def collectBounds(t: Type, acc: List[TypeBounds]): List[TypeBounds] = t match {
- case t @ RefinedType(p, rname) =>
- assert(rname.isHkArgName)
- collectBounds(p, t.refinedInfo.bounds :: acc)
- case TypeRef(_, lname) =>
- assert(lname.isLambdaTraitName)
- acc
- }
- val argBounds = collectBounds(parent, Nil)
- Some((variances, argBounds, app.refinedInfo.argInfo))
- case _ =>
- None
- }
- }
+ /** Do the variances of type parameters `tparams1` conform to the variances
+ * of corresponding type parameters `tparams2`?
+ * This is only the case if `tparams1` and `tparams2` have the same length.
+ */
+ def variancesConform(tparams1: List[TypeParamInfo], tparams2: List[TypeParamInfo])(implicit ctx: Context): Boolean =
+ tparams1.corresponds(tparams2)(varianceConforms)
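// A standalone sketch of the variance-conformance relation above, with +1, 0, -1
// encoding covariant, nonvariant and contravariant parameters.
object VarianceSketch {
  def varianceConforms(v1: Int, v2: Int): Boolean = v1 == v2 || v2 == 0
  def variancesConform(vs1: List[Int], vs2: List[Int]): Boolean =
    vs1.corresponds(vs2)(varianceConforms)

  // A covariant parameter conforms to a nonvariant one, but not the other way around:
  assert(varianceConforms(1, 0) && !varianceConforms(0, 1))
}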
/** Extractor for
*
@@ -98,25 +66,14 @@ object TypeApplications {
* @param tycon C
*/
object EtaExpansion {
- def apply(tycon: TypeRef)(implicit ctx: Context) = {
- assert(tycon.isEtaExpandable)
- tycon.EtaExpand(tycon.typeParams)
+ def apply(tycon: Type)(implicit ctx: Context) = {
+ assert(tycon.typeParams.nonEmpty, tycon)
+ tycon.EtaExpand(tycon.typeParamSymbols)
}
- def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = {
- def argsAreForwarders(args: List[Type], n: Int): Boolean = args match {
- case Nil =>
- n == 0
- case TypeRef(RefinedThis(rt), sel) :: args1 =>
- rt.eq(tp) && sel == tpnme.hkArg(n - 1) && argsAreForwarders(args1, n - 1)
- case _ =>
- false
- }
- tp match {
- case TypeLambda(_, argBounds, AppliedType(fn: TypeRef, args))
- if argsAreForwarders(args, tp.typeParams.length) => Some(fn)
- case _ => None
- }
+ def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = tp match {
+ case tp @ TypeLambda(tparams, AppliedType(fn: TypeRef, args)) if (args == tparams.map(_.toArg)) => Some(fn)
+ case _ => None
}
}
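// Illustration in surface Scala (hypothetical alias name): eta-expanding the class
// constructor List yields the type lambda [X] -> List[X], which in source syntax
// can only be written as an alias with a bound parameter.
object EtaExpansionIllustration {
  type ListEta[X] = List[X]            // stands for [X] -> List[X]
  val xs: ListEta[Int] = List(1, 2, 3) // applying the expansion gives back List[Int]
}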
@@ -124,94 +81,112 @@ object TypeApplications {
*
* T { type p_1 v_1= U_1; ...; type p_n v_n= U_n }
*
- * where v_i, p_i are the variances and names of the type parameters of T,
- * If `T`'s class symbol is a lambda trait, follow the refined type with a
- * projection
- *
- * T { ... } # $Apply
+ * where v_i, p_i are the variances and names of the type parameters of T.
*/
object AppliedType {
def apply(tp: Type, args: List[Type])(implicit ctx: Context): Type = tp.appliedTo(args)
def unapply(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match {
- case TypeRef(prefix, tpnme.hkApply) => unapp(prefix)
- case _ =>
- unapp(tp) match {
- case Some((tycon: TypeRef, _)) if tycon.symbol.isLambdaTrait =>
- // We are seeing part of a lambda abstraction, not an applied type
- None
- case x => x
+ case tp: RefinedType =>
+ var refinements: List[RefinedType] = Nil
+ var tycon = tp.stripTypeVar
+ while (tycon.isInstanceOf[RefinedType]) {
+ val rt = tycon.asInstanceOf[RefinedType]
+ refinements = rt :: refinements
+ tycon = rt.parent.stripTypeVar
}
- }
-
- private def unapp(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match {
- case _: RefinedType =>
- val tparams = tp.classSymbol.typeParams
- if (tparams.isEmpty) None
- else {
- val argBuf = new mutable.ListBuffer[Type]
- def stripArgs(tp: Type, n: Int): Type =
- if (n == 0) tp
- else tp match {
- case tp @ RefinedType(parent, pname) if pname == tparams(n - 1).name =>
- val res = stripArgs(parent, n - 1)
- if (res.exists) argBuf += tp.refinedInfo.argInfo
- res
- case _ =>
- NoType
- }
- val res = stripArgs(tp, tparams.length)
- if (res.exists) Some((res, argBuf.toList)) else None
+ def collectArgs(tparams: List[TypeParamInfo],
+ refinements: List[RefinedType],
+ argBuf: mutable.ListBuffer[Type]): Option[(Type, List[Type])] = refinements match {
+ case Nil if tparams.isEmpty && argBuf.nonEmpty =>
+ Some((tycon, argBuf.toList))
+ case RefinedType(_, rname, rinfo) :: refinements1
+ if tparams.nonEmpty && rname == tparams.head.paramName =>
+ collectArgs(tparams.tail, refinements1, argBuf += rinfo.argInfo)
+ case _ =>
+ None
}
- case _ => None
+ collectArgs(tycon.typeParams, refinements, new mutable.ListBuffer[Type])
+ case HKApply(tycon, args) =>
+ Some((tycon, args))
+ case _ =>
+ None
}
}
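// A minimal standalone model of the decoding loop above (all names illustrative):
// an application T[U1, ..., Un] is stored as n nested refinements over T and is
// recovered by matching refinement names against T's type parameter names in order.
object AppliedTypeSketch {
  sealed trait Toy
  case class Ctor(name: String, tparams: List[String]) extends Toy
  case class Refined(parent: Toy, pname: String, arg: String) extends Toy

  def unapplied(tp: Toy): Option[(Toy, List[String])] = {
    def strip(t: Toy, acc: List[Refined]): (Toy, List[Refined]) = t match {
      case r: Refined => strip(r.parent, r :: acc)
      case other      => (other, acc)
    }
    val (tycon, refs) = strip(tp, Nil)
    val tparams = tycon match {
      case Ctor(_, ps) => ps
      case _           => Nil
    }
    def collect(ps: List[String], rs: List[Refined], args: List[String]): Option[(Toy, List[String])] =
      (ps, rs) match {
        case (Nil, Nil) if args.nonEmpty          => Some((tycon, args.reverse))
        case (p :: ps1, r :: rs1) if r.pname == p => collect(ps1, rs1, r.arg :: args)
        case _                                    => None
      }
    collect(tparams, refs, Nil)
  }

  // Map[K, V] applied to (Int, String), innermost refinement first:
  val encoded = Refined(Refined(Ctor("Map", List("K", "V")), "K", "Int"), "V", "String")
  assert(unapplied(encoded) == Some((Ctor("Map", List("K", "V")), List("Int", "String"))))
}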
 /** Adapt all arguments to possible higher-kinded type parameters using EtaExpandIfHK
*/
- def etaExpandIfHK(tparams: List[Symbol], args: List[Type])(implicit ctx: Context): List[Type] =
+ def EtaExpandIfHK(tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): List[Type] =
if (tparams.isEmpty) args
- else args.zipWithConserve(tparams)((arg, tparam) => arg.etaExpandIfHK(tparam.infoOrCompleter))
+ else args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfHK(tparam.paramBoundsOrCompleter))
- /** The references `<rt>.this.$hk0, ..., <rt>.this.$hk<n-1>`. */
- def argRefs(rt: RefinedType, n: Int)(implicit ctx: Context) =
- List.range(0, n).map(i => RefinedThis(rt).select(tpnme.hkArg(i)))
-
- /** Merge `tp1` and `tp2` under a common lambda, combining them with `op`.
- * @param tparams1 The type parameters of `tp1`
- * @param tparams2 The type parameters of `tp2`
- * @pre tparams1.length == tparams2.length
- * Produces the type lambda
+ /** A type map that tries to reduce (part of) the result type of the type lambda `tycon`
+ * with the given `args` (some of which are wildcard arguments represented by type bounds).
+ * Non-wildcard arguments are substituted everywhere as usual. A wildcard argument
+ * `>: L <: U` is substituted for a type lambda parameter `X` only under certain conditions.
*
- * [v1 X1 B1, ..., vn Xn Bn] -> op(tp1[X1, ..., Xn], tp2[X1, ..., Xn])
+ * 1. If Mode.AllowLambdaWildcardApply is set:
+ * The wildcard argument is substituted only if `X` appears in a toplevel refinement of the form
*
- * where
+ * { type A = X }
+ *
+ * and there are no other occurrences of `X` in the reduced type. In that case
+ * the refinement above is replaced by
+ *
+ * { type A >: L <: U }
+ *
+ * The `allReplaced` field indicates whether all occurrences of type lambda parameters
+ * in the reduced type have been replaced with arguments.
*
- * - variances `vi` are the variances of corresponding type parameters for `tp1`
- * or `tp2`, or are 0 of the latter disagree.
- * - bounds `Bi` are the intersection of the corresponding type parameter bounds
- * of `tp1` and `tp2`.
+ * 2. If Mode.AllowLambdaWildcardApply is not set:
+ * All refinements of the form
+ *
+ * { type A = X }
+ *
+ * are replaced by:
+ *
+ * { type A >: L <: U }
+ *
+ * Any other occurrence of `X` in `tycon` is replaced by `U` if the
+ * occurrence of `X` in `tycon` is covariant or nonvariant, or by `L`
+ * if the occurrence is contravariant.
+ *
+ * The idea is that the `AllowLambdaWildcardApply` mode is used to check whether
+ * a type can be soundly reduced, and to give an error or warning if that
+ * is not the case. By contrast, the default mode, with `AllowLambdaWildcardApply`
+ * not set, reduces all applications even if this yields a different type, so
+ * its postcondition is that no type parameters of `tycon` appear in the
+ * result type. Using this mode, we can guarantee that `appliedTo` will never
+ * produce a higher-kinded application with a type lambda as type constructor.
*/
- def hkCombine(tp1: Type, tp2: Type,
- tparams1: List[TypeSymbol], tparams2: List[TypeSymbol], op: (Type, Type) => Type)
- (implicit ctx: Context): Type = {
- val variances = (tparams1, tparams2).zipped.map { (tparam1, tparam2) =>
- val v1 = tparam1.variance
- val v2 = tparam2.variance
- if (v1 == v2) v1 else 0
+ class Reducer(tycon: TypeLambda, args: List[Type])(implicit ctx: Context) extends TypeMap {
+ private var available = (0 until args.length).toSet
+ var allReplaced = true
+ def hasWildcardArg(p: PolyParam) =
+ p.binder == tycon && args(p.paramNum).isInstanceOf[TypeBounds]
+ def canReduceWildcard(p: PolyParam) =
+ !ctx.mode.is(Mode.AllowLambdaWildcardApply) || available.contains(p.paramNum)
+ def apply(t: Type) = t match {
+ case t @ TypeAlias(p: PolyParam) if hasWildcardArg(p) && canReduceWildcard(p) =>
+ available -= p.paramNum
+ args(p.paramNum)
+ case p: PolyParam if p.binder == tycon =>
+ args(p.paramNum) match {
+ case TypeBounds(lo, hi) =>
+ if (ctx.mode.is(Mode.AllowLambdaWildcardApply)) { allReplaced = false; p }
+ else if (variance < 0) lo
+ else hi
+ case arg =>
+ arg
+ }
+ case _: TypeBounds | _: HKApply =>
+ val saved = available
+ available = Set()
+ try mapOver(t)
+ finally available = saved
+ case _ =>
+ mapOver(t)
}
- val bounds: List[RefinedType => TypeBounds] =
- (tparams1, tparams2).zipped.map { (tparam1, tparam2) =>
- val b1: RefinedType => TypeBounds =
- tp1.memberInfo(tparam1).bounds.internalizeFrom(tparams1)
- val b2: RefinedType => TypeBounds =
- tp2.memberInfo(tparam2).bounds.internalizeFrom(tparams2)
- (rt: RefinedType) => b1(rt) & b2(rt)
- }
- val app1: RefinedType => Type = rt => tp1.appliedTo(argRefs(rt, tparams1.length))
- val app2: RefinedType => Type = rt => tp2.appliedTo(argRefs(rt, tparams2.length))
- val body: RefinedType => Type = rt => op(app1(rt), app2(rt))
- TypeLambda(variances, bounds, body)
}
}
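// Illustration in surface Scala (hypothetical alias name): applying an alias whose
// right-hand side is effectively a type lambda to a concrete constructor reduces
// directly; wildcard arguments are the case the Reducer above must treat specially,
// since they cannot simply be substituted for the parameter.
object ReducerIllustration {
  type Pairs[F[_]] = F[(Int, Int)]
  val reduced: Pairs[List] = List((1, 2)) // Pairs[List] reduces to List[(Int, Int)]
}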
@@ -233,38 +208,25 @@ class TypeApplications(val self: Type) extends AnyVal {
* with the bounds on its hk args. See `LambdaAbstract`, where these
* types get introduced, and see `isBoundedLambda` below for the test.
*/
- final def typeParams(implicit ctx: Context): List[TypeSymbol] = /*>|>*/ track("typeParams") /*<|<*/ {
+ final def typeParams(implicit ctx: Context): List[TypeParamInfo] = /*>|>*/ track("typeParams") /*<|<*/ {
self match {
case self: ClassInfo =>
self.cls.typeParams
+ case self: TypeLambda =>
+ self.typeParams
case self: TypeRef =>
val tsym = self.symbol
if (tsym.isClass) tsym.typeParams
- else tsym.infoOrCompleter match {
- case completer: TypeParamsCompleter =>
- val tparams = completer.completerTypeParams(tsym)
- defn.LambdaTrait(tparams.map(_.variance)).typeParams
- case _ =>
- if (!tsym.isCompleting || tsym.isAliasType) tsym.info.typeParams
- else
- // We are facing a problem when computing the type parameters of an uncompleted
- // abstract type. We can't access the bounds of the symbol yet because that
- // would cause a cause a cyclic reference. So we return `Nil` instead
- // and try to make up for it later. The acrobatics in Scala2Unpicker#readType
- // for reading a TypeRef show what's needed.
- Nil
- }
+ else if (!tsym.isCompleting) tsym.info.typeParams
+ else Nil
case self: RefinedType =>
- // inlined and optimized version of
- // val sym = self.LambdaTrait
- // if (sym.exists) return sym.typeParams
- if (self.refinedName == tpnme.hkApply) {
- val sym = self.parent.classSymbol
- if (sym.isLambdaTrait) return sym.typeParams
- }
- self.parent.typeParams.filterNot(_.name == self.refinedName)
- case self: SingletonType =>
+ self.parent.typeParams.filterNot(_.paramName == self.refinedName)
+ case self: RecType =>
+ self.parent.typeParams
+ case _: HKApply | _: SingletonType =>
Nil
+ case self: WildcardType =>
+ self.optBounds.typeParams
case self: TypeProxy =>
self.underlying.typeParams
case _ =>
@@ -272,6 +234,18 @@ class TypeApplications(val self: Type) extends AnyVal {
}
}
+ /** If `self` is a higher-kinded type, its type parameters, otherwise Nil */
+ final def hkTypeParams(implicit ctx: Context): List[TypeParamInfo] =
+ if (isHK) typeParams else Nil
+
+ /** If `self` is a generic class, its type parameter symbols, otherwise Nil */
+ final def typeParamSymbols(implicit ctx: Context): List[TypeSymbol] = typeParams match {
+ case (_: Symbol) :: _ =>
+ assert(typeParams.forall(_.isInstanceOf[Symbol]))
+ typeParams.asInstanceOf[List[TypeSymbol]]
+ case _ => Nil
+ }
+
/** The named type parameters declared or inherited by this type.
* These are all uninstantiated named type parameters of this type or one
* of its base types.
@@ -326,112 +300,61 @@ class TypeApplications(val self: Type) extends AnyVal {
if (params.exists(_.name == self.refinedName)) parent1
else self.derivedRefinedType(parent1, self.refinedName, self.refinedInfo)
case self: TypeProxy =>
- self.underlying.widenToNamedTypeParams(params)
+ self.superType.widenToNamedTypeParams(params)
case self: AndOrType =>
self.derivedAndOrType(
self.tp1.widenToNamedTypeParams(params), self.tp2.widenToNamedTypeParams(params))
}
}
- /** The Lambda trait underlying a type lambda */
- def LambdaTrait(implicit ctx: Context): Symbol = self.stripTypeVar match {
- case RefinedType(parent, tpnme.hkApply) =>
- val sym = self.classSymbol
- if (sym.isLambdaTrait) sym else NoSymbol
- case TypeBounds(lo, hi) => hi.LambdaTrait
- case _ => NoSymbol
- }
-
- /** Is receiver type higher-kinded (i.e. of kind != "*")? */
+ /** Is self type higher-kinded (i.e. of kind != "*")? */
def isHK(implicit ctx: Context): Boolean = self.dealias match {
case self: TypeRef => self.info.isHK
- case RefinedType(_, name) => name == tpnme.hkApply
- case TypeBounds(_, hi) => hi.isHK
- case _ => false
- }
-
- /** is receiver of the form T#$Apply? */
- def isHKApply: Boolean = self match {
- case TypeRef(_, name) => name == tpnme.hkApply
+ case self: RefinedType => false
+ case self: TypeLambda => true
+ case self: HKApply => false
+ case self: SingletonType => false
+ case self: TypeVar =>
+ // Using `origin` instead of `underlying`, as is done for typeParams,
+ // avoids having to set ephemeral in some cases.
+ self.origin.isHK
+ case self: WildcardType => self.optBounds.isHK
+ case self: TypeProxy => self.underlying.isHK
case _ => false
}
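// Illustration in surface Scala: List is higher-kinded (kind * -> *) while
// List[Int] is a proper type (kind *); only the former is accepted where a
// unary type constructor is expected.
object IsHKIllustration {
  trait TakesConstructor[F[_]]
  type Proper = List[Int]              // kind *
  type Ok     = TakesConstructor[List] // well-kinded: List has kind * -> *
}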
- /** True if it can be determined without forcing that the class symbol
- * of this application exists and is not a lambda trait.
- * Equivalent to
- *
- * self.classSymbol.exists && !self.classSymbol.isLambdaTrait
- *
- * but without forcing anything.
- */
- def classNotLambda(implicit ctx: Context): Boolean = self.stripTypeVar match {
- case self: RefinedType =>
- self.parent.classNotLambda
- case self: TypeRef =>
- self.denot.exists && {
- val sym = self.symbol
- if (sym.isClass) !sym.isLambdaTrait
- else sym.isCompleted && self.info.isAlias && self.info.bounds.hi.classNotLambda
- }
- case _ =>
- false
- }
-
- /** Dealias type if it can be done without forcing anything */
+ /** Dealias type if it can be done without forcing the TypeRef's info */
def safeDealias(implicit ctx: Context): Type = self match {
case self: TypeRef if self.denot.exists && self.symbol.isAliasType =>
- self.info.bounds.hi.stripTypeVar.safeDealias
+ self.superType.stripTypeVar.safeDealias
case _ =>
self
}
- /** Replace references to type parameters with references to hk arguments `this.$hk_i`
- * Care is needed not to cause cyclic reference errors, hence `SafeSubstMap`.
- */
- private[TypeApplications] def internalizeFrom[T <: Type](tparams: List[Symbol])(implicit ctx: Context): RefinedType => T =
- (rt: RefinedType) =>
- new ctx.SafeSubstMap(tparams , argRefs(rt, tparams.length))
- .apply(self).asInstanceOf[T]
-
/** Lambda abstract `self` with given type parameters. Examples:
*
* type T[X] = U becomes type T = [X] -> U
- * type T[X] >: L <: U becomes type T >: L <: ([X] -> _ <: U)
+ * type T[X] >: L <: U becomes type T >: L <: ([X] -> U)
+ *
+ * TODO: Handle parameterized lower bounds
*/
- def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = {
-
- /** Replace references to type parameters with references to hk arguments `this.$hk_i`
- * Care is needed not to cause cycles, hence `SafeSubstMap`.
- */
- def internalize[T <: Type](tp: T) =
- (rt: RefinedType) =>
- new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length))
- .apply(tp).asInstanceOf[T]
-
- def expand(tp: Type) = {
+ def LambdaAbstract(tparams: List[TypeParamInfo])(implicit ctx: Context): Type = {
+ def expand(tp: Type) =
TypeLambda(
- tparams.map(_.variance),
- tparams.map(tparam => internalize(self.memberInfo(tparam).bounds)),
- internalize(tp))
- }
+ tpnme.syntheticLambdaParamNames(tparams.length), tparams.map(_.paramVariance))(
+ tl => tparams.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds),
+ tl => tl.lifted(tparams, tp))
self match {
case self: TypeAlias =>
self.derivedTypeAlias(expand(self.alias))
case self @ TypeBounds(lo, hi) =>
- self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi)))
+ self.derivedTypeBounds(
+ if (lo.isRef(defn.NothingClass)) lo else expand(lo),
+ expand(hi))
case _ => expand(self)
}
}
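// Illustration in surface Scala of the two cases in the comment above
// (hypothetical names): an alias and a bounded abstract type member.
object LambdaAbstractIllustration {
  type T[X] = Either[String, X]           // abstracted to: type T = [X] -> Either[String, X]
  trait Box { type M[X] <: Iterable[X] }  // abstracted to: type M <: ([X] -> Iterable[X])
  val ok: T[Int] = Right(42)
}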
- /** A type ref is eta expandable if it refers to a non-lambda class.
- * In that case we can look for parameterized base types of the type
- * to eta expand them.
- */
- def isEtaExpandable(implicit ctx: Context) = self match {
- case self: TypeRef => self.symbol.isClass && !self.name.isLambdaTraitName
- case _ => false
- }
-
/** Convert a type constructor `TC` which has type parameters `T1, ..., Tn`
* in a context where type parameters `U1,...,Un` are expected to
*
@@ -443,23 +366,18 @@ class TypeApplications(val self: Type) extends AnyVal {
* v1 is compatible with v2, if v1 = v2 or v2 is non-variant.
*/
def EtaExpand(tparams: List[TypeSymbol])(implicit ctx: Context): Type = {
- val tparamsToUse = if (variancesConform(typeParams, tparams)) tparams else typeParams
+ val tparamsToUse = if (variancesConform(typeParams, tparams)) tparams else typeParamSymbols
self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparamsToUse)
//.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}")
}
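// Illustration in surface Scala (hypothetical trait): passing the class
// constructor List where a unary parameter F[_] is expected; conceptually List
// is eta-expanded to [X] -> List[X] before kinds are compared.
object EtaExpandUse {
  trait Functor[F[_]] { def map[A, B](fa: F[A])(f: A => B): F[B] }
  val listFunctor: Functor[List] = new Functor[List] {
    def map[A, B](fa: List[A])(f: A => B): List[B] = fa.map(f)
  }
}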
- /** Eta expand the prefix in front of any refinements. */
- def EtaExpandCore(implicit ctx: Context): Type = self.stripTypeVar match {
- case self: RefinedType =>
- self.derivedRefinedType(self.parent.EtaExpandCore, self.refinedName, self.refinedInfo)
- case _ =>
- self.EtaExpand(self.typeParams)
- }
+ /** If self is not higher-kinded, eta expand it. */
+ def ensureHK(implicit ctx: Context): Type =
+ if (isHK) self else EtaExpansion(self)
/** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */
- def etaExpandIfHK(bound: Type)(implicit ctx: Context): Type = {
- val boundLambda = bound.LambdaTrait
- val hkParams = boundLambda.typeParams
+ def EtaExpandIfHK(bound: Type)(implicit ctx: Context): Type = {
+ val hkParams = bound.hkTypeParams
if (hkParams.isEmpty) self
else self match {
case self: TypeRef if self.symbol.isClass && self.typeParams.length == hkParams.length =>
@@ -500,21 +418,21 @@ class TypeApplications(val self: Type) extends AnyVal {
* is covariant is irrelevant, so can be ignored.
*/
def adaptHkVariances(bound: Type)(implicit ctx: Context): Type = {
- val boundLambda = bound.LambdaTrait
- val hkParams = boundLambda.typeParams
+ val hkParams = bound.hkTypeParams
if (hkParams.isEmpty) self
else {
def adaptArg(arg: Type): Type = arg match {
- case arg: TypeRef if arg.symbol.isLambdaTrait &&
- !arg.symbol.typeParams.corresponds(hkParams)(_.variance == _.variance) &&
- arg.symbol.typeParams.corresponds(hkParams)(varianceConforms) =>
- arg.prefix.select(boundLambda)
- case arg: RefinedType =>
- arg.derivedRefinedType(adaptArg(arg.parent), arg.refinedName, arg.refinedInfo)
+ case arg @ TypeLambda(tparams, body) if
+ !tparams.corresponds(hkParams)(_.paramVariance == _.paramVariance) &&
+ tparams.corresponds(hkParams)(varianceConforms) =>
+ TypeLambda(tparams.map(_.paramName), hkParams.map(_.paramVariance))(
+ tl => arg.paramBounds.map(_.subst(arg, tl).bounds),
+ tl => arg.resultType.subst(arg, tl)
+ )
case arg @ TypeAlias(alias) =>
arg.derivedTypeAlias(adaptArg(alias))
case arg @ TypeBounds(lo, hi) =>
- arg.derivedTypeBounds(lo, adaptArg(hi))
+ arg.derivedTypeBounds(adaptArg(lo), adaptArg(hi))
case _ =>
arg
}
@@ -522,99 +440,113 @@ class TypeApplications(val self: Type) extends AnyVal {
}
}
- /** Encode
+ /** The type representing
*
* T[U1, ..., Un]
*
* where
* @param self = `T`
* @param args = `U1,...,Un`
- * performing the following simplifications
- *
- * 1. If `T` is an eta expansion `[X1,..,Xn] -> C[X1,...,Xn]` of class `C` compute
- * `C[U1, ..., Un]` instead.
- * 2. If `T` is some other type lambda `[X1,...,Xn] -> S` none of the arguments
- * `U1,...,Un` is a wildcard, compute `[X1:=U1, ..., Xn:=Un]S` instead.
- * 3. If `T` is a polytype, instantiate it to `U1,...,Un`.
*/
final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ {
- def substHkArgs = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(RefinedThis(rt), name) if rt.eq(self) && name.isHkArgName =>
- args(name.hkArgIndex)
- case _ =>
- mapOver(tp)
- }
- }
- if (args.isEmpty || ctx.erasedTypes) self
- else self.stripTypeVar match {
- case EtaExpansion(self1) =>
- self1.appliedTo(args)
- case TypeLambda(_, _, body) if !args.exists(_.isInstanceOf[TypeBounds]) =>
- substHkArgs(body)
- case self: PolyType =>
- self.instantiate(args)
- case self1 =>
- self1.safeDealias.appliedTo(args, typeParams)
- }
- }
-
- /** Encode application `T[U1, ..., Un]` without simplifications, where
- * @param self = `T`
- * @param args = `U1, ..., Un`
- * @param tparams are assumed to be the type parameters of `T`.
- */
- final def appliedTo(args: List[Type], typParams: List[TypeSymbol])(implicit ctx: Context): Type = {
- def matchParams(t: Type, tparams: List[TypeSymbol], args: List[Type])(implicit ctx: Context): Type = args match {
+ val typParams = self.typeParams
+ def matchParams(t: Type, tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): Type = args match {
case arg :: args1 =>
try {
val tparam :: tparams1 = tparams
- matchParams(RefinedType(t, tparam.name, arg.toBounds(tparam)), tparams1, args1)
+ matchParams(RefinedType(t, tparam.paramName, arg.toBounds(tparam)), tparams1, args1)
} catch {
case ex: MatchError =>
- println(s"applied type mismatch: $self $args, typeParams = $typParams") // !!! DEBUG
+ println(s"applied type mismatch: $self with underlying ${self.underlyingIfProxy}, args = $args, typeParams = $typParams") // !!! DEBUG
//println(s"precomplete decls = ${self.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}")
throw ex
}
case nil => t
}
- assert(args.nonEmpty)
- matchParams(self, typParams, args) match {
- case refined @ RefinedType(_, pname) if pname.isHkArgName =>
- TypeRef(refined, tpnme.hkApply)
- case refined =>
- refined
+ val stripped = self.stripTypeVar
+ val dealiased = stripped.safeDealias
+ if (args.isEmpty || ctx.erasedTypes) self
+ else dealiased match {
+ case dealiased: TypeLambda =>
+ def tryReduce =
+ if (!args.exists(_.isInstanceOf[TypeBounds])) {
+ val followAlias = stripped match {
+ case stripped: TypeRef =>
+ stripped.symbol.is(BaseTypeArg)
+ case _ =>
+ Config.simplifyApplications && {
+ dealiased.resType match {
+ case AppliedType(tyconBody, _) =>
+ variancesConform(typParams, tyconBody.typeParams)
+ // Reducing is safe for type inference, as kind of type constructor does not change
+ case _ => false
+ }
+ }
+ }
+ if ((dealiased eq stripped) || followAlias) dealiased.instantiate(args)
+ else HKApply(self, args)
+ }
+ else dealiased.resType match {
+ case AppliedType(tycon, args1) if tycon.safeDealias ne tycon =>
+ // In this case we should always dealias since we cannot handle
+ // higher-kinded applications to wildcard arguments.
+ dealiased
+ .derivedTypeLambda(resType = tycon.safeDealias.appliedTo(args1))
+ .appliedTo(args)
+ case _ =>
+ val reducer = new Reducer(dealiased, args)
+ val reduced = reducer(dealiased.resType)
+ if (reducer.allReplaced) reduced
+ else HKApply(dealiased, args)
+ }
+ tryReduce
+ case dealiased: PolyType =>
+ dealiased.instantiate(args)
+ case dealiased: AndOrType =>
+ dealiased.derivedAndOrType(dealiased.tp1.appliedTo(args), dealiased.tp2.appliedTo(args))
+ case dealiased: TypeAlias =>
+ dealiased.derivedTypeAlias(dealiased.alias.appliedTo(args))
+ case dealiased: TypeBounds =>
+ dealiased.derivedTypeBounds(dealiased.lo.appliedTo(args), dealiased.hi.appliedTo(args))
+ case dealiased: LazyRef =>
+ LazyRef(() => dealiased.ref.appliedTo(args))
+ case dealiased: WildcardType =>
+ dealiased
+ case dealiased: TypeRef if dealiased.symbol == defn.NothingClass =>
+ dealiased
+ case _ if typParams.isEmpty || typParams.head.isInstanceOf[LambdaParam] =>
+ HKApply(self, args)
+ case dealiased =>
+ matchParams(dealiased, typParams, args)
}
}
final def appliedTo(arg: Type)(implicit ctx: Context): Type = appliedTo(arg :: Nil)
final def appliedTo(arg1: Type, arg2: Type)(implicit ctx: Context): Type = appliedTo(arg1 :: arg2 :: Nil)
+ final def applyIfParameterized(args: List[Type])(implicit ctx: Context): Type =
+ if (typeParams.nonEmpty) appliedTo(args) else self
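// Illustration in surface Scala (hypothetical alias name): applying a dealiasable
// type constructor reduces the application, which can be witnessed with a type equality.
object AppliedToIllustration {
  type Rep[T] = List[T]
  val ev = implicitly[Rep[Int] =:= List[Int]]
}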
+
/** A cycle-safe version of `appliedTo` where computing type parameters do not force
* the typeconstructor. Instead, if the type constructor is completing, we make
* up hk type parameters matching the arguments. This is needed when unpickling
* Scala2 files such as `scala.collection.generic.Mapfactory`.
*/
- final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = {
- val safeTypeParams = self match {
- case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting =>
- // This happens when unpickling e.g. scala$collection$generic$GenMapFactory$$CC
- ctx.warning(i"encountered F-bounded higher-kinded type parameters for ${self.symbol}; assuming they are invariant")
- defn.LambdaTrait(args map alwaysZero).typeParams
- case _ =>
- typeParams
- }
- appliedTo(args, safeTypeParams)
+ final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = self match {
+ case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting =>
+ HKApply(self, args)
+ case _ =>
+ appliedTo(args)
}
/** Turn this type, which is used as an argument for
* type parameter `tparam`, into a TypeBounds RHS
*/
- final def toBounds(tparam: Symbol)(implicit ctx: Context): TypeBounds = self match {
+ final def toBounds(tparam: TypeParamInfo)(implicit ctx: Context): TypeBounds = self match {
case self: TypeBounds => // this can happen for wildcard args
self
case _ =>
- val v = tparam.variance
+ val v = tparam.paramVariance
 /* Not needed.
if (v > 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.upper(self)
else if (v < 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.lower(self)
@@ -628,7 +560,11 @@ class TypeApplications(val self: Type) extends AnyVal {
*/
final def baseArgInfos(base: Symbol)(implicit ctx: Context): List[Type] =
if (self derivesFrom base)
- base.typeParams map (param => self.member(param.name).info.argInfo)
+ self.dealias match {
+ case self: TypeRef if !self.symbol.isClass => self.superType.baseArgInfos(base)
+ case self: HKApply => self.superType.baseArgInfos(base)
+ case _ => base.typeParams.map(param => self.member(param.name).info.argInfo)
+ }
else
Nil
@@ -650,14 +586,6 @@ class TypeApplications(val self: Type) extends AnyVal {
final def baseArgTypesHi(base: Symbol)(implicit ctx: Context): List[Type] =
baseArgInfos(base) mapConserve boundsToHi
- /** The first type argument of the base type instance wrt `base` of this type */
- final def firstBaseArgInfo(base: Symbol)(implicit ctx: Context): Type = base.typeParams match {
- case param :: _ if self derivesFrom base =>
- self.member(param.name).info.argInfo
- case _ =>
- NoType
- }
-
/** The base type including all type arguments and applicable refinements
* of this type. Refinements are applicable if they refine a member of
* the parent type which furthermore is not a name-mangled type parameter.
@@ -671,10 +599,12 @@ class TypeApplications(val self: Type) extends AnyVal {
case TypeBounds(_, hi) => hi.baseTypeWithArgs(base)
case _ => default
}
- case tp @ RefinedType(parent, name) if !tp.member(name).symbol.is(ExpandedTypeParam) =>
+ case tp @ RefinedType(parent, name, _) if !tp.member(name).symbol.is(ExpandedTypeParam) =>
tp.wrapIfMember(parent.baseTypeWithArgs(base))
case tp: TermRef =>
tp.underlying.baseTypeWithArgs(base)
+ case tp: HKApply =>
+ tp.superType.baseTypeWithArgs(base)
case AndType(tp1, tp2) =>
tp1.baseTypeWithArgs(base) & tp2.baseTypeWithArgs(base)
case OrType(tp1, tp2) =>
@@ -729,17 +659,16 @@ class TypeApplications(val self: Type) extends AnyVal {
/** The core type without any type arguments.
* @param `typeArgs` must be the type arguments of this type.
*/
- final def withoutArgs(typeArgs: List[Type]): Type = typeArgs match {
- case _ :: typeArgs1 =>
- val RefinedType(tycon, _) = self
- tycon.withoutArgs(typeArgs1)
- case nil =>
- self
- }
-
- final def typeConstructor(implicit ctx: Context): Type = self.stripTypeVar match {
- case AppliedType(tycon, _) => tycon
- case self => self
+ final def withoutArgs(typeArgs: List[Type]): Type = self match {
+ case HKApply(tycon, args) => tycon
+ case _ =>
+ typeArgs match {
+ case _ :: typeArgs1 =>
+ val RefinedType(tycon, _, _) = self
+ tycon.withoutArgs(typeArgs1)
+ case nil =>
+ self
+ }
}
/** If this is the image of a type argument; recover the type argument,
@@ -761,33 +690,6 @@ class TypeApplications(val self: Type) extends AnyVal {
def elemType(implicit ctx: Context): Type = self match {
case defn.ArrayOf(elemtp) => elemtp
case JavaArrayType(elemtp) => elemtp
- case _ => firstBaseArgInfo(defn.SeqClass)
- }
-
- /** Does this type contain RefinedThis type with `target` as its underling
- * refinement type?
- */
- def containsRefinedThis(target: Type)(implicit ctx: Context): Boolean = {
- def recur(tp: Type): Boolean = tp.stripTypeVar match {
- case RefinedThis(tp) =>
- tp eq target
- case tp: NamedType =>
- if (tp.symbol.isClass) !tp.symbol.isStatic && recur(tp.prefix)
- else tp.info match {
- case TypeAlias(alias) => recur(alias)
- case _ => recur(tp.prefix)
- }
- case tp: RefinedType =>
- recur(tp.refinedInfo) || recur(tp.parent)
- case tp: TypeBounds =>
- recur(tp.lo) || recur(tp.hi)
- case tp: AnnotatedType =>
- recur(tp.underlying)
- case tp: AndOrType =>
- recur(tp.tp1) || recur(tp.tp2)
- case _ =>
- false
- }
- recur(self)
+ case _ => baseArgInfos(defn.SeqClass).headOption.getOrElse(NoType)
}
}
diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala
index 2523c6b9a..a895db178 100644
--- a/src/dotty/tools/dotc/core/TypeComparer.scala
+++ b/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -88,7 +88,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
assert(isSatisfiable, constraint.show)
}
- protected def isSubType(tp1: Type, tp2: Type): Boolean = ctx.traceIndented(s"isSubType ${traceInfo(tp1, tp2)}", subtyping) /*<|<*/ {
+ protected def isSubType(tp1: Type, tp2: Type): Boolean = ctx.traceIndented(s"isSubType ${traceInfo(tp1, tp2)}", subtyping) {
if (tp2 eq NoType) false
else if (tp1 eq tp2) true
else {
@@ -178,11 +178,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
&& !tp1.isInstanceOf[WithFixedSym]
&& !tp2.isInstanceOf[WithFixedSym]
) ||
- compareHkApply(tp1, tp2, inOrder = true) ||
- compareHkApply(tp2, tp1, inOrder = false) ||
thirdTryNamed(tp1, tp2)
case _ =>
- compareHkApply(tp2, tp1, inOrder = false) ||
secondTry(tp1, tp2)
}
}
@@ -259,7 +256,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
if (tp1.prefix.isStable) return false
case _ =>
}
- compareHkApply(tp1, tp2, inOrder = true) ||
thirdTry(tp1, tp2)
case tp1: PolyParam =>
def flagNothingBound = {
@@ -368,16 +364,63 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
// This twist is needed to make collection/generic/ParFactory.scala compile
fourthTry(tp1, tp2) || compareRefinedSlow
case _ =>
- compareRefinedSlow ||
- fourthTry(tp1, tp2) ||
- compareHkLambda(tp2, tp1, inOrder = false) ||
- compareAliasedRefined(tp2, tp1, inOrder = false)
+ compareRefinedSlow || fourthTry(tp1, tp2)
}
else // fast path, in particular for refinements resulting from parameterization.
- isSubType(tp1, skipped2) &&
- isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2)
+ isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) &&
+ isSubType(tp1, skipped2)
}
compareRefined
+ case tp2: RecType =>
+ def compareRec = tp1.safeDealias match {
+ case tp1: RecType =>
+ val rthis1 = RecThis(tp1)
+ isSubType(tp1.parent, tp2.parent.substRecThis(tp2, rthis1))
+ case _ =>
+ val tp1stable = ensureStableSingleton(tp1)
+ isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable))
+ }
+ compareRec
+ case tp2 @ HKApply(tycon2, args2) =>
+ compareHkApply2(tp1, tp2, tycon2, args2)
+ case tp2 @ TypeLambda(tparams2, body2) =>
+ def compareHkLambda: Boolean = tp1.stripTypeVar match {
+ case tp1 @ TypeLambda(tparams1, body1) =>
+ /* Don't compare bounds of lambdas under language:Scala2, or t2994 will fail
+ * The issue is that, logically, bounds should compare contravariantly,
+ * but that would invalidate a pattern exploited in t2994:
+ *
+ * [X0 <: Number] -> Number <:< [X0] -> Any
+ *
+ * Under the new scheme, `[X0] -> Any` is NOT a kind that subsumes
+ * all other bounds. You'd have to write `[X0 >: Any <: Nothing] -> Any` instead.
+ * This might look weird, but is the only logically correct way to do it.
+ *
+ * Note: it would be nice if this could trigger a migration warning, but I
+ * am not sure how, since the code is buried so deep in subtyping logic.
+ */
+ def boundsOK =
+ ctx.scala2Mode ||
+ tparams1.corresponds(tparams2)((tparam1, tparam2) =>
+ isSubType(tparam2.paramBounds.subst(tp2, tp1), tparam1.paramBounds))
+ val saved = comparingLambdas
+ comparingLambdas = true
+ try
+ variancesConform(tparams1, tparams2) &&
+ boundsOK &&
+ isSubType(body1, body2.subst(tp2, tp1))
+ finally comparingLambdas = saved
+ case _ =>
+ if (!tp1.isHK) {
+ tp2 match {
+ case EtaExpansion(tycon2) if tycon2.symbol.isClass =>
+ return isSubType(tp1, tycon2)
+ case _ =>
+ }
+ }
+ fourthTry(tp1, tp2)
+ }
+ compareHkLambda
case OrType(tp21, tp22) =>
// Rewrite T1 <: (T211 & T212) | T22 to T1 <: (T211 | T22) and T1 <: (T212 | T22)
// and analogously for T1 <: T21 | (T221 & T222)
@@ -394,11 +437,11 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
return isSubType(tp1, OrType(tp21, tp221)) && isSubType(tp1, OrType(tp21, tp222))
case _ =>
}
- eitherIsSubType(tp1, tp21, tp1, tp22) || fourthTry(tp1, tp2)
+ either(isSubType(tp1, tp21), isSubType(tp1, tp22)) || fourthTry(tp1, tp2)
case tp2 @ MethodType(_, formals2) =>
def compareMethod = tp1 match {
case tp1 @ MethodType(_, formals1) =>
- (tp1.signature sameParams tp2.signature) &&
+ (tp1.signature consistentParams tp2.signature) &&
matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
tp1.isImplicit == tp2.isImplicit && // needed?
isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1))
@@ -409,7 +452,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
case tp2: PolyType =>
def comparePoly = tp1 match {
case tp1: PolyType =>
- (tp1.signature sameParams tp2.signature) &&
+ (tp1.signature consistentParams tp2.signature) &&
matchingTypeParams(tp1, tp2) &&
isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1))
case _ =>
@@ -465,7 +508,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
case _ =>
def isNullable(tp: Type): Boolean = tp.dealias match {
case tp: TypeRef => tp.symbol.isNullableClass
- case RefinedType(parent, _) => isNullable(parent)
+ case tp: RefinedOrRecType => isNullable(tp.parent)
case AndType(tp1, tp2) => isNullable(tp1) && isNullable(tp2)
case OrType(tp1, tp2) => isNullable(tp1) || isNullable(tp2)
case _ => false
@@ -491,9 +534,13 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
}
isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths
case tp1: RefinedType =>
- isNewSubType(tp1.parent, tp2) ||
- compareHkLambda(tp1, tp2, inOrder = true) ||
- compareAliasedRefined(tp1, tp2, inOrder = true)
+ isNewSubType(tp1.parent, tp2)
+ case tp1: RecType =>
+ isNewSubType(tp1.parent, tp2)
+ case tp1 @ HKApply(tycon1, args1) =>
+ compareHkApply1(tp1, tycon1, args1, tp2)
+ case EtaExpansion(tycon1) =>
+ isSubType(tycon1, tp2)
case AndType(tp11, tp12) =>
// Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 | T12) <: T2
// and analogously for T11 & (T121 | T122) & T12 <: T2
@@ -510,7 +557,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
return isSubType(AndType(tp11, tp121), tp2) && isSubType(AndType(tp11, tp122), tp2)
case _ =>
}
- eitherIsSubType(tp11, tp2, tp12, tp2)
+ either(isSubType(tp11, tp2), isSubType(tp12, tp2))
case JavaArrayType(elem1) =>
def compareJavaArray = tp2 match {
case JavaArrayType(elem2) => isSubType(elem1, elem2)
@@ -524,13 +571,170 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
false
}
+ /** Subtype test for the hk application `tp2 = tycon2[args2]`.
+ */
+ def compareHkApply2(tp1: Type, tp2: HKApply, tycon2: Type, args2: List[Type]): Boolean = {
+ val tparams = tycon2.typeParams
+ assert(tparams.nonEmpty)
+
+ /** True if `tp1` and `tp2` have compatible type constructors and their
+ * corresponding arguments are subtypes relative to their variance (see `isSubArgs`).
+ */
+ def isMatchingApply(tp1: Type): Boolean = tp1 match {
+ case HKApply(tycon1, args1) =>
+ tycon1.dealias match {
+ case tycon1: PolyParam =>
+ (tycon1 == tycon2 ||
+ canConstrain(tycon1) && tryInstantiate(tycon1, tycon2)) &&
+ isSubArgs(args1, args2, tparams)
+ case tycon1: TypeRef =>
+ tycon2.dealias match {
+ case tycon2: TypeRef if tycon1.symbol == tycon2.symbol =>
+ isSubType(tycon1.prefix, tycon2.prefix) &&
+ isSubArgs(args1, args2, tparams)
+ case _ =>
+ false
+ }
+ case tycon1: TypeVar =>
+ isMatchingApply(tycon1.underlying)
+ case tycon1: AnnotatedType =>
+ isMatchingApply(tycon1.underlying)
+ case _ =>
+ false
+ }
+ case _ =>
+ false
+ }
+
+ /** `param2` can be instantiated to a type application prefix of the LHS
+ * or to a type application prefix of one of the LHS base class instances
+ * and the resulting type application is a supertype of `tp1`,
+ * or fallback to fourthTry.
+ */
+ def canInstantiate(tycon2: PolyParam): Boolean = {
+
+ /** Let
+ *
+ * `tparams_1, ..., tparams_k-1` be the type parameters of the rhs
+ * `tparams1_1, ..., tparams1_n-1` be the type parameters of the constructor of the lhs
+ * `args1_1, ..., args1_n-1` be the type arguments of the lhs
+ * `d = n - k`
+ *
+ * Returns `true` iff `d >= 0` and `tycon2` can be instantiated to
+ *
+ * [tparams1_d, ... tparams1_n-1] -> tycon1a[args1_1, ..., args1_d-1, tparams1_d, ... tparams1_n-1]
+ *
+ * such that the resulting type application is a supertype of `tp1`.
+ */
+ def tyconOK(tycon1a: Type, args1: List[Type]) = {
+ var tycon1b = tycon1a
+ val tparams1a = tycon1a.typeParams
+ val lengthDiff = tparams1a.length - tparams.length
+ lengthDiff >= 0 && {
+ val tparams1 = tparams1a.drop(lengthDiff)
+ variancesConform(tparams1, tparams) && {
+ if (lengthDiff > 0)
+ tycon1b = TypeLambda(tparams1.map(_.paramName), tparams1.map(_.paramVariance))(
+ tl => tparams1.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds),
+ tl => tycon1a.appliedTo(args1.take(lengthDiff) ++
+ tparams1.indices.toList.map(PolyParam(tl, _))))
+ (ctx.mode.is(Mode.TypevarsMissContext) ||
+ tryInstantiate(tycon2, tycon1b.ensureHK)) &&
+ isSubType(tp1, tycon1b.appliedTo(args2))
+ }
+ }
+ }
+
+ tp1.widen match {
+ case tp1w @ HKApply(tycon1, args1) =>
+ tyconOK(tycon1, args1)
+ case tp1w =>
+ tp1w.typeSymbol.isClass && {
+ val classBounds = tycon2.classSymbols
+ def liftToBase(bcs: List[ClassSymbol]): Boolean = bcs match {
+ case bc :: bcs1 =>
+ classBounds.exists(bc.derivesFrom) &&
+ tyconOK(tp1w.baseTypeRef(bc), tp1w.baseArgInfos(bc)) ||
+ liftToBase(bcs1)
+ case _ =>
+ false
+ }
+ liftToBase(tp1w.baseClasses)
+ } ||
+ fourthTry(tp1, tp2)
+ }
+ }
+
+ /** Let `tycon2bounds` be the bounds of the RHS type constructor `tycon2`.
+ * Let `app2 = tp2` where the type constructor of `tp2` is replaced by
+ * `tycon2bounds.lo`.
+ * If both bounds are the same, continue with `tp1 <:< app2`;
+ * otherwise continue with either
+ *
+ * tp1 <:< tp2 using fourthTry (this might instantiate params in tp1)
+ * tp1 <:< app2 using isSubType (this might instantiate params in tp2)
+ */
+ def compareLower(tycon2bounds: TypeBounds, tyconIsTypeRef: Boolean): Boolean = {
+ def app2 = tycon2bounds.lo.applyIfParameterized(args2)
+ if (tycon2bounds.lo eq tycon2bounds.hi)
+ isSubType(tp1, if (tyconIsTypeRef) tp2.superType else app2)
+ else
+ either(fourthTry(tp1, tp2), isSubType(tp1, app2))
+ }
+
+ tycon2 match {
+ case param2: PolyParam =>
+ isMatchingApply(tp1) || {
+ if (canConstrain(param2)) canInstantiate(param2)
+ else compareLower(bounds(param2), tyconIsTypeRef = false)
+ }
+ case tycon2: TypeRef =>
+ isMatchingApply(tp1) ||
+ compareLower(tycon2.info.bounds, tyconIsTypeRef = true)
+ case _: TypeVar | _: AnnotatedType =>
+ isSubType(tp1, tp2.superType)
+ case _ =>
+ false
+ }
+ }
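// Illustration in surface Scala (hypothetical method): when the type constructor
// on the right is a constrainable parameter F, matching it against an application
// on the left instantiates F, which is what isMatchingApply/canInstantiate arrange.
object HkApplyInference {
  def rewrap[F[_], A](fa: F[A]): F[A] = fa
  val xs: List[Int] = rewrap(List(1, 2, 3)) // F is instantiated to List, A to Int
}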
+
+ /** Subtype test for the hk application `tp1 = tycon1[args1]`.
+ */
+ def compareHkApply1(tp1: HKApply, tycon1: Type, args1: List[Type], tp2: Type): Boolean =
+ tycon1 match {
+ case param1: PolyParam =>
+ def canInstantiate = tp2 match {
+ case AppliedType(tycon2, args2) =>
+ tryInstantiate(param1, tycon2.ensureHK) && isSubArgs(args1, args2, tycon2.typeParams)
+ case _ =>
+ false
+ }
+ canConstrain(param1) && canInstantiate ||
+ isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2)
+ case tycon1: TypeProxy =>
+ isSubType(tp1.superType, tp2)
+ case _ =>
+ false
+ }
+
+ /** Subtype test for corresponding arguments in `args1`, `args2` according to
+ * variances in type parameters `tparams`.
+ */
+ def isSubArgs(args1: List[Type], args2: List[Type], tparams: List[TypeParamInfo]): Boolean =
+ if (args1.isEmpty) args2.isEmpty
+ else args2.nonEmpty && {
+ val v = tparams.head.paramVariance
+ (v > 0 || isSubType(args2.head, args1.head)) &&
+ (v < 0 || isSubType(args1.head, args2.head))
+ } && isSubArgs(args1.tail, args2.tail, tparams)
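// Illustration in surface Scala: argument subtyping follows the parameter's
// variance, as in isSubArgs above: covariant positions are checked left-to-right,
// contravariant ones right-to-left.
object SubArgsIllustration {
  val co     = implicitly[List[Int] <:< List[AnyVal]]        // List is covariant
  val contra = implicitly[(AnyVal => Int) <:< (Int => Int)]  // Function1 is contravariant in its argument
}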
+
/** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where
* - `B` derives from one of the class symbols of `tp2`,
* - the type parameters of `B` match one-by-one the variances of `tparams`,
* - `B` satisfies predicate `p`.
*/
- private def testLifted(tp1: Type, tp2: Type, tparams: List[TypeSymbol], p: Type => Boolean): Boolean = {
- val classBounds = tp2.member(tpnme.hkApply).info.classSymbols
+ private def testLifted(tp1: Type, tp2: Type, tparams: List[TypeParamInfo], p: Type => Boolean): Boolean = {
+ val classBounds = tp2.classSymbols
def recur(bcs: List[ClassSymbol]): Boolean = bcs match {
case bc :: bcs1 =>
val baseRef = tp1.baseTypeRef(bc)
@@ -545,108 +749,28 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
recur(tp1.baseClasses)
}
- /** If `projection` is a hk projection T#$apply with a constrainable poly param
- * as type constructor and `other` is not a hk projection, then perform the following
- * steps:
- *
- * (1) If not `inOrder` then perform the next steps until they all succeed
- * for each base type of other which
- * - derives from a class bound of `projection`,
- * - has the same number of type parameters than `projection`
- * - has type parameter variances which conform to those of `projection`.
- * If `inOrder` then perform the same steps on the original `other` type.
- *
- * (2) Try to eta expand the constructor of `other`.
- *
- * (3a) In mode `TypevarsMissConetxt` replace the projection's hk constructor parameter
- * by the eta expansion of step (2) reapplied to the projection's arguments.
- * (3b) In normal mode, try to unify the projection's hk constructor parameter with
- * the eta expansion of step(2)
- *
- * (4) If `inOrder`, test `projection <: other` else test `other <: projection`.
+ /** Replace any top-level recursive type `{ z => T }` in `tp` with
+ * `[z := anchor]T`.
*/
- def compareHkApply(projection: NamedType, other: Type, inOrder: Boolean): Boolean = {
- def tryInfer(tp: Type): Boolean = ctx.traceIndented(i"compareHK($projection, $other, inOrder = $inOrder, constr = $tp)", subtyping) {
- tp match {
- case tp: TypeVar => tryInfer(tp.underlying)
- case param: PolyParam if canConstrain(param) =>
-
- def unifyWith(liftedOther: Type): Boolean = {
- subtyping.println(i"unify with $liftedOther")
- liftedOther.typeConstructor.widen match {
- case tycon: TypeRef if tycon.isEtaExpandable && tycon.typeParams.nonEmpty =>
- val (ok, projection1) =
- if (ctx.mode.is(Mode.TypevarsMissContext))
- (true, EtaExpansion(tycon).appliedTo(projection.argInfos))
- else
- (tryInstantiate(param, EtaExpansion(tycon)), projection)
- ok &&
- (if (inOrder) isSubType(projection1, other) else isSubType(other, projection1))
- case _ =>
- false
- }
- }
- val hkTypeParams = param.typeParams
- subtyping.println(i"classBounds = ${projection.prefix.member(tpnme.hkApply).info.classSymbols}")
- subtyping.println(i"base classes = ${other.baseClasses}")
- subtyping.println(i"type params = $hkTypeParams")
- if (inOrder) unifyWith(other)
- else testLifted(other, projection.prefix, hkTypeParams, unifyWith)
- case _ =>
- false
- }
+ private def fixRecs(anchor: SingletonType, tp: Type): Type = {
+ def fix(tp: Type): Type = tp.stripTypeVar match {
+ case tp: RecType => fix(tp.parent).substRecThis(tp, anchor)
+ case tp @ RefinedType(parent, rname, rinfo) => tp.derivedRefinedType(fix(parent), rname, rinfo)
+ case tp: PolyParam => fixOrElse(bounds(tp).hi, tp)
+ case tp: TypeProxy => fixOrElse(tp.underlying, tp)
+ case tp: AndOrType => tp.derivedAndOrType(fix(tp.tp1), fix(tp.tp2))
+ case tp => tp
}
- projection.name == tpnme.hkApply && !other.isHKApply &&
- tryInfer(projection.prefix.typeConstructor.dealias)
- }
-
- /** Compare type lambda with non-lambda type. */
- def compareHkLambda(rt: RefinedType, other: Type, inOrder: Boolean) = rt match {
- case TypeLambda(vs, args, body) =>
- other.isInstanceOf[TypeRef] &&
- args.length == other.typeParams.length && {
- val applied = other.appliedTo(argRefs(rt, args.length))
- if (inOrder) isSubType(body, applied)
- else body match {
- case body: TypeBounds => body.contains(applied)
- case _ => isSubType(applied, body)
- }
- }
- case _ =>
- false
- }
-
- /** Say we are comparing a refined type `P{type M = U}` or `P{type M >: L <: U}`.
- * If P#M refers to a BaseTypeArg aliased to some other typeref P#N,
- * do the same comparison with `P{type N = U}` or `P{type N >: L <: U}`, respectively.
- * This allows to handle situations involving named type params like this one:
- *
- * trait Lambda[type Elem]
- * trait Lst[T] extends Lambda[T]
- *
- * compareAliasedRefined is necessary so we establish that
- *
- * Lst[Int] = Lst[Elem = Int]
- */
- private def compareAliasedRefined(rt: RefinedType, other: Type, inOrder: Boolean) = {
- val mbr = refinedSymbol(rt)
- mbr.is(BaseTypeArg) && {
- mbr.info match {
- case TypeAlias(TypeRef(_, aliasName)) =>
- val rt1 = rt.derivedRefinedType(rt.parent, aliasName, rt.refinedInfo)
- subtyping.println(i"rewiring $rt to $rt1 in comparison with $other")
- if (inOrder) isSubType(rt1, other) else isSubType(other, rt1)
- case _ =>
- false
- }
+ def fixOrElse(tp: Type, fallback: Type) = {
+ val tp1 = fix(tp)
+ if (tp1 ne tp) tp1 else fallback
}
+ fix(tp)
}
- /** The symbol referred to in the refinement of `rt` */
- private def refinedSymbol(rt: RefinedType) = rt.parent.member(rt.refinedName).symbol
-
- /** Returns true iff either `tp11 <:< tp21` or `tp12 <:< tp22`, trying at the same time
- * to keep the constraint as wide as possible. Specifically, if
+ /** Returns true iff the result of evaluating either `op1` or `op2` is true,
+ * trying at the same time to keep the constraint as wide as possible.
+ * E.g., if
*
* tp11 <:< tp12 = true with post-constraint c1
* tp12 <:< tp22 = true with post-constraint c2
@@ -673,15 +797,15 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
* Here, each precondition leads to a different constraint, and neither of
* the two post-constraints subsumes the other.
*/
- private def eitherIsSubType(tp11: Type, tp21: Type, tp12: Type, tp22: Type) = {
+ private def either(op1: => Boolean, op2: => Boolean): Boolean = {
val preConstraint = constraint
- isSubType(tp11, tp21) && {
+ op1 && {
val leftConstraint = constraint
constraint = preConstraint
- if (!(isSubType(tp12, tp22) && subsumes(leftConstraint, constraint, preConstraint)))
+ if (!(op2 && subsumes(leftConstraint, constraint, preConstraint)))
constraint = leftConstraint
true
- } || isSubType(tp12, tp22)
+ } || op2
}
/** Like tp1 <:< tp2, but returns false immediately if we know that
@@ -699,27 +823,30 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
* rebase both itself and the member info of `tp` on a freshly created skolem type.
*/
protected def hasMatchingMember(name: Name, tp1: Type, tp2: RefinedType): Boolean = {
- val rebindNeeded = tp2.refinementRefersToThis
- val base = if (rebindNeeded) ensureStableSingleton(tp1) else tp1
- val rinfo2 = if (rebindNeeded) tp2.refinedInfo.substRefinedThis(tp2, base) else tp2.refinedInfo
+ val rinfo2 = tp2.refinedInfo
+ val mbr = tp1.member(name)
+
def qualifies(m: SingleDenotation) = isSubType(m.info, rinfo2)
- def memberMatches(mbr: Denotation): Boolean = mbr match { // inlined hasAltWith for performance
+
+ def memberMatches: Boolean = mbr match { // inlined hasAltWith for performance
case mbr: SingleDenotation => qualifies(mbr)
case _ => mbr hasAltWith qualifies
}
- /*>|>*/ ctx.traceIndented(i"hasMatchingMember($base . $name :? ${tp2.refinedInfo}) ${base.member(name).info.show} $rinfo2", subtyping) /*<|<*/ {
- memberMatches(base member name) ||
- tp1.isInstanceOf[SingletonType] &&
- { // special case for situations like:
- // class C { type T }
- // val foo: C
- // foo.type <: C { type T = foo.T }
- rinfo2 match {
- case rinfo2: TypeAlias =>
- !defn.isBottomType(base.widen) && (base select name) =:= rinfo2.alias
- case _ => false
- }
- }
+
+ // special case for situations like:
+ // class C { type T }
+ // val foo: C
+ // foo.type <: C { type T = foo.T }
+ def selfReferentialMatch = tp1.isInstanceOf[SingletonType] && {
+ rinfo2 match {
+ case rinfo2: TypeAlias =>
+ !defn.isBottomType(tp1.widen) && (tp1 select name) =:= rinfo2.alias
+ case _ => false
+ }
+ }
+
+ /*>|>*/ ctx.traceIndented(i"hasMatchingMember($tp1 . $name :? ${tp2.refinedInfo}) ${mbr.info.show} $rinfo2", subtyping) /*<|<*/ {
+ memberMatches || selfReferentialMatch
}
}
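// Illustration in surface Scala of the special case above: a singleton may be
// narrowed against a refinement that aliases its own member.
object SelfReferentialIllustration {
  class C { type T }
  val foo = new C
  val narrowed: C { type T = foo.T } = foo
}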
@@ -738,11 +865,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
* @return The parent type of `tp2` after skipping the matching refinements.
*/
private def skipMatching(tp1: Type, tp2: RefinedType): Type = tp1 match {
- case tp1 @ RefinedType(parent1, name1)
- if name1 == tp2.refinedName &&
- tp1.refinedInfo.isInstanceOf[TypeAlias] &&
- !tp2.refinementRefersToThis &&
- !tp1.refinementRefersToThis =>
+ case tp1 @ RefinedType(parent1, name1, rinfo1: TypeAlias) if name1 == tp2.refinedName =>
tp2.parent match {
case parent2: RefinedType => skipMatching(parent1, parent2)
case parent2 => parent2
@@ -773,7 +896,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
/** A type has been covered previously in subtype checking if it
* is some combination of TypeRefs that point to classes, where the
- * combiners are RefinedTypes, AndTypes or AnnotatedTypes.
+ * combiners are RefinedTypes, RecTypes, AndTypes or AnnotatedTypes.
* One exception: Refinements referring to basetype args are never considered
* to be already covered. This is necessary because such refined types might
* still need to be compared with a compareAliasRefined.
@@ -781,7 +904,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
private def isCovered(tp: Type): Boolean = tp.dealias.stripTypeVar match {
case tp: TypeRef => tp.symbol.isClass && tp.symbol != NothingClass && tp.symbol != NullClass
case tp: ProtoType => false
- case tp: RefinedType => isCovered(tp.parent) && !refinedSymbol(tp).is(BaseTypeArg)
+ case tp: RefinedOrRecType => isCovered(tp.parent)
case tp: AnnotatedType => isCovered(tp.underlying)
case AndType(tp1, tp2) => isCovered(tp1) && isCovered(tp2)
case _ => false
@@ -874,10 +997,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
formals2.isEmpty
}
- /** Do poly types `poly1` and `poly2` have type parameters that
+ /** Do generic types `poly1` and `poly2` have type parameters that
* have the same bounds (after renaming one set to the other)?
*/
- private def matchingTypeParams(poly1: PolyType, poly2: PolyType): Boolean =
+ private def matchingTypeParams(poly1: GenericType, poly2: GenericType): Boolean =
(poly1.paramBounds corresponds poly2.paramBounds)((b1, b2) =>
isSameType(b1, b2.subst(poly2, poly1)))
@@ -1047,7 +1170,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
val t2 = distributeAnd(tp2, tp1)
if (t2.exists) t2
else if (erased) erasedGlb(tp1, tp2, isJava = false)
- else liftIfHK(tp1, tp2, AndType(_, _))
+ else liftIfHK(tp1, tp2, AndType(_, _), _ & _)
}
}
@@ -1071,7 +1194,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
val t2 = distributeOr(tp2, tp1)
if (t2.exists) t2
else if (erased) erasedLub(tp1, tp2)
- else liftIfHK(tp1, tp2, OrType(_, _))
+ else liftIfHK(tp1, tp2, OrType(_, _), _ | _)
}
}
@@ -1092,14 +1215,25 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
* allowing both interpretations. A possible remedy is to be somehow stricter
* in where we allow which interpretation.
*/
- private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type) = {
+ private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type, original: (Type, Type) => Type) = {
val tparams1 = tp1.typeParams
val tparams2 = tp2.typeParams
- def onlyNamed(tparams: List[TypeSymbol]) = tparams.forall(!_.is(ExpandedName))
- if (tparams1.isEmpty || tparams2.isEmpty ||
- onlyNamed(tparams1) && onlyNamed(tparams2)) op(tp1, tp2)
- else if (tparams1.length != tparams2.length) mergeConflict(tp1, tp2)
- else hkCombine(tp1, tp2, tparams1, tparams2, op)
+ if (tparams1.isEmpty)
+ if (tparams2.isEmpty) op(tp1, tp2)
+ else original(tp1, tp2.appliedTo(tp2.typeParams.map(_.paramBoundsAsSeenFrom(tp2))))
+ else if (tparams2.isEmpty)
+ original(tp1.appliedTo(tp1.typeParams.map(_.paramBoundsAsSeenFrom(tp1))), tp2)
+ else
+ TypeLambda(
+ paramNames = tpnme.syntheticLambdaParamNames(tparams1.length),
+ variances = (tparams1, tparams2).zipped.map((tparam1, tparam2) =>
+ (tparam1.paramVariance + tparam2.paramVariance) / 2))(
+ paramBoundsExp = tl => (tparams1, tparams2).zipped.map((tparam1, tparam2) =>
+ tl.lifted(tparams1, tparam1.paramBoundsAsSeenFrom(tp1)).bounds &
+ tl.lifted(tparams2, tparam2.paramBoundsAsSeenFrom(tp2)).bounds),
+ resultTypeExp = tl =>
+ original(tl.lifted(tparams1, tp1).appliedTo(tl.paramRefs),
+ tl.lifted(tparams2, tp2).appliedTo(tl.paramRefs)))
}
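A rough source-level picture of what the lifting above computes, as a hypothetical sketch (the names `F1`, `F2` and `Lifted` are invented): intersecting two unapplied type constructors of the same kind is read as a type lambda that intersects their applications.

    object LiftIfHKSketch {
      trait F1[+X]
      trait F2[+X]
      // Conceptually, F1 & F2 (both of kind * -> *) is read as the type lambda
      // [X] => F1[X] & F2[X]; a type member stands in for that lambda here.
      trait Lifted { type Apply[+X] = F1[X] with F2[X] }
    }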
/** Try to distribute `&` inside type, detect and handle conflicts
@@ -1112,13 +1246,28 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
case tp1: RefinedType =>
tp2 match {
case tp2: RefinedType if tp1.refinedName == tp2.refinedName =>
- tp1.derivedRefinedType(
- tp1.parent & tp2.parent,
- tp1.refinedName,
- tp1.refinedInfo & tp2.refinedInfo.substRefinedThis(tp2, RefinedThis(tp1)))
+ // Given two refinements `T1 { X = S1 }` and `T2 { X = S2 }`, if `S1 =:= S2`
+ // (possibly by instantiating type parameters), rewrite to `T1 & T2 { X = S1 }`.
+ // Otherwise rewrite to `T1 & T2 { X B }` where `B` is the conjunction of
+ // the bounds of `X` in `T1` and `T2`.
+ // The first rule above is contentious because it cuts the constraint set.
+ // But without it we would replace the two aliases by
+ // `T { X >: S1 | S2 <: S1 & S2 }`, which looks weird and is probably
+ // not what's intended.
+ val rinfo1 = tp1.refinedInfo
+ val rinfo2 = tp2.refinedInfo
+ val parent = tp1.parent & tp2.parent
+ val rinfo =
+ if (rinfo1.isAlias && rinfo2.isAlias && isSameType(rinfo1, rinfo2))
+ rinfo1
+ else
+ rinfo1 & rinfo2
+ tp1.derivedRefinedType(parent, tp1.refinedName, rinfo)
case _ =>
NoType
}
+ case tp1: RecType =>
+ tp1.rebind(distributeAnd(tp1.parent, tp2))
case tp1: TypeBounds =>
tp2 match {
case tp2: TypeBounds => tp1 & tp2
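A hypothetical surface-level illustration of the alias-merging rule documented above (the names `T1`, `T2`, `Merged` and `use` are invented): when both refinements alias the same type, the intersection keeps the single alias, so the member stays known exactly.

    object RefinementAndSketch {
      trait T1 { type X }
      trait T2 { type X }
      type Merged = (T1 { type X = String }) with (T2 { type X = String })
      // Both sides alias X to String, so m.X is known to be String:
      def use(m: Merged): m.X = "merged"
    }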
@@ -1198,10 +1347,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
case _ =>
mergeConflict(tp1, tp2)
}
- case tp1: PolyType =>
+ case tp1: GenericType =>
tp2 match {
- case tp2: PolyType if matchingTypeParams(tp1, tp2) =>
- tp1.derivedPolyType(
+ case tp2: GenericType if matchingTypeParams(tp1, tp2) =>
+ tp1.derivedGenericType(
mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName),
tp1.paramBounds, tp1.resultType | tp2.resultType.subst(tp2, tp1))
case _ =>
@@ -1224,7 +1373,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
case bounds: TypeBounds => i"type bounds $bounds"
case _ => tp.show
}
- throw new MergeError(s"cannot merge ${showType(tp1)} with ${showType(tp2)}", tp1, tp2)
+ if (true) throw new MergeError(s"cannot merge ${showType(tp1)} with ${showType(tp2)}", tp1, tp2)
+ else throw new Error(s"cannot merge ${showType(tp1)} with ${showType(tp2)}") // flip condition for debugging
}
/** Merge two lists of names. If names in corresponding positions match, keep them,
@@ -1376,25 +1526,18 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) {
}
override def addConstraint(param: PolyParam, bound: Type, fromBelow: Boolean): Boolean =
- traceIndented(s"add constraint $param ${if (fromBelow) ">:" else "<:"} $bound $frozenConstraint") {
+ traceIndented(i"add constraint $param ${if (fromBelow) ">:" else "<:"} $bound $frozenConstraint, constraint = ${ctx.typerState.constraint}") {
super.addConstraint(param, bound, fromBelow)
}
override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx)
- override def compareHkApply(projection: NamedType, other: Type, inOrder: Boolean) =
- if (projection.name == tpnme.hkApply)
- traceIndented(i"compareHkApply $projection, $other, $inOrder") {
- super.compareHkApply(projection, other, inOrder)
- }
- else super.compareHkApply(projection, other, inOrder)
-
- override def compareHkLambda(rt: RefinedType, other: Type, inOrder: Boolean) =
- if (rt.refinedName == tpnme.hkApply)
- traceIndented(i"compareHkLambda $rt, $other, $inOrder") {
- super.compareHkLambda(rt, other, inOrder)
- }
- else super.compareHkLambda(rt, other, inOrder)
+ override def compareHkApply2(tp1: Type, tp2: HKApply, tycon2: Type, args2: List[Type]): Boolean = {
+ def addendum = ""
+ traceIndented(i"compareHkApply $tp1, $tp2$addendum") {
+ super.compareHkApply2(tp1, tp2, tycon2, args2)
+ }
+ }
override def toString = "Subtype trace:" + { try b.toString finally b.clear() }
}
diff --git a/src/dotty/tools/dotc/core/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala
index 0894f34b2..a1dab16cb 100644
--- a/src/dotty/tools/dotc/core/TypeErasure.scala
+++ b/src/dotty/tools/dotc/core/TypeErasure.scala
@@ -430,7 +430,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
// constructor method should not be semi-erased.
else if (isConstructor && isDerivedValueClass(sym)) eraseNormalClassRef(tp)
else this(tp)
- case RefinedType(parent, _) if !(parent isRef defn.ArrayClass) =>
+ case RefinedType(parent, _, _) if !(parent isRef defn.ArrayClass) =>
eraseResult(parent)
case _ =>
this(tp)
@@ -474,6 +474,9 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
sigName(tp.widen)
case ExprType(rt) =>
sigName(defn.FunctionOf(Nil, rt))
+ case tp: TypeVar =>
+ val inst = tp.instanceOpt
+ if (inst.exists) sigName(inst) else tpnme.Uninstantiated
case tp: TypeProxy =>
sigName(tp.underlying)
case ErrorType | WildcardType =>
diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala
index 1288c0b23..80e0fc6f1 100644
--- a/src/dotty/tools/dotc/core/TypeOps.scala
+++ b/src/dotty/tools/dotc/core/TypeOps.scala
@@ -6,6 +6,7 @@ import Contexts._, Types._, Symbols._, Names._, Flags._, Scopes._
import SymDenotations._, Denotations.SingleDenotation
import config.Printers._
import util.Positions._
+import NameOps._
import Decorators._
import StdNames._
import Annotations._
@@ -193,10 +194,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
case tp: RefinedType => isClassRef(tp.parent)
case _ => false
}
- def next(tp: TypeProxy) = tp.underlying match {
- case TypeBounds(_, hi) => hi
- case nx => nx
- }
+
/** If `tp1` and `tp2` are typebounds, try to make one fit into the other
* or to make them equal, by instantiating uninstantiated type variables.
*/
@@ -226,19 +224,24 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
return tp1.derivedRefinedType(
approximateUnion(OrType(tp1.parent, tp2.parent)),
tp1.refinedName,
- homogenizedUnion(tp1.refinedInfo, tp2.refinedInfo).substRefinedThis(tp2, RefinedThis(tp1)))
+ homogenizedUnion(tp1.refinedInfo, tp2.refinedInfo))
//.ensuring { x => println(i"approx or $tp1 | $tp2 = $x\n constr = ${ctx.typerState.constraint}"); true } // DEBUG
case _ =>
}
case _ =>
}
+
tp1 match {
+ case tp1: RecType =>
+ tp1.rebind(approximateOr(tp1.parent, tp2))
case tp1: TypeProxy if !isClassRef(tp1) =>
- approximateUnion(next(tp1) | tp2)
+ approximateUnion(tp1.superType | tp2)
case _ =>
tp2 match {
+ case tp2: RecType =>
+ tp2.rebind(approximateOr(tp1, tp2.parent))
case tp2: TypeProxy if !isClassRef(tp2) =>
- approximateUnion(tp1 | next(tp2))
+ approximateUnion(tp1 | tp2.superType)
case _ =>
val commonBaseClasses = tp.mapReduceOr(_.baseClasses)(intersect)
val doms = dominators(commonBaseClasses, Nil)
@@ -252,16 +255,32 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
if (ctx.featureEnabled(defn.LanguageModuleClass, nme.keepUnions)) tp
else tp match {
case tp: OrType =>
- approximateOr(tp.tp1, tp.tp2)
+ approximateOr(tp.tp1, tp.tp2) // Maybe refactor using liftToRec?
case tp @ AndType(tp1, tp2) =>
tp derived_& (approximateUnion(tp1), approximateUnion(tp2))
case tp: RefinedType =>
tp.derivedRefinedType(approximateUnion(tp.parent), tp.refinedName, tp.refinedInfo)
+ case tp: RecType =>
+ tp.rebind(approximateUnion(tp.parent))
case _ =>
tp
}
}
+ /** Not currently needed:
+ *
+ def liftToRec(f: (Type, Type) => Type)(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ def f2(tp1: Type, tp2: Type): Type = tp2 match {
+ case tp2: RecType => tp2.rebind(f(tp1, tp2.parent))
+ case _ => f(tp1, tp2)
+ }
+ tp1 match {
+ case tp1: RecType => tp1.rebind(f2(tp1.parent, tp2))
+ case _ => f2(tp1, tp2)
+ }
+ }
+ */
+
private def enterArgBinding(formal: Symbol, info: Type, cls: ClassSymbol, decls: Scope) = {
val lazyInfo = new LazyType { // needed so we do not force `formal`.
def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
@@ -361,28 +380,37 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
var formals: SimpleMap[TypeName, Symbol] = SimpleMap.Empty // A map of all formal parent parameter
// Strip all refinements from parent type, populating `refinements` and `formals` maps.
- def normalizeToRef(tp: Type): TypeRef = tp.dealias match {
- case tp: TypeRef =>
- tp
- case tp @ RefinedType(tp1, name: TypeName) =>
- tp.refinedInfo match {
- case TypeAlias(TypeRef(pre, name1)) if name1 == name && (pre =:= cls.thisType) =>
- // Don't record refinements of the form X = this.X (These can arise using named parameters).
- typr.println(s"dropping refinement $tp")
- case _ =>
- val prevInfo = refinements(name)
- refinements = refinements.updated(name,
- if (prevInfo == null) tp.refinedInfo else prevInfo & tp.refinedInfo)
- formals = formals.updated(name, tp1.typeParamNamed(name))
- }
- normalizeToRef(tp1)
- case ErrorType =>
- defn.AnyType
- case AnnotatedType(tpe, _) =>
- normalizeToRef(tpe)
- case _ =>
- throw new TypeError(s"unexpected parent type: $tp")
+ def normalizeToRef(tp: Type): TypeRef = {
+ def fail = throw new TypeError(s"unexpected parent type: $tp")
+ tp.dealias match {
+ case tp: TypeRef =>
+ tp
+ case tp @ RefinedType(tp1, name: TypeName, rinfo) =>
+ rinfo match {
+ case TypeAlias(TypeRef(pre, name1)) if name1 == name && (pre =:= cls.thisType) =>
+ // Don't record refinements of the form X = this.X (These can arise using named parameters).
+ typr.println(s"dropping refinement $tp")
+ case _ =>
+ val prevInfo = refinements(name)
+ refinements = refinements.updated(name,
+ if (prevInfo == null) tp.refinedInfo else prevInfo & tp.refinedInfo)
+ formals = formals.updated(name, tp1.typeParamNamed(name))
+ }
+ normalizeToRef(tp1)
+ case ErrorType =>
+ defn.AnyType
+ case AnnotatedType(tpe, _) =>
+ normalizeToRef(tpe)
+ case HKApply(tycon: TypeRef, args) =>
+ tycon.info match {
+ case TypeAlias(alias) => normalizeToRef(alias.appliedTo(args))
+ case _ => fail
+ }
+ case _ =>
+ fail
+ }
}
+
val parentRefs = parents map normalizeToRef
// Enter all refinements into current scope.
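At the source level, the new `HKApply` branch in `normalizeToRef` corresponds to parents written through parameterized type aliases. A hypothetical example (the names `Box`, `Boxed` and `IntBox` are invented), assuming the usual dealiasing of parents:

    object ParentAliasSketch {
      trait Box[A] { def value: A }
      type Boxed[A] = Box[A]
      // The parent `Boxed[Int]` is an application of an alias type constructor;
      // normalizing it dealiases to the class reference Box[Int].
      class IntBox(val value: Int) extends Boxed[Int]
    }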
diff --git a/src/dotty/tools/dotc/core/TypeParamInfo.scala b/src/dotty/tools/dotc/core/TypeParamInfo.scala
new file mode 100644
index 000000000..647c895db
--- /dev/null
+++ b/src/dotty/tools/dotc/core/TypeParamInfo.scala
@@ -0,0 +1,40 @@
+package dotty.tools.dotc.core
+
+import Names.TypeName
+import Contexts.Context
+import Types.{Type, TypeBounds}
+
+/** A common super trait of Symbol and LambdaParam.
+ * Used to capture the attributes of type parameters which can be implemented as either.
+ */
+trait TypeParamInfo {
+
+ /** Is this the info of a type parameter? Will return `false` for symbols
+ * that are not type parameters.
+ */
+ def isTypeParam(implicit ctx: Context): Boolean
+
+ /** The name of the type parameter */
+ def paramName(implicit ctx: Context): TypeName
+
+ /** The info of the type parameter */
+ def paramBounds(implicit ctx: Context): TypeBounds
+
+ /** The info of the type parameter as seen from a prefix type.
+ * For type parameter symbols, this is the `memberInfo` as seen from `prefix`.
+ * For type lambda parameters, it's the same as `paramBounds` as
+ * `asSeenFrom` has already been applied to the whole type lambda.
+ */
+ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds
+
+ /** The parameter bounds, or the completer if the type parameter
+ * is an as-yet uncompleted symbol.
+ */
+ def paramBoundsOrCompleter(implicit ctx: Context): Type
+
+ /** The variance of the type parameter */
+ def paramVariance(implicit ctx: Context): Int
+
+ /** A type that refers to the parameter */
+ def paramRef(implicit ctx: Context): Type
+}
\ No newline at end of file
diff --git a/src/dotty/tools/dotc/core/TyperState.scala b/src/dotty/tools/dotc/core/TyperState.scala
index 36f026107..e64335218 100644
--- a/src/dotty/tools/dotc/core/TyperState.scala
+++ b/src/dotty/tools/dotc/core/TyperState.scala
@@ -127,7 +127,7 @@ extends TyperState(r) {
}
override def gc()(implicit ctx: Context): Unit = {
- val toCollect = new mutable.ListBuffer[PolyType]
+ val toCollect = new mutable.ListBuffer[GenericType]
constraint foreachTypeVar { tvar =>
if (!tvar.inst.exists) {
val inst = instType(tvar)
diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala
index f514a329e..fa402f9fc 100644
--- a/src/dotty/tools/dotc/core/Types.scala
+++ b/src/dotty/tools/dotc/core/Types.scala
@@ -51,10 +51,12 @@ object Types {
* | | +--- SuperType
* | | +--- ConstantType
* | | +--- MethodParam
- * | | +----RefinedThis
+ * | | +----RecThis
* | | +--- SkolemType
* | +- PolyParam
- * | +- RefinedType
+ * | +- RefinedOrRecType -+-- RefinedType
+ * | | -+-- RecType
+ * | +- HKApply
* | +- TypeBounds
* | +- ExprType
* | +- AnnotatedType
@@ -64,8 +66,9 @@ object Types {
* +- OrType
* +- MethodType -----+- ImplicitMethodType
* | +- JavaMethodType
- * +- PolyType
* +- ClassInfo
+ * +- GenericType ----+- PolyType
+ * | +- TypeLambda
* |
* +- NoType
* +- NoPrefix
@@ -97,7 +100,7 @@ object Types {
final def isStable(implicit ctx: Context): Boolean = stripTypeVar match {
case tp: TermRef => tp.termSymbol.isStable && tp.prefix.isStable
case _: SingletonType | NoPrefix => true
- case tp: RefinedType => tp.parent.isStable
+ case tp: RefinedOrRecType => tp.parent.isStable
case _ => false
}
@@ -113,10 +116,9 @@ object Types {
case TypeAlias(tp) => tp.isRef(sym)
case _ => this1.symbol eq sym
}
- case this1: RefinedType =>
- this1.parent.isRef(sym)
- case _ =>
- false
+ case this1: RefinedOrRecType => this1.parent.isRef(sym)
+ case this1: HKApply => this1.superType.isRef(sym)
+ case _ => false
}
/** Is this type a (neither aliased nor applied) reference to class `sym`? */
@@ -146,7 +148,7 @@ object Types {
def loop(tp: Type) = tp match {
case tp: TypeRef =>
val sym = tp.symbol
- if (sym.isClass) sym.derivesFrom(cls) else tp.underlying.derivesFrom(cls)
+ if (sym.isClass) sym.derivesFrom(cls) else tp.superType.derivesFrom(cls)
case tp: TypeProxy =>
tp.underlying.derivesFrom(cls)
case tp: AndType =>
@@ -213,29 +215,6 @@ object Types {
/** Is this an alias TypeBounds? */
def isAlias: Boolean = this.isInstanceOf[TypeAlias]
- /** Is this type a transitive refinement of the given type?
- * This is true if the type consists of 0 or more refinements or other
- * non-singleton proxies that lead to the `prefix` type. ClassInfos with
- * the same class are counted as equal for this purpose.
- */
- def refines(prefix: Type)(implicit ctx: Context): Boolean = {
- val prefix1 = prefix.dealias
- def loop(tp: Type): Boolean =
- (tp eq prefix1) || {
- tp match {
- case base: ClassInfo =>
- prefix1 match {
- case prefix1: ClassInfo => base.cls eq prefix1.cls
- case _ => false
- }
- case base: SingletonType => false
- case base: TypeProxy => loop(base.underlying)
- case _ => false
- }
- }
- loop(this)
- }
-
// ----- Higher-order combinators -----------------------------------
/** Returns true if there is a part of this type that satisfies predicate `p`.
@@ -286,8 +265,8 @@ object Types {
case _ => NoSymbol
}
- /** The least class or trait of which this type is a subtype, or
- * NoSymbol if none exists (either because this type is not a
+ /** The least class or trait of which this type is a subtype or parameterized
+ * instance, or NoSymbol if none exists (either because this type is not a
* value type, or because superclasses are ambiguous).
*/
final def classSymbol(implicit ctx: Context): Symbol = this match {
@@ -295,7 +274,7 @@ object Types {
constant.tpe.classSymbol
case tp: TypeRef =>
val sym = tp.symbol
- if (sym.isClass) sym else tp.underlying.classSymbol
+ if (sym.isClass) sym else tp.superType.classSymbol
case tp: ClassInfo =>
tp.cls
case tp: SingletonType =>
@@ -325,7 +304,7 @@ object Types {
tp.cls :: Nil
case tp: TypeRef =>
val sym = tp.symbol
- if (sym.isClass) sym.asClass :: Nil else tp.underlying.classSymbols
+ if (sym.isClass) sym.asClass :: Nil else tp.superType.classSymbols
case tp: TypeProxy =>
tp.underlying.classSymbols
case AndType(l, r) =>
@@ -447,6 +426,10 @@ object Types {
})
case tp: PolyParam =>
goParam(tp)
+ case tp: RecType =>
+ goRec(tp)
+ case tp: HKApply =>
+ goApply(tp)
case tp: TypeProxy =>
go(tp.underlying)
case tp: ClassInfo =>
@@ -462,11 +445,49 @@ object Types {
case _ =>
NoDenotation
}
+ def goRec(tp: RecType) =
+ if (tp.parent == null) NoDenotation
+ else {
+ //println(s"find member $pre . $name in $tp")
+
+ // We have to be careful because we might open the same (wrt eq) recursive type
+ // twice during findMember which risks picking the wrong prefix in the `substRecThis(rt, pre)`
+ // call below. To avoid this problem we do a defensive copy of the recursive
+ // type first. But doing this always risks being inefficient, and we ran into
+ // stack overflows when compiling pos/hk.scala under the refinement encoding
+ // of hk-types. So we only do a copy if the type
+ // is visited again in a recursive call to `findMember`, as tracked by `tp.opened`.
+ // Furthermore, if this happens we mark the original recursive type with `openedTwice`
+ // which means that we always defensively copy the type in the future. This second
+ // measure is necessary because findMember calls might be cached, so they do not
+ // necessarily appear in nested order.
+ // Without the defensive copy, Typer.scala fails to compile at the line
+ //
+ // untpd.rename(lhsCore, setterName).withType(setterType), WildcardType)
+ //
+ // because the subtype check
+ //
+ // ThisTree[Untyped]#ThisTree[Typed] <: Tree[Typed]
+ //
+ // fails (in fact it thinks the underlying type of the LHS is `Tree[Untyped]`.)
+ //
+ // Without the `openedTwice` trick, Typer.scala fails to Ycheck
+ // at phase resolveSuper.
+ val rt =
+ if (tp.opened) { // defensive copy
+ tp.openedTwice = true
+ RecType(rt => tp.parent.substRecThis(tp, RecThis(rt)))
+ } else tp
+ rt.opened = true
+ try go(rt.parent).mapInfo(_.substRecThis(rt, pre))
+ finally {
+ if (!rt.openedTwice) rt.opened = false
+ }
+ }
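The bookkeeping above can be pictured with a standalone sketch of the same re-entrancy pattern (entirely hypothetical, not the compiler's RecType): copy the structure only when a traversal finds it already open, and record via a second flag that copies are needed from then on.

    // Minimal guard in the style of `opened` / `openedTwice`.
    final class Node(mkBody: Node => List[Int]) {
      private var opened = false       // currently being traversed
      private var openedTwice = false  // was ever traversed re-entrantly
      lazy val body: List[Int] = mkBody(this)

      def visit[A](f: List[Int] => A): A = {
        // On re-entry, mark the original and work on a defensive copy.
        val self = if (opened) { openedTwice = true; new Node(_ => body) } else this
        self.opened = true
        try f(self.body)
        finally if (!self.openedTwice) self.opened = false
      }
    }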
+
def goRefined(tp: RefinedType) = {
val pdenot = go(tp.parent)
- val rinfo =
- if (tp.refinementRefersToThis) tp.refinedInfo.substRefinedThis(tp, pre)
- else tp.refinedInfo
+ val rinfo = tp.refinedInfo
if (name.isTypeName) { // simplified case that runs more efficiently
val jointInfo =
if (rinfo.isAlias) rinfo
@@ -492,6 +513,15 @@ object Types {
safeIntersection = ctx.pendingMemberSearches.contains(name))
}
}
+
+ def goApply(tp: HKApply) = tp.tycon match {
+ case tl: TypeLambda =>
+ go(tl.resType).mapInfo(info =>
+ tl.derivedLambdaAbstraction(tl.paramNames, tl.paramBounds, info).appliedTo(tp.args))
+ case _ =>
+ go(tp.superType)
+ }
+
def goThis(tp: ThisType) = {
val d = go(tp.underlying)
if (d.exists)
@@ -534,10 +564,11 @@ object Types {
ctx.pendingMemberSearches = name :: ctx.pendingMemberSearches
}
+ //assert(ctx.findMemberCount < 20)
try go(this)
catch {
case ex: Throwable =>
- core.println(i"findMember exception for $this member $name")
+ core.println(i"findMember exception for $this member $name, pre = $pre")
throw ex // DEBUG
}
finally {
@@ -823,16 +854,14 @@ object Types {
case tp: TypeVar =>
val tp1 = tp.instanceOpt
if (tp1.exists) tp1.dealias else tp
- case tp: LazyRef =>
- tp.ref.dealias
case tp: AnnotatedType =>
tp.derivedAnnotatedType(tp.tpe.dealias, tp.annot)
- case tp => tp
- }
-
- /** If this is a TypeAlias type, its alias otherwise this type itself */
- final def followTypeAlias(implicit ctx: Context): Type = this match {
- case TypeAlias(alias) => alias
+ case tp: LazyRef =>
+ tp.ref.dealias
+ case app @ HKApply(tycon, args) =>
+ val tycon1 = tycon.dealias
+ if (tycon1 ne tycon) app.superType.dealias
+ else this
case _ => this
}
@@ -859,12 +888,16 @@ object Types {
if (tp.symbol.isClass) tp
else if (tp.symbol.isAliasType) tp.underlying.underlyingClassRef(refinementOK)
else NoType
- case tp: AnnotatedType => tp.underlying.underlyingClassRef(refinementOK)
+ case tp: AnnotatedType =>
+ tp.underlying.underlyingClassRef(refinementOK)
case tp: RefinedType =>
def isParamName = tp.classSymbol.typeParams.exists(_.name == tp.refinedName)
if (refinementOK || isParamName) tp.underlying.underlyingClassRef(refinementOK)
else NoType
- case _ => NoType
+ case tp: RecType =>
+ tp.underlying.underlyingClassRef(refinementOK)
+ case _ =>
+ NoType
}
/** The iterator of underlying types as long as type is a TypeProxy.
@@ -887,6 +920,14 @@ object Types {
def narrow(implicit ctx: Context): TermRef =
TermRef(NoPrefix, ctx.newSkolem(this))
+ /** Useful for diagnostics: The underlying type if this type is a type proxy,
+ * otherwise NoType
+ */
+ def underlyingIfProxy(implicit ctx: Context) = this match {
+ case this1: TypeProxy => this1.underlying
+ case _ => NoType
+ }
+
// ----- Normalizing typerefs over refined types ----------------------------
/** If this normalizes* to a refinement type that has a refinement for `name` (which might be followed
@@ -902,64 +943,23 @@ object Types {
*
* P { type T = String, type R = P{...}.T } # R --> String
*
- * (2) The refinement is a fully instantiated type lambda, and the projected name is "$apply".
- * In this case the rhs of the apply is returned with all references to lambda argument types
- * substituted by their definitions.
- *
* (*) normalizes means: follow instantiated typevars and aliases.
*/
def lookupRefined(name: Name)(implicit ctx: Context): Type = {
def loop(pre: Type): Type = pre.stripTypeVar match {
case pre: RefinedType =>
- object instantiate extends TypeMap {
- var isSafe = true
- def apply(tp: Type): Type =
- if (!isSafe) tp
- else tp match {
- case TypeRef(RefinedThis(`pre`), name) if name.isHkArgName =>
- member(name).info match {
- case TypeAlias(alias) => alias
- case _ => isSafe = false; tp
- }
- case tp: TypeVar if !tp.inst.exists =>
- isSafe = false
- tp
- case _ =>
- mapOver(tp)
- }
- }
- def instArg(tp: Type): Type = tp match {
- case tp @ TypeAlias(TypeRef(RefinedThis(`pre`), name)) if name.isHkArgName =>
- member(name).info match {
- case TypeAlias(alias) => tp.derivedTypeAlias(alias) // needed to keep variance
- case bounds => bounds
- }
- case _ =>
- instantiate(tp)
- }
- def instTop(tp: Type): Type = tp.stripTypeVar match {
- case tp: RefinedType =>
- tp.derivedRefinedType(instTop(tp.parent), tp.refinedName, instArg(tp.refinedInfo))
- case _ =>
- instantiate(tp)
- }
- /** Reduce rhs of $hkApply to make it stand alone */
- def betaReduce(tp: Type) = {
- val reduced = instTop(tp)
- if (instantiate.isSafe) reduced else NoType
- }
pre.refinedInfo match {
case TypeAlias(alias) =>
- if (pre.refinedName ne name) loop(pre.parent)
- else if (!pre.refinementRefersToThis) alias
- else alias match {
- case TypeRef(RefinedThis(`pre`), aliasName) => lookupRefined(aliasName) // (1)
- case _ => if (name == tpnme.hkApply) betaReduce(alias) else NoType // (2) // ### use TypeApplication's betaReduce
- }
+ if (pre.refinedName ne name) loop(pre.parent) else alias
case _ => loop(pre.parent)
}
- case RefinedThis(binder) =>
- binder.lookupRefined(name)
+ case pre: RecType =>
+ val candidate = loop(pre.parent)
+ if (candidate.exists && !pre.isReferredToBy(candidate)) {
+ //println(s"lookupRefined ${this.toString} . $name, pre: $pre ---> $candidate / ${candidate.toString}")
+ candidate
+ }
+ else NoType
case SkolemType(tp) =>
tp.lookupRefined(name)
case pre: WildcardType =>
@@ -1023,7 +1023,7 @@ object Types {
/** The full parent types, including all type arguments */
def parentsWithArgs(implicit ctx: Context): List[Type] = this match {
- case tp: TypeProxy => tp.underlying.parentsWithArgs
+ case tp: TypeProxy => tp.superType.parentsWithArgs
case _ => List()
}
@@ -1035,9 +1035,9 @@ object Types {
/** the self type of the underlying classtype */
def givenSelfType(implicit ctx: Context): Type = this match {
- case tp @ RefinedType(parent, name) => tp.wrapIfMember(parent.givenSelfType)
+ case tp: RefinedType => tp.wrapIfMember(tp.parent.givenSelfType)
case tp: ThisType => tp.tref.givenSelfType
- case tp: TypeProxy => tp.underlying.givenSelfType
+ case tp: TypeProxy => tp.superType.givenSelfType
case _ => NoType
}
@@ -1056,10 +1056,10 @@ object Types {
}
- /** The parameter types in the first parameter section of a PolyType or MethodType, Empty list for others */
+ /** The parameter types in the first parameter section of a generic type or MethodType, Empty list for others */
final def firstParamTypes(implicit ctx: Context): List[Type] = this match {
case mt: MethodType => mt.paramTypes
- case pt: PolyType => pt.resultType.firstParamTypes
+ case pt: GenericType => pt.resultType.firstParamTypes
case _ => Nil
}
@@ -1148,9 +1148,9 @@ object Types {
final def substThisUnlessStatic(cls: ClassSymbol, tp: Type)(implicit ctx: Context): Type =
if (cls.isStaticOwner) this else ctx.substThis(this, cls, tp, null)
- /** Substitute all occurrences of `SkolemType(binder)` by `tp` */
- final def substRefinedThis(binder: Type, tp: Type)(implicit ctx: Context): Type =
- ctx.substRefinedThis(this, binder, tp, null)
+ /** Substitute all occurrences of `RecThis(binder)` by `tp` */
+ final def substRecThis(binder: RecType, tp: Type)(implicit ctx: Context): Type =
+ ctx.substRecThis(this, binder, tp, null)
/** Substitute a bound type by some other type */
final def substParam(from: ParamType, to: Type)(implicit ctx: Context): Type =
@@ -1169,8 +1169,8 @@ object Types {
/** Turn type into a function type.
* @pre this is a non-dependent method type.
- * @param drop The number of trailing parameters that should be dropped
- * when forming the function type.
+ * @param dropLast The number of trailing parameters that should be dropped
+ * when forming the function type.
*/
def toFunctionType(dropLast: Int = 0)(implicit ctx: Context): Type = this match {
case mt @ MethodType(_, formals) if !mt.isDependent || ctx.mode.is(Mode.AllowDependentFunctions) =>
@@ -1246,8 +1246,15 @@ object Types {
* Each implementation is expected to redefine the `underlying` method.
*/
abstract class TypeProxy extends Type {
+
/** The type to which this proxy forwards operations. */
def underlying(implicit ctx: Context): Type
+
+ /** The closest supertype of this type. This is the same as `underlying`,
+ * except for TypeRefs where the upper bound is returned, and HKApplys,
+ * where the upper bound of the constructor is re-applied to the arguments.
+ */
+ def superType(implicit ctx: Context): Type = underlying
}
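A hypothetical source-level example of why `superType` matters (the names `SuperTypeSketch` and `F` are invented): member selection on an application of an abstract type constructor goes through the constructor's upper bound re-applied to the arguments.

    trait SuperTypeSketch {
      type F[X] <: Iterable[X]
      def elems: F[Int]
      // F[Int] has no members of its own; its closest supertype is the bound
      // re-applied to the argument, i.e. Iterable[Int], which supplies `iterator`.
      def first: Int = elems.iterator.next()
    }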
// Every type has to inherit one of the following four abstract type classes.
@@ -1306,13 +1313,15 @@ object Types {
/** A marker trait for types that apply only to type symbols */
trait TypeType extends Type
- /** A marker trait for types that apply only to term symbols */
+ /** A marker trait for types that apply only to term symbols or that
+ * represent higher-kinded types.
+ */
trait TermType extends Type
/** A marker trait for types that can be types of values or prototypes of value types */
trait ValueTypeOrProto extends TermType
- /** A marker trait for types that can be types of values */
+ /** A marker trait for types that can be types of values or that are higher-kinded */
trait ValueType extends ValueTypeOrProto
/** A marker trait for types that are guaranteed to contain only a
@@ -1399,6 +1408,9 @@ object Types {
else computeDenot
}
+ /** Hook for adding debug check code when denotations are assigned */
+ final def checkDenot()(implicit ctx: Context) = {}
+
/** A second fallback to recompute the denotation if necessary */
private def computeDenot(implicit ctx: Context): Denotation = {
val savedEphemeral = ctx.typerState.ephemeral
@@ -1434,6 +1446,7 @@ object Types {
// Don't use setDenot here; double binding checks can give spurious failures after erasure
lastDenotation = d
+ checkDenot()
lastSymbol = d.symbol
checkedPeriod = ctx.period
}
@@ -1505,6 +1518,7 @@ object Types {
// additional checks that intercept `denot` can be added here
lastDenotation = denot
+ checkDenot()
lastSymbol = denot.symbol
checkedPeriod = Nowhere
}
@@ -1544,15 +1558,16 @@ object Types {
}
}
- protected def asMemberOf(prefix: Type)(implicit ctx: Context) =
+ protected def asMemberOf(prefix: Type)(implicit ctx: Context): Denotation =
if (name.isShadowedName) prefix.nonPrivateMember(name.revertShadowed)
else prefix.member(name)
+
/** (1) Reduce a type-ref `W # X` or `W { ... } # U`, where `W` is a wildcard type
* to an (unbounded) wildcard type.
*
* (2) Reduce a type-ref `T { X = U; ... } # X` to `U`
- * provided `U` does not refer with a RefinedThis to the
+ * provided `U` does not refer with a RecThis to the
* refinement type `T { X = U; ... }`
*/
def reduceProjection(implicit ctx: Context): Type = {
@@ -1607,7 +1622,7 @@ object Types {
ctx.underlyingRecursions -= 1
}
- /** A selection of the same kind, but with potentially a differet prefix.
+ /** A selection of the same kind, but with potentially a different prefix.
* The following normalizations are performed for type selections T#A:
*
* T#A --> B if A is bound to an alias `= B` in T
@@ -1624,13 +1639,6 @@ object Types {
else if (isType) {
val res = prefix.lookupRefined(name)
if (res.exists) res
- else if (name == tpnme.hkApply && prefix.classNotLambda) {
- // After substitution we might end up with a type like
- // `C { type hk$0 = T0; ...; type hk$n = Tn } # $Apply`
- // where C is a class. In that case we eta expand `C`.
- if (defn.isBottomType(prefix)) prefix.classSymbol.typeRef
- else derivedSelect(prefix.EtaExpandCore)
- }
else if (Config.splitProjections)
prefix match {
case prefix: AndType =>
@@ -1735,6 +1743,11 @@ object Types {
type ThisType = TypeRef
override def underlying(implicit ctx: Context): Type = info
+
+ override def superType(implicit ctx: Context): Type = info match {
+ case TypeBounds(_, hi) => hi
+ case _ => info
+ }
}
final class TermRefWithSignature(prefix: Type, name: TermName, override val sig: Signature) extends TermRef(prefix, name) {
@@ -1904,15 +1917,9 @@ object Types {
}
object TypeRef {
- def checkProjection(prefix: Type, name: TypeName)(implicit ctx: Context) =
- if (name == tpnme.hkApply && prefix.classNotLambda)
- assert(false, s"bad type : $prefix.$name does not allow $$Apply projection")
-
/** Create type ref with given prefix and name */
- def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef = {
- if (Config.checkProjections) checkProjection(prefix, name)
+ def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef =
ctx.uniqueNamedTypes.enterIfNew(prefix, name).asInstanceOf[TypeRef]
- }
/** Create type ref to given symbol */
def apply(prefix: Type, sym: TypeSymbol)(implicit ctx: Context): TypeRef =
@@ -1921,10 +1928,8 @@ object Types {
/** Create a non-member type ref (which cannot be reloaded using `member`),
* with given prefix, name, and symbol.
*/
- def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef = {
- if (Config.checkProjections) checkProjection(prefix, name)
+ def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef =
unique(new TypeRefWithFixedSym(prefix, name, sym))
- }
/** Create a type ref referring to given symbol with given name.
* This is very similar to TypeRef(Type, Symbol),
@@ -2022,46 +2027,29 @@ object Types {
override def hashCode = ref.hashCode + 37
}
- // --- Refined Type ---------------------------------------------------------
+ // --- Refined Type and RecType ------------------------------------------------
+
+ abstract class RefinedOrRecType extends CachedProxyType with ValueType {
+ def parent: Type
+ }
/** A refined type parent { refinement }
* @param refinedName The name of the refinement declaration
* @param infoFn: A function that produces the info of the refinement declaration,
* given the refined type itself.
*/
- abstract case class RefinedType(parent: Type, refinedName: Name)
- extends CachedProxyType with BindingType with ValueType {
-
- val refinedInfo: Type
-
- private var refinementRefersToThisCache: Boolean = _
- private var refinementRefersToThisKnown: Boolean = false
-
- def refinementRefersToThis(implicit ctx: Context): Boolean = {
- if (!refinementRefersToThisKnown) {
- refinementRefersToThisCache = refinedInfo.containsRefinedThis(this)
- refinementRefersToThisKnown = true
- }
- refinementRefersToThisCache
- }
+ abstract case class RefinedType(parent: Type, refinedName: Name, refinedInfo: Type) extends RefinedOrRecType {
override def underlying(implicit ctx: Context) = parent
private def badInst =
throw new AssertionError(s"bad instantiation: $this")
- def checkInst(implicit ctx: Context): this.type = {
- if (refinedName == tpnme.hkApply)
- parent.stripTypeVar match {
- case RefinedType(_, name) if name.isHkArgName => // ok
- case _ => badInst
- }
- this
- }
+ def checkInst(implicit ctx: Context): this.type = this // debug hook
- def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): RefinedType =
+ def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): Type =
if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this
- else RefinedType(parent, refinedName, rt => refinedInfo.substRefinedThis(this, RefinedThis(rt)))
+ else RefinedType(parent, refinedName, refinedInfo)
/** Add this refinement to `parent`, provided `refinedName` is a member of `parent`. */
def wrapIfMember(parent: Type)(implicit ctx: Context): Type =
@@ -2077,28 +2065,19 @@ object Types {
false
}
override def computeHash = doHash(refinedName, refinedInfo, parent)
- override def toString = s"RefinedType($parent, $refinedName, $refinedInfo | $hashCode)"
- }
-
- class CachedRefinedType(parent: Type, refinedName: Name, infoFn: RefinedType => Type) extends RefinedType(parent, refinedName) {
- val refinedInfo = infoFn(this)
+ override def toString = s"RefinedType($parent, $refinedName, $refinedInfo)"
}
- class PreHashedRefinedType(parent: Type, refinedName: Name, override val refinedInfo: Type, hc: Int)
- extends RefinedType(parent, refinedName) {
+ class CachedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type, hc: Int)
+ extends RefinedType(parent, refinedName, refinedInfo) {
myHash = hc
override def computeHash = unsupported("computeHash")
}
object RefinedType {
- def make(parent: Type, names: List[Name], infoFns: List[RefinedType => Type])(implicit ctx: Context): Type =
+ def make(parent: Type, names: List[Name], infos: List[Type])(implicit ctx: Context): Type =
if (names.isEmpty) parent
- else make(RefinedType(parent, names.head, infoFns.head), names.tail, infoFns.tail)
-
- def apply(parent: Type, name: Name, infoFn: RefinedType => Type)(implicit ctx: Context): RefinedType = {
- assert(!ctx.erasedTypes || ctx.mode.is(Mode.Printing))
- ctx.base.uniqueRefinedTypes.enterIfNew(new CachedRefinedType(parent, name, infoFn)).checkInst
- }
+ else make(RefinedType(parent, names.head, infos.head), names.tail, infos.tail)
def apply(parent: Type, name: Name, info: Type)(implicit ctx: Context): RefinedType = {
assert(!ctx.erasedTypes)
@@ -2106,6 +2085,83 @@ object Types {
}
}
+ class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType {
+
+ // See discussion in findMember#goRec why these vars are needed
+ private[Types] var opened: Boolean = false
+ private[Types] var openedTwice: Boolean = false
+
+ val parent = parentExp(this)
+
+ override def underlying(implicit ctx: Context): Type = parent
+
+ def derivedRecType(parent: Type)(implicit ctx: Context): RecType =
+ if (parent eq this.parent) this
+ else RecType(rt => parent.substRecThis(this, RecThis(rt)))
+
+ def rebind(parent: Type)(implicit ctx: Context): Type =
+ if (parent eq this.parent) this
+ else RecType.closeOver(rt => parent.substRecThis(this, RecThis(rt)))
+
+ override def equals(other: Any) = other match {
+ case other: RecType => other.parent == this.parent
+ case _ => false
+ }
+
+ def isReferredToBy(tp: Type)(implicit ctx: Context): Boolean = {
+ val refacc = new TypeAccumulator[Boolean] {
+ override def apply(x: Boolean, tp: Type) = x || {
+ tp match {
+ case tp: TypeRef => apply(x, tp.prefix)
+ case tp: RecThis => RecType.this eq tp.binder
+ case tp: LazyRef => true // To be safe, assume a reference exists
+ case _ => foldOver(x, tp)
+ }
+ }
+ }
+ refacc.apply(false, tp)
+ }
+
+ override def computeHash = doHash(parent)
+ override def toString = s"RecType($parent | $hashCode)"
+
+ private def checkInst(implicit ctx: Context): this.type = this // debug hook
+ }
+
+ object RecType {
+
+ /** Create a RecType, normalizing its contents. This means:
+ *
+ * 1. Nested Rec types on the type's spine are merged with the outer one.
+ * 2. Any refinement of the form `type T = z.T` on the spine of the type
+ * where `z` refers to the created rec-type is replaced by
+ * `type T`. This avoids infinite recursions later when we
+ * try to follow these references.
+ * TODO: Figure out how to guarantee absence of cycles
+ * of length > 1
+ */
+ def apply(parentExp: RecType => Type)(implicit ctx: Context): RecType = {
+ val rt = new RecType(parentExp)
+ def normalize(tp: Type): Type = tp.stripTypeVar match {
+ case tp: RecType =>
+ normalize(tp.parent.substRecThis(tp, RecThis(rt)))
+ case tp @ RefinedType(parent, rname, rinfo) =>
+ val rinfo1 = rinfo match {
+ case TypeAlias(TypeRef(RecThis(`rt`), `rname`)) => TypeBounds.empty
+ case _ => rinfo
+ }
+ tp.derivedRefinedType(normalize(parent), rname, rinfo1)
+ case tp =>
+ tp
+ }
+ unique(rt.derivedRecType(normalize(rt.parent))).checkInst
+ }
+ def closeOver(parentExp: RecType => Type)(implicit ctx: Context) = {
+ val rt = this(parentExp)
+ if (rt.isReferredToBy(rt.parent)) rt else rt.parent
+ }
+ }
+
// --- AndType/OrType ---------------------------------------------------------------
trait AndOrType extends ValueType { // todo: check where we can simplify using AndOrType
@@ -2137,7 +2193,7 @@ object Types {
object AndType {
def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = {
- assert(tp1.isInstanceOf[ValueType] && tp2.isInstanceOf[ValueType])
+ assert(tp1.isInstanceOf[ValueType] && tp2.isInstanceOf[ValueType], i"$tp1 & $tp2 / " + s"$tp1 & $tp2")
unchecked(tp1, tp2)
}
def unchecked(tp1: Type, tp2: Type)(implicit ctx: Context) = {
@@ -2205,7 +2261,7 @@ object Types {
final override def signature(implicit ctx: Context): Signature = {
if (ctx.runId != mySignatureRunId) {
mySignature = computeSignature
- mySignatureRunId = ctx.runId
+ if (!mySignature.isUnderDefined) mySignatureRunId = ctx.runId
}
mySignature
}
@@ -2412,70 +2468,239 @@ object Types {
}
}
- abstract case class PolyType(paramNames: List[TypeName])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)
- extends CachedGroundType with BindingType with TermType with MethodOrPoly {
+ /** A common supertrait of PolyType and TypeLambda */
+ trait GenericType extends BindingType with TermType {
- val paramBounds = paramBoundsExp(this)
- val resType = resultTypeExp(this)
+ /** The names of the type parameters */
+ val paramNames: List[TypeName]
- assert(resType ne null)
+ /** The bounds of the type parameters */
+ val paramBounds: List[TypeBounds]
- override def resultType(implicit ctx: Context) = resType
+ /** The result type of a PolyType / body of a type lambda */
+ val resType: Type
- protected def computeSignature(implicit ctx: Context) = resultSignature
+ /** If this is a type lambda, the variances of its parameters, otherwise Nil.*/
+ def variances: List[Int]
- def isPolymorphicMethodType: Boolean = resType match {
- case _: MethodType => true
- case _ => false
- }
+ override def resultType(implicit ctx: Context) = resType
+
+ /** Unconditionally create a new generic type like this one with given elements */
+ def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): GenericType
- def instantiate(argTypes: List[Type])(implicit ctx: Context): Type =
+ /** Instantiate result type by substituting parameters with given arguments */
+ final def instantiate(argTypes: List[Type])(implicit ctx: Context): Type =
resultType.substParams(this, argTypes)
+ /** Instantiate parameter bounds by substituting parameters with given arguments */
def instantiateBounds(argTypes: List[Type])(implicit ctx: Context): List[TypeBounds] =
paramBounds.mapConserve(_.substParams(this, argTypes).bounds)
- def derivedPolyType(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context) =
+ def derivedGenericType(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context) =
if ((paramNames eq this.paramNames) && (paramBounds eq this.paramBounds) && (resType eq this.resType)) this
else duplicate(paramNames, paramBounds, resType)
- def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context) =
- PolyType(paramNames)(
- x => paramBounds mapConserve (_.subst(this, x).bounds),
- x => resType.subst(this, x))
+ /** PolyParam references to all type parameters of this type */
+ lazy val paramRefs: List[PolyParam] = paramNames.indices.toList.map(PolyParam(this, _))
+
+ /** The type `[tparams := paramRefs] tp`, where `tparams` can be
+ * either a list of type parameter symbols or a list of lambda parameters
+ */
+ def lifted(tparams: List[TypeParamInfo], tp: Type)(implicit ctx: Context): Type =
+ tparams match {
+ case LambdaParam(poly, _) :: _ => tp.subst(poly, this)
+ case tparams: List[Symbol @unchecked] => tp.subst(tparams, paramRefs)
+ }
override def equals(other: Any) = other match {
- case other: PolyType =>
- other.paramNames == this.paramNames && other.paramBounds == this.paramBounds && other.resType == this.resType
+ case other: GenericType =>
+ other.paramNames == this.paramNames &&
+ other.paramBounds == this.paramBounds &&
+ other.resType == this.resType &&
+ other.variances == this.variances
case _ => false
}
- override def computeHash = {
- doHash(paramNames, resType, paramBounds)
+ }
+
+ /** A type for polymorphic methods */
+ class PolyType(val paramNames: List[TypeName])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type)
+ extends CachedGroundType with GenericType with MethodOrPoly {
+ val paramBounds = paramBoundsExp(this)
+ val resType = resultTypeExp(this)
+ def variances = Nil
+
+ protected def computeSignature(implicit ctx: Context) = resultSignature
+
+ def isPolymorphicMethodType: Boolean = resType match {
+ case _: MethodType => true
+ case _ => false
}
+ def derivedPolyType(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): PolyType =
+ derivedGenericType(paramNames, paramBounds, resType).asInstanceOf[PolyType]
+
+ def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): PolyType =
+ PolyType(paramNames)(
+ x => paramBounds mapConserve (_.subst(this, x).bounds),
+ x => resType.subst(this, x))
+
override def toString = s"PolyType($paramNames, $paramBounds, $resType)"
- }
- class CachedPolyType(paramNames: List[TypeName])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)
- extends PolyType(paramNames)(paramBoundsExp, resultTypeExp)
+ override def computeHash = doHash(paramNames, resType, paramBounds)
+ }
object PolyType {
- def apply(paramNames: List[TypeName])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)(implicit ctx: Context): PolyType = {
- unique(new CachedPolyType(paramNames)(paramBoundsExp, resultTypeExp))
+ def apply(paramNames: List[TypeName])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type)(implicit ctx: Context): PolyType = {
+ unique(new PolyType(paramNames)(paramBoundsExp, resultTypeExp))
}
def fromSymbols(tparams: List[Symbol], resultType: Type)(implicit ctx: Context) =
if (tparams.isEmpty) resultType
- else {
- def transform(pt: PolyType, tp: Type) =
- tp.subst(tparams, (0 until tparams.length).toList map (PolyParam(pt, _)))
- apply(tparams map (_.name.asTypeName))(
- pt => tparams map (tparam => transform(pt, tparam.info).bounds),
- pt => transform(pt, resultType))
+ else apply(tparams map (_.name.asTypeName))(
+ pt => tparams.map(tparam => pt.lifted(tparams, tparam.info).bounds),
+ pt => pt.lifted(tparams, resultType))
+ }
+
+ // ----- HK types: TypeLambda, LambdaParam, HKApply ---------------------
+
+ /** A type lambda of the form `[v_0 X_0, ..., v_n X_n] => T` */
+ class TypeLambda(val paramNames: List[TypeName], val variances: List[Int])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type)
+ extends CachedProxyType with GenericType with ValueType {
+ val paramBounds = paramBoundsExp(this)
+ val resType = resultTypeExp(this)
+
+ assert(resType.isInstanceOf[TermType], this)
+ assert(paramNames.nonEmpty)
+
+ override def underlying(implicit ctx: Context) = resType
+
+ lazy val typeParams: List[LambdaParam] =
+ paramNames.indices.toList.map(new LambdaParam(this, _))
+
+ def derivedLambdaAbstraction(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): Type =
+ resType match {
+ case resType @ TypeAlias(alias) =>
+ resType.derivedTypeAlias(duplicate(paramNames, paramBounds, alias))
+ case resType @ TypeBounds(lo, hi) =>
+ resType.derivedTypeBounds(
+ if (lo.isRef(defn.NothingClass)) lo else duplicate(paramNames, paramBounds, lo),
+ duplicate(paramNames, paramBounds, hi))
+ case _ =>
+ derivedTypeLambda(paramNames, paramBounds, resType)
+ }
+
+ def derivedTypeLambda(paramNames: List[TypeName] = paramNames, paramBounds: List[TypeBounds] = paramBounds, resType: Type)(implicit ctx: Context): TypeLambda =
+ derivedGenericType(paramNames, paramBounds, resType).asInstanceOf[TypeLambda]
+
+ def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context): TypeLambda =
+ TypeLambda(paramNames, variances)(
+ x => paramBounds mapConserve (_.subst(this, x).bounds),
+ x => resType.subst(this, x))
+
+ override def toString = s"TypeLambda($variances, $paramNames, $paramBounds, $resType)"
+
+ override def computeHash = doHash(variances ::: paramNames, resType, paramBounds)
+ }
+
+ /** The parameter of a type lambda */
+ case class LambdaParam(tl: TypeLambda, n: Int) extends TypeParamInfo {
+ def isTypeParam(implicit ctx: Context) = true
+ def paramName(implicit ctx: Context): TypeName = tl.paramNames(n)
+ def paramBounds(implicit ctx: Context): TypeBounds = tl.paramBounds(n)
+ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds = paramBounds
+ def paramBoundsOrCompleter(implicit ctx: Context): Type = paramBounds
+ def paramVariance(implicit ctx: Context): Int = tl.variances(n)
+ def toArg: Type = PolyParam(tl, n)
+ def paramRef(implicit ctx: Context): Type = PolyParam(tl, n)
+ }
+
+ object TypeLambda {
+ def apply(paramNames: List[TypeName], variances: List[Int])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type)(implicit ctx: Context): TypeLambda = {
+ unique(new TypeLambda(paramNames, variances)(paramBoundsExp, resultTypeExp))
+ }
+
+ def fromSymbols(tparams: List[Symbol], resultType: Type)(implicit ctx: Context) =
+ if (tparams.isEmpty) resultType
+ else apply(tparams map (_.name.asTypeName), tparams.map(_.variance))(
+ pt => tparams.map(tparam => pt.lifted(tparams, tparam.info).bounds),
+ pt => pt.lifted(tparams, resultType))
+ def unapply(tl: TypeLambda): Some[(List[LambdaParam], Type)] =
+ Some((tl.typeParams, tl.resType))
+
+ def any(n: Int)(implicit ctx: Context) =
+ apply(tpnme.syntheticLambdaParamNames(n), List.fill(n)(0))(
+ pt => List.fill(n)(TypeBounds.empty), pt => defn.AnyType)
+ }
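For orientation, the type lambdas introduced above correspond, in 2016-era source code, to the familiar type-member encoding. A hypothetical sketch (the names `TypeLambdaSketch`, `size` and `L` are invented) of instantiating a higher-kinded parameter with the lambda `[X] => Map[String, X]`:

    object TypeLambdaSketch {
      def size[F[_], A](fa: F[A], toList: F[A] => List[A]): Int = toList(fa).size
      // ({ type L[X] = Map[String, X] })#L plays the role of the type lambda
      // [X] => Map[String, X] when F is instantiated explicitly.
      val n: Int =
        size[({ type L[X] = Map[String, X] })#L, Int](Map("a" -> 1), _.values.toList)
    }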
+
+ /** A higher kinded type application `C[T_1, ..., T_n]` */
+ abstract case class HKApply(tycon: Type, args: List[Type])
+ extends CachedProxyType with ValueType {
+
+ private var validSuper: Period = Nowhere
+ private var cachedSuper: Type = _
+
+ override def underlying(implicit ctx: Context): Type = tycon
+
+ override def superType(implicit ctx: Context): Type = {
+ if (ctx.period != validSuper) {
+ cachedSuper = tycon match {
+ case tp: TypeLambda => defn.AnyType
+ case tp: TypeProxy => tp.superType.applyIfParameterized(args)
+ case _ => defn.AnyType
+ }
+ validSuper = ctx.period
}
+ cachedSuper
+ }
+
+ /* (Not needed yet) */
+ def lowerBound(implicit ctx: Context) = tycon.stripTypeVar match {
+ case tycon: TypeRef =>
+ tycon.info match {
+ case TypeBounds(lo, hi) =>
+ if (lo eq hi) superType // optimization, can profit from caching in this case
+ else lo.applyIfParameterized(args)
+ case _ => NoType
+ }
+ case _ =>
+ NoType
+ }
+
+ def typeParams(implicit ctx: Context): List[TypeParamInfo] = {
+ val tparams = tycon.typeParams
+ if (tparams.isEmpty) TypeLambda.any(args.length).typeParams else tparams
+ }
+
+ def derivedAppliedType(tycon: Type, args: List[Type])(implicit ctx: Context): Type =
+ if ((tycon eq this.tycon) && (args eq this.args)) this
+ else tycon.appliedTo(args)
+
+ override def computeHash = doHash(tycon, args)
+
+ protected def checkInst(implicit ctx: Context): this.type = {
+ def check(tycon: Type): Unit = tycon.stripTypeVar match {
+ case tycon: TypeRef if !tycon.symbol.isClass =>
+ case _: PolyParam | ErrorType | _: WildcardType =>
+ case _: TypeLambda =>
+ assert(args.exists(_.isInstanceOf[TypeBounds]), s"unreduced type apply: $this")
+ case tycon: AnnotatedType =>
+ check(tycon.underlying)
+ case _ =>
+ assert(false, s"illegal type constructor in $this")
+ }
+ if (Config.checkHKApplications) check(tycon)
+ this
+ }
+ }
+
+ final class CachedHKApply(tycon: Type, args: List[Type]) extends HKApply(tycon, args)
+
+ object HKApply {
+ def apply(tycon: Type, args: List[Type])(implicit ctx: Context) =
+ unique(new CachedHKApply(tycon, args)).checkInst
}
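At the source level, `HKApply` nodes arise from applying abstract or lambda type constructors, as opposed to classes. A hypothetical example (invented names) in which `F[A]` inside the method is such an application:

    object HKApplySketch {
      // `F[A]` applies the abstract constructor F; its members are found through
      // the constructor's upper bound applied to the argument, here Seq[A].
      def headOption[F[X] <: Seq[X], A](fa: F[A]): Option[A] =
        if (fa.isEmpty) None else Some(fa.head)
    }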
- // ----- Bound types: MethodParam, PolyParam, RefinedThis --------------------------
+ // ----- Bound types: MethodParam, PolyParam --------------------------
abstract class BoundType extends CachedProxyType with ValueType {
type BT <: Type
@@ -2522,8 +2747,8 @@ object Types {
}
/** TODO Some docs would be nice here! */
- case class PolyParam(binder: PolyType, paramNum: Int) extends ParamType {
- type BT = PolyType
+ case class PolyParam(binder: GenericType, paramNum: Int) extends ParamType {
+ type BT = GenericType
def copyBoundType(bt: BT) = PolyParam(bt, paramNum)
/** Looking only at the structure of `bound`, is one of the following true?
@@ -2541,9 +2766,17 @@ object Types {
def paramName = binder.paramNames(paramNum)
- override def underlying(implicit ctx: Context): Type = binder.paramBounds(paramNum)
+ override def underlying(implicit ctx: Context): Type = {
+ val bounds = binder.paramBounds
+ if (bounds == null) NoType // this can happen if the referenced generic type is not initialized yet
+ else bounds(paramNum)
+ }
// no customized hashCode/equals needed because cycle is broken in PolyType
- override def toString = s"PolyParam($paramName)"
+ override def toString =
+ try s"PolyParam($paramName)"
+ catch {
+ case ex: IndexOutOfBoundsException => s"PolyParam(<bad index: $paramNum>)"
+ }
override def computeHash = doHash(paramNum, binder.identityHash)
@@ -2555,20 +2788,24 @@ object Types {
}
}
- /** a this-reference to an enclosing refined type `binder`. */
- case class RefinedThis(binder: RefinedType) extends BoundType with SingletonType {
- type BT = RefinedType
+ /** a self-reference to an enclosing recursive type. */
+ case class RecThis(binder: RecType) extends BoundType with SingletonType {
+ type BT = RecType
override def underlying(implicit ctx: Context) = binder
- def copyBoundType(bt: BT) = RefinedThis(bt)
+ def copyBoundType(bt: BT) = RecThis(bt)
- // need to customize hashCode and equals to prevent infinite recursion for
- // refinements that refer to the refinement type via this
+ // need to customize hashCode and equals to prevent infinite recursion
+ // between RecTypes and RecRefs.
override def computeHash = addDelta(binder.identityHash, 41)
override def equals(that: Any) = that match {
- case that: RefinedThis => this.binder eq that.binder
+ case that: RecThis => this.binder eq that.binder
case _ => false
}
- override def toString = s"RefinedThis(${binder.hashCode})"
+ override def toString =
+ try s"RecThis(${binder.hashCode})"
+ catch {
+ case ex: NullPointerException => s"RecThis(<under construction>)"
+ }
}
// ----- Skolem types -----------------------------------------------
@@ -2580,7 +2817,14 @@ object Types {
if (info eq this.info) this else SkolemType(info)
override def hashCode: Int = identityHash
override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
- override def toString = s"Skolem($info)"
+
+ private var myRepr: String = null
+ def repr(implicit ctx: Context) = {
+ if (myRepr == null) myRepr = ctx.freshName("?")
+ myRepr
+ }
+
+ override def toString = s"Skolem($hashCode)"
}
final class CachedSkolemType(info: Type) extends SkolemType(info)
@@ -2653,53 +2897,11 @@ object Types {
* is also a singleton type.
*/
def instantiate(fromBelow: Boolean)(implicit ctx: Context): Type = {
- def upperBound = ctx.typerState.constraint.fullUpperBound(origin)
- def isSingleton(tp: Type): Boolean = tp match {
- case tp: SingletonType => true
- case AndType(tp1, tp2) => isSingleton(tp1) | isSingleton(tp2)
- case OrType(tp1, tp2) => isSingleton(tp1) & isSingleton(tp2)
- case _ => false
- }
- def isFullyDefined(tp: Type): Boolean = tp match {
- case tp: TypeVar => tp.isInstantiated && isFullyDefined(tp.instanceOpt)
- case tp: TypeProxy => isFullyDefined(tp.underlying)
- case tp: AndOrType => isFullyDefined(tp.tp1) && isFullyDefined(tp.tp2)
- case _ => true
- }
- def isOrType(tp: Type): Boolean = tp.stripTypeVar.dealias match {
- case tp: OrType => true
- case AndType(tp1, tp2) => isOrType(tp1) | isOrType(tp2)
- case RefinedType(parent, _) => isOrType(parent)
- case WildcardType(bounds: TypeBounds) => isOrType(bounds.hi)
- case _ => false
- }
-
- // First, solve the constraint.
- var inst = ctx.typeComparer.approximation(origin, fromBelow)
-
- // Then, approximate by (1.) - (3.) and simplify as follows.
- // 1. If instance is from below and is a singleton type, yet
- // upper bound is not a singleton type, widen the instance.
- if (fromBelow && isSingleton(inst) && !isSingleton(upperBound))
- inst = inst.widen
-
- inst = inst.simplified
-
- // 2. If instance is from below and is a fully-defined union type, yet upper bound
- // is not a union type, approximate the union type from above by an intersection
- // of all common base types.
- if (fromBelow && isOrType(inst) && isFullyDefined(inst) && !isOrType(upperBound))
- inst = inst.approximateUnion
-
- // 3. If instance is from below, and upper bound has open named parameters
- // make sure the instance has all named parameters of the bound.
- if (fromBelow) inst = inst.widenToNamedTypeParams(this.namedTypeParams)
-
+ val inst = ctx.typeComparer.instanceType(origin, fromBelow)
if (ctx.typerState.isGlobalCommittable)
assert(!inst.isInstanceOf[PolyParam], i"bad inst $this := $inst, constr = ${ctx.typerState.constraint}")
// If this fails, you might want to turn on Config.debugCheckConstraintsClosed
// to help find the root of the problem.
-
instantiateWith(inst)
}
@@ -2909,9 +3111,10 @@ object Types {
/** If this type and that type have the same variance, this variance, otherwise 0 */
final def commonVariance(that: TypeBounds): Int = (this.variance + that.variance) / 2
+ override def computeHash = doHash(variance, lo, hi)
override def equals(that: Any): Boolean = that match {
case that: TypeBounds =>
- (this.lo eq that.lo) && (this.hi eq that.hi) && this.variance == that.variance
+ (this.lo eq that.lo) && (this.hi eq that.hi) && (this.variance == that.variance)
case _ =>
false
}
@@ -2920,9 +3123,7 @@ object Types {
if (lo eq hi) s"TypeAlias($lo, $variance)" else s"TypeBounds($lo, $hi)"
}
- class RealTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi) {
- override def computeHash = doHash(variance, lo, hi)
- }
+ class RealTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi)
abstract class TypeAlias(val alias: Type, override val variance: Int) extends TypeBounds(alias, alias) {
/** pre: this is a type alias */
@@ -2952,7 +3153,6 @@ object Types {
class CachedTypeAlias(alias: Type, variance: Int, hc: Int) extends TypeAlias(alias, variance) {
myHash = hc
- override def computeHash = doHash(variance, lo, hi)
}
object TypeBounds {
@@ -3024,7 +3224,9 @@ object Types {
/** Wildcard type, possibly with bounds */
abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermType {
def derivedWildcardType(optBounds: Type)(implicit ctx: Context) =
- if (optBounds eq this.optBounds) this else WildcardType(optBounds.asInstanceOf[TypeBounds])
+ if (optBounds eq this.optBounds) this
+ else if (!optBounds.exists) WildcardType
+ else WildcardType(optBounds.asInstanceOf[TypeBounds])
override def computeHash = doHash(optBounds)
}
@@ -3110,16 +3312,18 @@ object Types {
tp.derivedSelect(pre)
protected def derivedRefinedType(tp: RefinedType, parent: Type, info: Type): Type =
tp.derivedRefinedType(parent, tp.refinedName, info)
+ protected def derivedRecType(tp: RecType, parent: Type): Type =
+ tp.rebind(parent)
protected def derivedTypeAlias(tp: TypeAlias, alias: Type): Type =
tp.derivedTypeAlias(alias)
protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type): Type =
tp.derivedTypeBounds(lo, hi)
protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type): Type =
tp.derivedSuperType(thistp, supertp)
+ protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type =
+ tp.derivedAppliedType(tycon, args)
protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type): Type =
tp.derivedAndOrType(tp1, tp2)
- protected def derivedSkolemType(tp: SkolemType, info: Type): Type =
- tp.derivedSkolemType(info)
protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type =
tp.derivedAnnotatedType(underlying, annot)
protected def derivedWildcardType(tp: WildcardType, bounds: Type): Type =
@@ -3132,8 +3336,8 @@ object Types {
tp.derivedMethodType(tp.paramNames, formals, restpe)
protected def derivedExprType(tp: ExprType, restpe: Type): Type =
tp.derivedExprType(restpe)
- protected def derivedPolyType(tp: PolyType, pbounds: List[TypeBounds], restpe: Type): Type =
- tp.derivedPolyType(tp.paramNames, pbounds, restpe)
+ protected def derivedGenericType(tp: GenericType, pbounds: List[TypeBounds], restpe: Type): Type =
+ tp.derivedGenericType(tp.paramNames, pbounds, restpe)
/** Map this function over given type */
def mapOver(tp: Type): Type = {
@@ -3175,15 +3379,18 @@ object Types {
case tp: ExprType =>
derivedExprType(tp, this(tp.resultType))
- case tp: PolyType =>
+ case tp: GenericType =>
def mapOverPoly = {
variance = -variance
val bounds1 = tp.paramBounds.mapConserve(this).asInstanceOf[List[TypeBounds]]
variance = -variance
- derivedPolyType(tp, bounds1, this(tp.resultType))
+ derivedGenericType(tp, bounds1, this(tp.resultType))
}
mapOverPoly
+ case tp: RecType =>
+ derivedRecType(tp, this(tp.parent))
+
case tp @ SuperType(thistp, supertp) =>
derivedSuperType(tp, this(thistp), this(supertp))
@@ -3197,11 +3404,21 @@ object Types {
val inst = tp.instanceOpt
if (inst.exists) apply(inst) else tp
+ case tp: HKApply =>
+ def mapArg(arg: Type, tparam: TypeParamInfo): Type = {
+ val saved = variance
+ variance *= tparam.paramVariance
+ try this(arg)
+ finally variance = saved
+ }
+ derivedAppliedType(tp, this(tp.tycon),
+ tp.args.zipWithConserve(tp.typeParams)(mapArg))
+
case tp: AndOrType =>
derivedAndOrType(tp, this(tp.tp1), this(tp.tp2))
case tp: SkolemType =>
- derivedSkolemType(tp, this(tp.info))
+ tp
case tp @ AnnotatedType(underlying, annot) =>
val underlying1 = this(underlying)
@@ -3285,6 +3502,9 @@ object Types {
override protected def derivedRefinedType(tp: RefinedType, parent: Type, info: Type) =
if (parent.exists && info.exists) tp.derivedRefinedType(parent, tp.refinedName, info)
else approx(hi = parent)
+ override protected def derivedRecType(tp: RecType, parent: Type) =
+ if (parent.exists) tp.rebind(parent)
+ else approx()
override protected def derivedTypeAlias(tp: TypeAlias, alias: Type) =
if (alias.exists) tp.derivedTypeAlias(alias)
else approx(NoType, TypeBounds.empty)
@@ -3297,13 +3517,13 @@ object Types {
override protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type) =
if (thistp.exists && supertp.exists) tp.derivedSuperType(thistp, supertp)
else NoType
+ override protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type =
+ if (tycon.exists && args.forall(_.exists)) tp.derivedAppliedType(tycon, args)
+ else approx() // This is rather coarse, but to do better is a bit complicated
override protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type) =
if (tp1.exists && tp2.exists) tp.derivedAndOrType(tp1, tp2)
else if (tp.isAnd) approx(hi = tp1 & tp2) // if one of tp1d, tp2d exists, it is the result of tp1d & tp2d
else approx(lo = tp1 & tp2)
- override protected def derivedSkolemType(tp: SkolemType, info: Type) =
- if (info.exists) tp.derivedSkolemType(info)
- else NoType
override protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation) =
if (underlying.exists) tp.derivedAnnotatedType(underlying, annot)
else NoType
@@ -3377,18 +3597,38 @@ object Types {
case ExprType(restpe) =>
this(x, restpe)
- case tp @ PolyType(pnames) =>
+ case tp: GenericType =>
variance = -variance
val y = foldOver(x, tp.paramBounds)
variance = -variance
this(y, tp.resultType)
+ case tp: RecType =>
+ this(x, tp.parent)
+
case SuperType(thistp, supertp) =>
this(this(x, thistp), supertp)
case tp @ ClassInfo(prefix, _, _, _, _) =>
this(x, prefix)
+ case tp @ HKApply(tycon, args) =>
+ def foldArgs(x: T, tparams: List[TypeParamInfo], args: List[Type]): T =
+ if (args.isEmpty) {
+ assert(tparams.isEmpty)
+ x
+ }
+ else {
+ val tparam = tparams.head
+ val saved = variance
+ variance *= tparam.paramVariance
+ val acc =
+ try this(x, args.head)
+ finally variance = saved
+ foldArgs(acc, tparams.tail, args.tail)
+ }
+ foldArgs(this(x, tycon), tp.typeParams, args)
+
case tp: AndOrType =>
this(this(x, tp.tp1), tp.tp2)
@@ -3407,6 +3647,9 @@ object Types {
case tp: JavaArrayType =>
this(x, tp.elemType)
+ case tp: LazyRef =>
+ this(x, tp.ref)
+
case tp: ProtoType =>
tp.fold(x, this)
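
Note on the HKApply cases added to mapOver and foldOver above: each type argument is visited at a variance obtained by multiplying the current traversal variance with the corresponding parameter's declared variance, and the old variance is restored afterwards. The following minimal, self-contained sketch shows only that bookkeeping; the object and method names are made up and arguments are modeled as strings.

object VarianceMapDemo {
  // variance is +1 (covariant), -1 (contravariant) or 0 (invariant)
  var variance: Int = 1

  // record the variance at which each argument is visited,
  // mirroring the try/finally in mapArg of TypeMap.mapOver for HKApply
  def mapArgs(args: List[String], paramVariances: List[Int]): List[(String, Int)] =
    args.zip(paramVariances).map { case (arg, v) =>
      val saved = variance
      variance *= v
      try (arg, variance)      // "visit" the argument under the adjusted variance
      finally variance = saved // restore, exactly as mapOver/foldOver do
    }

  def main(args: Array[String]): Unit =
    // e.g. Function1[A, B]: first parameter contravariant, second covariant
    println(mapArgs(List("A", "B"), List(-1, 1)))  // List((A,-1), (B,1))
}
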
diff --git a/src/dotty/tools/dotc/core/Uniques.scala b/src/dotty/tools/dotc/core/Uniques.scala
index b00508d60..cb9670c69 100644
--- a/src/dotty/tools/dotc/core/Uniques.scala
+++ b/src/dotty/tools/dotc/core/Uniques.scala
@@ -107,8 +107,8 @@ object Uniques {
def enterIfNew(parent: Type, refinedName: Name, refinedInfo: Type): RefinedType = {
val h = doHash(refinedName, refinedInfo, parent)
- def newType = new PreHashedRefinedType(parent, refinedName, refinedInfo, h)
- if (monitored) recordCaching(h, classOf[PreHashedRefinedType])
+ def newType = new CachedRefinedType(parent, refinedName, refinedInfo, h)
+ if (monitored) recordCaching(h, classOf[CachedRefinedType])
if (h == NotCached) newType
else {
val r = findPrevious(h, parent, refinedName, refinedInfo)
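
Note: enterIfNew is a hash-consing step, so a newly requested RefinedType is only allocated when no structurally equal instance is already in the uniques table. A simplified, self-contained model of the idea (HashConsDemo and its string-typed fields are invented; the real table is keyed by the precomputed hash and falls back to a fresh instance when hashing is not possible):

import scala.collection.mutable

object HashConsDemo {
  final case class Refined(parent: String, name: String, info: String)

  // cache keyed by the defining fields; structurally equal requests share one instance
  private val uniques = mutable.HashMap.empty[(String, String, String), Refined]

  def enterIfNew(parent: String, name: String, info: String): Refined =
    uniques.getOrElseUpdate((parent, name, info), Refined(parent, name, info))

  def main(args: Array[String]): Unit = {
    val a = enterIfNew("Any", "T", "Int")
    val b = enterIfNew("Any", "T", "Int")
    println(a eq b)  // true: the second request reuses the cached instance
  }
}
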
diff --git a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
index f7a69aa53..813376655 100644
--- a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
+++ b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
@@ -272,7 +272,7 @@ class ClassfileParser(
if (sig(index) == '<') {
accept('<')
var tp1: Type = tp
- var formals = tp.typeParams
+ var formals = tp.typeParamSymbols
while (sig(index) != '>') {
sig(index) match {
case variance @ ('+' | '-' | '*') =>
@@ -389,7 +389,7 @@ class ClassfileParser(
}
index += 1
}
- val ownTypeParams = newTParams.toList
+ val ownTypeParams = newTParams.toList.asInstanceOf[List[TypeSymbol]]
val tpe =
if ((owner == null) || !owner.isClass)
sig2type(tparams, skiptvs = false)
@@ -584,7 +584,7 @@ class ClassfileParser(
* a vararg argument. We solve this by creating two constructors, one with
* an array, the other with a repeated parameter.
*/
- def addAnnotationConstructor(classInfo: Type, tparams: List[Symbol] = Nil)(implicit ctx: Context): Unit = {
+ def addAnnotationConstructor(classInfo: Type, tparams: List[TypeSymbol] = Nil)(implicit ctx: Context): Unit = {
def addDefaultGetter(attr: Symbol, n: Int) =
ctx.newSymbol(
owner = moduleRoot.symbol,
diff --git a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
index 221170622..394d8f11a 100644
--- a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
+++ b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
@@ -103,7 +103,7 @@ Standard-Section: "ASTs" TopLevelStat*
TERMREFpkg fullyQualified_NameRef
TERMREF possiblySigned_NameRef qual_Type
THIS clsRef_Type
- REFINEDthis refinedType_ASTRef
+ RECthis recType_ASTRef
SHARED path_ASTRef
Constant = UNITconst
@@ -126,6 +126,7 @@ Standard-Section: "ASTs" TopLevelStat*
TYPEREFsymbol sym_ASTRef qual_Type
TYPEREFpkg fullyQualified_NameRef
TYPEREF possiblySigned_NameRef qual_Type
+ RECtype parent_Type
SUPERtype Length this_Type underlying_Type
REFINEDtype Length underlying_Type refinement_NameRef info_Type
APPLIEDtype Length tycon_Type arg_Type*
@@ -137,6 +138,7 @@ Standard-Section: "ASTs" TopLevelStat*
BIND Length boundName_NameRef bounds_Type
// for type-variables defined in a type pattern
BYNAMEtype underlying_Type
+ LAMBDAtype Length result_Type NamesTypes // variance encoded in front of name: +/-/=
POLYtype Length result_Type NamesTypes // needed for refinements
METHODtype Length result_Type NamesTypes // needed for refinements
PARAMtype Length binder_ASTref paramNum_Nat // needed for refinements
@@ -258,7 +260,7 @@ object TastyFormat {
final val TYPEREFdirect = 66
final val TERMREFpkg = 67
final val TYPEREFpkg = 68
- final val REFINEDthis = 69
+ final val RECthis = 69
final val BYTEconst = 70
final val SHORTconst = 71
final val CHARconst = 72
@@ -277,6 +279,7 @@ object TastyFormat {
final val IMPLICITarg = 101
final val PRIVATEqualified = 102
final val PROTECTEDqualified = 103
+ final val RECtype = 104
final val IDENT = 112
final val SELECT = 113
@@ -324,7 +327,8 @@ object TastyFormat {
final val ORtype = 172
final val METHODtype = 174
final val POLYtype = 175
- final val PARAMtype = 176
+ final val LAMBDAtype = 176
+ final val PARAMtype = 177
final val ANNOTATION = 178
final val firstSimpleTreeTag = UNITconst
@@ -417,7 +421,7 @@ object TastyFormat {
case TYPEREFdirect => "TYPEREFdirect"
case TERMREFpkg => "TERMREFpkg"
case TYPEREFpkg => "TYPEREFpkg"
- case REFINEDthis => "REFINEDthis"
+ case RECthis => "RECthis"
case BYTEconst => "BYTEconst"
case SHORTconst => "SHORTconst"
case CHARconst => "CHARconst"
@@ -426,6 +430,7 @@ object TastyFormat {
case FLOATconst => "FLOATconst"
case DOUBLEconst => "DOUBLEconst"
case STRINGconst => "STRINGconst"
+ case RECtype => "RECtype"
case IDENT => "IDENT"
case SELECT => "SELECT"
@@ -496,4 +501,8 @@ object TastyFormat {
case POLYtype | METHODtype => -1
case _ => 0
}
+
+ /** Map between variances and name prefixes */
+ val varianceToPrefix = Map(-1 -> '-', 0 -> '=', 1 -> '+')
+ val prefixToVariance = Map('-' -> -1, '=' -> 0, '+' -> 1)
}
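
Note: varianceToPrefix and prefixToVariance let a LAMBDAtype encode each parameter's variance as a single leading character of its name, so the format needs no separate variance section. A self-contained round-trip sketch (plain strings stand in for the compiler's Name type; the pickler uses the equivalent `+:` form):

object VariancePrefixDemo {
  // same maps as in TastyFormat above
  val varianceToPrefix = Map(-1 -> '-', 0 -> '=', 1 -> '+')
  val prefixToVariance = Map('-' -> -1, '=' -> 0, '+' -> 1)

  // pickler side (cf. TreePickler's LAMBDAtype case): prepend the marker to the name
  def encode(variance: Int, paramName: String): String =
    varianceToPrefix(variance).toString + paramName

  // unpickler side (cf. TreeUnpickler's LAMBDAtype case): split the marker off again
  def decode(encoded: String): (Int, String) =
    (prefixToVariance(encoded.head), encoded.tail)

  def main(args: Array[String]): Unit = {
    val enc = encode(1, "F")
    println(enc)          // +F
    println(decode(enc))  // (1,F)
  }
}
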
diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala
index 37b9341eb..be3999533 100644
--- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala
+++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala
@@ -76,6 +76,10 @@ class TreePickler(pickler: TastyPickler) {
case Some(label) =>
if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym)
case None =>
+ // See pos/t1957.scala for an example where this can happen.
+ // I believe it's a bug in typer: the type of an implicit argument refers
+ // to a closure parameter outside the closure itself. TODO: track this down, so that we
+ // can eliminate this case.
ctx.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.pos)
pickleForwardSymRef(sym)
}
@@ -154,7 +158,7 @@ class TreePickler(pickler: TastyPickler) {
case ConstantType(value) =>
pickleConstant(value)
case tpe: TypeRef if tpe.info.isAlias && tpe.symbol.is(Flags.AliasPreferred) =>
- pickleType(tpe.info.bounds.hi)
+ pickleType(tpe.superType)
case tpe: WithFixedSym =>
val sym = tpe.symbol
def pickleRef() =
@@ -207,8 +211,8 @@ class TreePickler(pickler: TastyPickler) {
case tpe: SuperType =>
writeByte(SUPERtype)
withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)}
- case tpe: RefinedThis =>
- writeByte(REFINEDthis)
+ case tpe: RecThis =>
+ writeByte(RECthis)
val binderAddr = pickledTypes.get(tpe.binder)
assert(binderAddr != null, tpe.binder)
writeRef(binderAddr.asInstanceOf[Addr])
@@ -221,6 +225,9 @@ class TreePickler(pickler: TastyPickler) {
pickleType(tpe.parent)
pickleType(tpe.refinedInfo, richTypes = true)
}
+ case tpe: RecType =>
+ writeByte(RECtype)
+ pickleType(tpe.parent)
case tpe: TypeAlias =>
writeByte(TYPEALIAS)
withLength {
@@ -243,6 +250,11 @@ class TreePickler(pickler: TastyPickler) {
case tpe: ExprType =>
writeByte(BYNAMEtype)
pickleType(tpe.underlying)
+ case tpe: TypeLambda =>
+ writeByte(LAMBDAtype)
+ val paramNames = tpe.typeParams.map(tparam =>
+ varianceToPrefix(tparam.paramVariance) +: tparam.paramName)
+ pickleMethodic(tpe.resultType, paramNames, tpe.paramBounds)
case tpe: MethodType if richTypes =>
writeByte(METHODtype)
pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramTypes)
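
Note: the comment added above pickleForwardSymRef records that a symbol can be referenced before its definition has been pickled. The sketch below illustrates only the general back-patching idea behind such forward references; the names and the one-slot-per-entry layout are made up and are not TastyPickler's actual encoding.

import scala.collection.mutable

object ForwardRefDemo {
  val out       = mutable.ArrayBuffer.empty[Int]           // the "pickled" output, one Int per slot
  val fixups    = mutable.ListBuffer.empty[(Int, String)]  // (placeholder index, referenced name)
  val addrOfDef = mutable.Map.empty[String, Int]

  def writeRef(name: String): Unit =
    addrOfDef.get(name) match {
      case Some(addr) => out += addr                              // backward reference: address known
      case None       => fixups += ((out.length, name)); out += -1 // forward reference: patch later
    }

  def writeDef(name: String): Unit = { addrOfDef(name) = out.length; out += 0 /* the definition itself */ }

  def patch(): Unit = for ((idx, name) <- fixups) out(idx) = addrOfDef(name)

  def main(args: Array[String]): Unit = {
    writeRef("B"); writeDef("A"); writeDef("B"); patch()
    println(out)  // ArrayBuffer(2, 0, 0): the first slot now points at B's address
  }
}
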
diff --git a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
index 91ac4ea3e..31247c005 100644
--- a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
+++ b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
@@ -197,9 +197,9 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
// ------ Reading types -----------------------------------------------------
/** Read names in an interleaved sequence of (parameter) names and types/bounds */
- def readParamNames[N <: Name](end: Addr): List[N] =
+ def readParamNames(end: Addr): List[Name] =
until(end) {
- val name = readName().asInstanceOf[N]
+ val name = readName()
skipTree()
name
}
@@ -244,11 +244,11 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
def readLengthType(): Type = {
val end = readEnd()
- def readNamesSkipParams[N <: Name]: (List[N], TreeReader) = {
+ def readNamesSkipParams: (List[Name], TreeReader) = {
val nameReader = fork
nameReader.skipTree() // skip result
val paramReader = nameReader.fork
- (nameReader.readParamNames[N](end), paramReader)
+ (nameReader.readParamNames(end), paramReader)
}
val result =
@@ -260,7 +260,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val parent = readType()
val ttag = nextUnsharedTag
if (ttag == TYPEBOUNDS || ttag == TYPEALIAS) name = name.toTypeName
- RefinedType(parent, name, rt => registeringType(rt, readType()))
+ RefinedType(parent, name, readType())
// Note that the lambda "rt => ..." is not equivalent to a wildcard closure!
// Eta expansion of the latter puts readType() out of the expression.
case APPLIEDtype =>
@@ -284,22 +284,31 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val sym = ctx.newSymbol(ctx.owner, readName().toTypeName, BindDefinedType, readType())
registerSym(start, sym)
TypeRef.withFixedSym(NoPrefix, sym.name, sym)
+ case LAMBDAtype =>
+ val (rawNames, paramReader) = readNamesSkipParams
+ val (variances, paramNames) = rawNames
+ .map(name => (prefixToVariance(name.head), name.tail.toTypeName)).unzip
+ val result = TypeLambda(paramNames, variances)(
+ pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)),
+ pt => readType())
+ goto(end)
+ result
case POLYtype =>
- val (names, paramReader) = readNamesSkipParams[TypeName]
- val result = PolyType(names)(
+ val (names, paramReader) = readNamesSkipParams
+ val result = PolyType(names.map(_.toTypeName))(
pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)),
pt => readType())
goto(end)
result
case METHODtype =>
- val (names, paramReader) = readNamesSkipParams[TermName]
- val result = MethodType(names, paramReader.readParamTypes[Type](end))(
+ val (names, paramReader) = readNamesSkipParams
+ val result = MethodType(names.map(_.toTermName), paramReader.readParamTypes[Type](end))(
mt => registeringType(mt, readType()))
goto(end)
result
case PARAMtype =>
readTypeRef() match {
- case binder: PolyType => PolyParam(binder, readNat())
+ case binder: GenericType => PolyParam(binder, readNat())
case binder: MethodType => MethodParam(binder, readNat())
}
case CLASSconst =>
@@ -322,8 +331,6 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
readPackageRef().termRef
case TYPEREF =>
val name = readName().toTypeName
- if (name.isLambdaTraitName) // Make sure curresponding lambda trait exists
- defn.LambdaTrait(name.lambdaTraitVariances)
TypeRef(readType(), name)
case TERMREF =>
readNameSplitSig() match {
@@ -332,8 +339,10 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
case THIS =>
ThisType.raw(readType().asInstanceOf[TypeRef])
- case REFINEDthis =>
- RefinedThis(readTypeRef().asInstanceOf[RefinedType])
+ case RECtype =>
+ RecType(rt => registeringType(rt, readType()))
+ case RECthis =>
+ RecThis(readTypeRef().asInstanceOf[RecType])
case SHARED =>
val ref = readAddr()
typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType())
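
Note: RECtype is read as RecType(rt => registeringType(rt, readType())), i.e. the constructor receives a function of the type being built, so the parent can already contain RecThis references back to it. A minimal self-contained model of this knot-tying (RecDemo is invented and the parent is modeled as a string):

object RecDemo {
  // The parent is computed from the instance under construction, so it can embed a
  // reference back to that very instance. The function must not force fields that
  // are not yet initialized; here it only records the instance's identity.
  final class RecType(parentExp: RecType => String) {
    val parent: String = parentExp(this)
    override def toString = s"RecType($parent)"
  }

  def main(args: Array[String]): Unit = {
    val rt = new RecType(self => s"Parent { T = <RecThis@${System.identityHashCode(self)}>.U }")
    println(rt)
  }
}
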
diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
index 71a919ca3..3dbeb4040 100644
--- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
+++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
@@ -31,7 +31,7 @@ object Scala2Unpickler {
/** Exception thrown if classfile is corrupted */
class BadSignature(msg: String) extends RuntimeException(msg)
- case class TempPolyType(tparams: List[Symbol], tpe: Type) extends UncachedGroundType {
+ case class TempPolyType(tparams: List[TypeSymbol], tpe: Type) extends UncachedGroundType {
override def fallbackToText(printer: Printer): Text =
"[" ~ printer.dclsText(tparams, ", ") ~ "]" ~ printer.toText(tpe)
}
@@ -82,8 +82,8 @@ object Scala2Unpickler {
paramNames,
paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp),
tp.resultType)
- case tp @ PolyType(paramNames) =>
- tp.derivedPolyType(paramNames, tp.paramBounds, arrayToRepeated(tp.resultType))
+ case tp: PolyType =>
+ tp.derivedPolyType(tp.paramNames, tp.paramBounds, arrayToRepeated(tp.resultType))
}
def ensureConstructor(cls: ClassSymbol, scope: Scope)(implicit ctx: Context) =
@@ -134,7 +134,7 @@ object Scala2Unpickler {
denot.info = ClassInfo( // final info, except possibly for typeparams ordering
denot.owner.thisType, denot.classSymbol, parentRefs, decls, ost)
- denot.updateTypeParams(tparams)
+ denot.ensureTypeParamsInCorrectOrder()
}
}
@@ -188,8 +188,6 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
case _ => errorBadSignature(s"a runtime exception occurred: $ex", Some(ex))
}
- private var postReadOp: Context => Unit = null
-
def run()(implicit ctx: Context) =
try {
var i = 0
@@ -197,10 +195,11 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
if (entries(i) == null && isSymbolEntry(i)) {
val savedIndex = readIndex
readIndex = index(i)
- entries(i) = readSymbol()
- if (postReadOp != null) {
- postReadOp(ctx)
- postReadOp = null
+ val sym = readSymbol()
+ entries(i) = sym
+ sym.infoOrCompleter match {
+ case info: ClassUnpickler => info.init()
+ case _ =>
}
readIndex = savedIndex
}
@@ -486,20 +485,20 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
}
ctx.newSymbol(owner, name1, flags1, localMemberUnpickler, coord = start)
case CLASSsym =>
- val infoRef = readNat()
- postReadOp = implicit ctx => atReadPos(index(infoRef), readTypeParams) // force reading type params early, so they get entered in the right order.
+ var infoRef = readNat()
+ if (isSymbolRef(infoRef)) infoRef = readNat()
if (isClassRoot)
completeRoot(
- classRoot, rootClassUnpickler(start, classRoot.symbol, NoSymbol))
+ classRoot, rootClassUnpickler(start, classRoot.symbol, NoSymbol, infoRef))
else if (isModuleClassRoot)
completeRoot(
- moduleClassRoot, rootClassUnpickler(start, moduleClassRoot.symbol, moduleClassRoot.sourceModule))
+ moduleClassRoot, rootClassUnpickler(start, moduleClassRoot.symbol, moduleClassRoot.sourceModule, infoRef))
else if (name == tpnme.REFINE_CLASS)
// create a type alias instead
ctx.newSymbol(owner, name, flags, localMemberUnpickler, coord = start)
else {
def completer(cls: Symbol) = {
- val unpickler = new LocalUnpickler() withDecls symScope(cls)
+ val unpickler = new ClassUnpickler(infoRef) withDecls symScope(cls)
if (flags is ModuleClass)
unpickler withSourceModule (implicit ctx =>
cls.owner.info.decls.lookup(cls.name.sourceModuleName)
@@ -582,8 +581,27 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
object localMemberUnpickler extends LocalUnpickler
- def rootClassUnpickler(start: Coord, cls: Symbol, module: Symbol) =
- (new LocalUnpickler with SymbolLoaders.SecondCompleter {
+ class ClassUnpickler(infoRef: Int) extends LocalUnpickler with TypeParamsCompleter {
+ private def readTypeParams()(implicit ctx: Context): List[TypeSymbol] = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ if (tag == POLYtpe) {
+ val unusedRestpeRef = readNat()
+ until(end, readSymbolRef).asInstanceOf[List[TypeSymbol]]
+ } else Nil
+ }
+ private def loadTypeParams(implicit ctx: Context) =
+ atReadPos(index(infoRef), readTypeParams)
+
+ /** Force reading type params early; we need them in setClassInfo of subclasses. */
+ def init()(implicit ctx: Context) = loadTypeParams
+
+ def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol] =
+ loadTypeParams
+ }
+
+ def rootClassUnpickler(start: Coord, cls: Symbol, module: Symbol, infoRef: Int) =
+ (new ClassUnpickler(infoRef) with SymbolLoaders.SecondCompleter {
override def startCoord(denot: SymDenotation): Coord = start
}) withDecls symScope(cls) withSourceModule (_ => module)
@@ -620,9 +638,9 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
def removeSingleton(tp: Type): Type =
if (tp isRef defn.SingletonClass) defn.AnyType else tp
def elim(tp: Type): Type = tp match {
- case tp @ RefinedType(parent, name) =>
+ case tp @ RefinedType(parent, name, rinfo) =>
val parent1 = elim(tp.parent)
- tp.refinedInfo match {
+ rinfo match {
case TypeAlias(info: TypeRef) if isBound(info) =>
RefinedType(parent1, name, info.symbol.info)
case info: TypeRef if isBound(info) =>
@@ -632,8 +650,14 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
case info =>
tp.derivedRefinedType(parent1, name, info)
}
- case tp @ TypeRef(pre, tpnme.hkApply) =>
- tp.derivedSelect(elim(pre))
+ case tp @ HKApply(tycon, args) =>
+ val tycon1 = tycon.safeDealias
+ def mapArg(arg: Type) = arg match {
+ case arg: TypeRef if isBound(arg) => arg.symbol.info
+ case _ => arg
+ }
+ if (tycon1 ne tycon) elim(tycon1.appliedTo(args))
+ else tp.derivedAppliedType(tycon, args.map(mapArg))
case _ =>
tp
}
@@ -709,7 +733,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
else TypeRef(pre, sym.name.asTypeName)
val args = until(end, readTypeRef)
if (sym == defn.ByNameParamClass2x) ExprType(args.head)
- else if (args.nonEmpty) tycon.safeAppliedTo(etaExpandIfHK(sym.typeParams, args))
+ else if (args.nonEmpty) tycon.safeAppliedTo(EtaExpandIfHK(sym.typeParams, args))
else if (sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams)
else tycon
case TYPEBOUNDStpe =>
@@ -722,13 +746,12 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
val parent = parents.reduceLeft(AndType(_, _))
if (decls.isEmpty) parent
else {
- def addRefinement(tp: Type, sym: Symbol) = {
- def subst(info: Type, rt: RefinedType) =
- if (clazz.isClass) info.substThis(clazz.asClass, RefinedThis(rt))
- else info // turns out some symbols read into `clazz` are not classes, not sure why this is the case.
- RefinedType(tp, sym.name, subst(sym.info, _))
- }
- (parent /: decls.toList)(addRefinement).asInstanceOf[RefinedType]
+ def subst(info: Type, rt: RecType) =
+ if (clazz.isClass) info.substThis(clazz.asClass, RecThis(rt))
+ else info // turns out some symbols read into `clazz` are not classes; not sure why this is the case.
+ def addRefinement(tp: Type, sym: Symbol) = RefinedType(tp, sym.name, sym.info)
+ val refined = (parent /: decls.toList)(addRefinement)
+ RecType.closeOver(rt => subst(refined, rt))
}
case CLASSINFOtpe =>
val clazz = readSymbolRef()
@@ -744,7 +767,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
case POLYtpe =>
val restpe = readTypeRef()
val typeParams = until(end, readSymbolRef)
- if (typeParams.nonEmpty) TempPolyType(typeParams, restpe.widenExpr)
+ if (typeParams.nonEmpty) TempPolyType(typeParams.asInstanceOf[List[TypeSymbol]], restpe.widenExpr)
else ExprType(restpe)
case EXISTENTIALtpe =>
val restpe = readTypeRef()
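
Note: after this change, refinements read from a Scala 2 refinement type are first folded over the declarations and only then wrapped in a RecType, with the class's this-type substituted by the RecType's own self reference. A schematic, string-based sketch of that order of operations (all names below are made up):

object RefinementFoldDemo {
  // types are strings; "this." marks the old self reference and "z" the new one
  def addRefinement(tp: String, decl: (String, String)): String =
    s"$tp { ${decl._1}: ${decl._2} }"

  def closeOver(parentExp: String => String): String = {
    val parent = parentExp("z")            // hand the self-reference name to the parent
    s"{ z => $parent }"
  }

  def main(args: Array[String]): Unit = {
    val decls   = List("T" -> "this.U", "U" -> "Int")
    val refined = decls.foldLeft("Parent")(addRefinement)  // same shape as (parent /: decls)(addRefinement)
    println(closeOver(self => refined.replace("this.", s"$self.")))
    // { z => Parent { T: z.U } { U: Int } }
  }
}
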
diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala
index ded17c67c..c535c4241 100644
--- a/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -223,7 +223,9 @@ object Parsers {
} // DEBUG
private def expectedMsg(token: Int): String =
- showToken(token) + " expected but " + showToken(in.token) + " found."
+ expectedMessage(showToken(token))
+ private def expectedMessage(what: String): String =
+ s"$what expected but ${showToken(in.token)} found"
/** Consume one token of the specified type, or
* signal an error if it is not there.
@@ -648,6 +650,7 @@ object Parsers {
/* ------------- TYPES ------------------------------------------------------ */
/** Type ::= FunArgTypes `=>' Type
+ * | HkTypeParamClause `->' Type
* | InfixType
* FunArgTypes ::= InfixType
* | `(' [ FunArgType {`,' FunArgType } ] `)'
@@ -677,6 +680,12 @@ object Parsers {
}
}
}
+ else if (in.token == LBRACKET) {
+ val tparams = typeParamClause(ParamOwner.TypeParam)
+ if (isIdent && in.name.toString == "->")
+ atPos(in.skipToken())(TypeLambdaTree(tparams, typ()))
+ else { syntaxErrorOrIncomplete(expectedMessage("`->'")); typ() }
+ }
else infixType()
in.token match {
@@ -1542,7 +1551,7 @@ object Parsers {
* TypTypeParam ::= {Annotation} Id [HkTypePamClause] TypeBounds
*
* HkTypeParamClause ::= `[' HkTypeParam {`,' HkTypeParam} `]'
- * HkTypeParam ::= {Annotation} ['+' | `-'] (Id | _') TypeBounds
+ * HkTypeParam ::= {Annotation} ['+' | `-'] (Id[HkTypeParamClause] | `_') TypeBounds
*/
def typeParamClause(ownerKind: ParamOwner.Value): List[TypeDef] = inBrackets {
def typeParam(): TypeDef = {
@@ -1575,9 +1584,7 @@ object Parsers {
in.nextToken()
ctx.freshName(nme.USCORE_PARAM_PREFIX).toTypeName
}
- val hkparams =
- if (ownerKind == ParamOwner.TypeParam) Nil
- else typeParamClauseOpt(ParamOwner.TypeParam)
+ val hkparams = typeParamClauseOpt(ParamOwner.TypeParam)
val bounds =
if (isConcreteOwner) typeParamBounds(name)
else typeBounds()
@@ -2129,17 +2136,10 @@ object Parsers {
var exitOnError = false
while (!isStatSeqEnd && in.token != CASE && !exitOnError) {
setLastStatOffset()
- if (in.token == IMPORT) {
+ if (in.token == IMPORT)
stats ++= importClause()
- }
- else if (isExprIntro) {
- val t = expr(Location.InBlock)
- stats += t
- t match {
- case _: Function => return stats.toList
- case _ =>
- }
- }
+ else if (isExprIntro)
+ stats += expr(Location.InBlock)
else if (isDefIntro(localModifierTokens))
if (in.token == IMPLICIT) {
val start = in.skipToken()
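
Note: the grammar addition Type ::= HkTypeParamClause `->' Type, together with allowing a higher-kinded parameter to carry its own parameter clause, gives type lambdas a direct surface syntax in this version of Dotty. A sketch of source that the updated parser is meant to accept, assuming type lambdas may appear wherever a Type is expected; the alias names are invented and this syntax is specific to this compiler version:

object TypeLambdaSyntaxDemo {
  type Pair   = [X] -> (X, X)       // a plain unary type lambda
  type AppInt = [F[_]] -> F[Int]    // the parameter may itself be higher-kinded
}
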
diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala
index 1e2ba0b4d..acf4514ea 100644
--- a/src/dotty/tools/dotc/printing/PlainPrinter.scala
+++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala
@@ -6,6 +6,7 @@ import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Constants._, De
import Contexts.Context, Scopes.Scope, Denotations.Denotation, Annotations.Annotation
import StdNames.{nme, tpnme}
import ast.Trees._, ast._
+import config.Config
import java.lang.Integer.toOctalString
import config.Config.summarizeDepth
import scala.annotation.switch
@@ -13,6 +14,8 @@ import scala.annotation.switch
class PlainPrinter(_ctx: Context) extends Printer {
protected[this] implicit def ctx: Context = _ctx.addMode(Mode.Printing)
+ private var openRecs: List[RecType] = Nil
+
protected def maxToTextRecursions = 100
protected final def controlled(op: => Text): Text =
@@ -48,9 +51,8 @@ class PlainPrinter(_ctx: Context) extends Printer {
homogenize(tp1) & homogenize(tp2)
case OrType(tp1, tp2) =>
homogenize(tp1) | homogenize(tp2)
- case tp @ TypeRef(_, tpnme.hkApply) =>
- val tp1 = tp.reduceProjection
- if (tp1 eq tp) tp else homogenize(tp1)
+ case tp: SkolemType =>
+ homogenize(tp.info)
case tp: LazyRef =>
homogenize(tp.ref)
case _ =>
@@ -58,6 +60,8 @@ class PlainPrinter(_ctx: Context) extends Printer {
}
else tp
+ private def selfRecName(n: Int) = s"z$n"
+
/** Render elements alternating with `sep` string */
protected def toText(elems: Traversable[Showable], sep: String) =
Text(elems map (_ toText this), sep)
@@ -105,12 +109,36 @@ class PlainPrinter(_ctx: Context) extends Printer {
protected def toTextRefinement(rt: RefinedType) =
(refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close
+ protected def argText(arg: Type): Text = arg match {
+ case arg: TypeBounds => "_" ~ toTextGlobal(arg)
+ case _ => toTextGlobal(arg)
+ }
+
+ /** The text for a TypeLambda
+ *
+ * [v_1 p_1: B_1, ..., v_n p_n: B_n] -> T
+ *
+ * where
+ * @param paramNames = p_1, ..., p_n
+ * @param variances = v_1, ..., v_n
+ * @param argBoundss = B_1, ..., B_n
+ * @param body = T
+ */
+ protected def typeLambdaText(paramNames: List[String], variances: List[Int], argBoundss: List[TypeBounds], body: Type): Text = {
+ def lambdaParamText(variance: Int, name: String, bounds: TypeBounds): Text =
+ varianceString(variance) ~ name ~ toText(bounds)
+ changePrec(GlobalPrec) {
+ "[" ~ Text((variances, paramNames, argBoundss).zipped.map(lambdaParamText), ", ") ~
+ "] -> " ~ toTextGlobal(body)
+ }
+ }
+
/** The longest sequence of refinement types, starting at given type
* and following parents.
*/
private def refinementChain(tp: Type): List[Type] =
tp :: (tp match {
- case RefinedType(parent, _) => refinementChain(parent.stripTypeVar)
+ case tp: RefinedType => refinementChain(tp.parent.stripTypeVar)
case _ => Nil
})
@@ -130,6 +158,12 @@ class PlainPrinter(_ctx: Context) extends Printer {
val parent :: (refined: List[RefinedType @unchecked]) =
refinementChain(tp).reverse
toTextLocal(parent) ~ "{" ~ Text(refined map toTextRefinement, "; ").close ~ "}"
+ case tp: RecType =>
+ try {
+ openRecs = tp :: openRecs
+ "{" ~ selfRecName(openRecs.length) ~ " => " ~ toTextGlobal(tp.parent) ~ "}"
+ }
+ finally openRecs = openRecs.tail
case AndType(tp1, tp2) =>
changePrec(AndPrec) { toText(tp1) ~ " & " ~ toText(tp2) }
case OrType(tp1, tp2) =>
@@ -151,6 +185,8 @@ class PlainPrinter(_ctx: Context) extends Printer {
}
case tp: ExprType =>
changePrec(GlobalPrec) { "=> " ~ toText(tp.resultType) }
+ case tp: TypeLambda =>
+ typeLambdaText(tp.paramNames.map(_.toString), tp.variances, tp.paramBounds, tp.resultType)
case tp: PolyType =>
def paramText(name: TypeName, bounds: TypeBounds) =
toText(polyParamName(name)) ~ polyHash(tp) ~ toText(bounds)
@@ -163,6 +199,8 @@ class PlainPrinter(_ctx: Context) extends Printer {
toText(polyParamName(pt.paramNames(n))) ~ polyHash(pt)
case AnnotatedType(tpe, annot) =>
toTextLocal(tpe) ~ " " ~ toText(annot)
+ case HKApply(tycon, args) =>
+ toTextLocal(tycon) ~ "[" ~ Text(args.map(argText), ", ") ~ "]"
case tp: TypeVar =>
if (tp.isInstantiated)
toTextLocal(tp.instanceOpt) ~ "'" // debug for now, so that we can see where the TypeVars are.
@@ -175,7 +213,7 @@ class PlainPrinter(_ctx: Context) extends Printer {
else toText(tp.origin)
}
case tp: LazyRef =>
- "LazyRef(" ~ toTextGlobal(tp.ref) ~ ")"
+ "LazyRef(" ~ toTextGlobal(tp.ref) ~ ")" // TODO: only print this during debug mode?
case _ =>
tp.fallbackToText(this)
}
@@ -189,8 +227,8 @@ class PlainPrinter(_ctx: Context) extends Printer {
protected def simpleNameString(sym: Symbol): String = nameString(sym.name)
/** If -uniqid is set, the hashcode of the polytype, after a # */
- protected def polyHash(pt: PolyType): Text =
- "#" + pt.hashCode provided ctx.settings.uniqid.value
+ protected def polyHash(pt: GenericType): Text =
+ if (ctx.settings.uniqid.value) "#" + pt.hashCode else ""
/** If -uniqid is set, the unique id of symbol, after a # */
protected def idString(sym: Symbol): String =
@@ -232,11 +270,12 @@ class PlainPrinter(_ctx: Context) extends Printer {
toText(value)
case MethodParam(mt, idx) =>
nameString(mt.paramNames(idx))
- case tp: RefinedThis =>
- s"${nameString(tp.binder.typeSymbol)}{...}.this"
+ case tp: RecThis =>
+ val idx = openRecs.reverse.indexOf(tp.binder)
+ if (idx >= 0) selfRecName(idx + 1)
+ else "{...}.this" // TODO move underlying type to an addendum, e.g. ... z3 ... where z3: ...
case tp: SkolemType =>
- if (homogenizedView) toText(tp.info)
- else "<unknown instance of type " ~ toTextGlobal(tp.info) ~ ">"
+ if (homogenizedView) toText(tp.info) else tp.repr
}
}
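
Note: toText keeps a stack of the RecTypes currently being printed (openRecs), so a RecThis can be rendered as the z-name introduced by its binder: the binder pushes itself, prints its parent, and pops again, while RecThis looks up its binder's depth in the stack. A small self-contained sketch of the same scheme over a toy type AST (all names are invented):

object RecPrinterDemo {
  sealed trait Tp
  final case class Rec(parentExp: Rec => Tp) extends Tp { val parent: Tp = parentExp(this) }
  final case class RecSelf(binder: Rec) extends Tp
  final case class Named(name: String) extends Tp
  final case class Refined(parent: Tp, name: String, info: Tp) extends Tp

  private var openRecs: List[Rec] = Nil
  private def selfRecName(n: Int) = s"z$n"

  def show(tp: Tp): String = tp match {
    case rec: Rec =>
      openRecs = rec :: openRecs
      try s"{ ${selfRecName(openRecs.length)} => ${show(rec.parent)} }"
      finally openRecs = openRecs.tail
    case RecSelf(binder) =>
      val idx = openRecs.reverse.indexOf(binder)
      if (idx >= 0) selfRecName(idx + 1) else "{...}.this"  // binder not currently open
    case Named(n)              => n
    case Refined(parent, n, i) => s"${show(parent)} { $n: ${show(i)} }"
  }

  def main(args: Array[String]): Unit = {
    val rt = Rec(self => Refined(Named("Parent"), "T", RecSelf(self)))
    println(show(rt))  // { z1 => Parent { T: z1 } }
  }
}
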
diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala
index 614a274b4..ca62827af 100644
--- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala
+++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala
@@ -12,6 +12,7 @@ import typer.ProtoTypes.{SelectionProto, ViewProto, FunProto, IgnoredProto, dumm
import Trees._
import TypeApplications._
import Decorators._
+import config.Config
import scala.annotation.switch
import language.implicitConversions
@@ -94,10 +95,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
}
override def toText(tp: Type): Text = controlled {
- def argText(arg: Type): Text = arg match {
- case arg: TypeBounds => "_" ~ toTextGlobal(arg)
- case _ => toTextGlobal(arg)
- }
def toTextTuple(args: List[Type]): Text =
"(" ~ toTextGlobal(args, ", ") ~ ")"
def toTextFunction(args: List[Type]): Text =
@@ -116,35 +113,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
if (defn.isFunctionClass(cls)) return toTextFunction(args)
if (defn.isTupleClass(cls)) return toTextTuple(args)
return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close
- case tp @ TypeLambda(variances, argBoundss, body) =>
- val prefix = ((('X' - 'A') + lambdaNestingLevel) % 26 + 'A').toChar
- val paramNames = variances.indices.toList.map(prefix.toString + _)
- val instantiate = new TypeMap {
- def contains(tp1: Type, tp2: Type): Boolean =
- tp1.eq(tp2) || {
- tp1.stripTypeVar match {
- case RefinedType(parent, _) => contains(parent, tp2)
- case _ => false
- }
- }
- def apply(t: Type): Type = t match {
- case TypeRef(RefinedThis(rt), name) if name.isHkArgName && contains(tp, rt) =>
- // Make up a name that prints as "Xi". Need to be careful we do not
- // accidentally unique-hash to something else. That's why we can't
- // use prefix = NoPrefix or a WithFixedSym instance.
- TypeRef.withSymAndName(
- defn.EmptyPackageClass.thisType, defn.AnyClass,
- paramNames(name.hkArgIndex).toTypeName)
- case _ =>
- mapOver(t)
- }
- }
- val instArgs = argBoundss.map(instantiate).asInstanceOf[List[TypeBounds]]
- val instBody = instantiate(body).dropAlias
- lambdaNestingLevel += 1
- try
- return typeLambdaText(paramNames, variances, instArgs, instBody)
- finally lambdaNestingLevel -=1
case tp: TypeRef =>
val hideType = tp.symbol is AliasPreferred
if (hideType && !ctx.phase.erasedTypes && !tp.symbol.isCompleting) {
@@ -184,30 +152,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
def blockText[T >: Untyped](trees: List[Tree[T]]): Text =
"{" ~ toText(trees, "\n") ~ "}"
- /** The text for a TypeLambda
- *
- * [v_1 p_1: B_1, ..., v_n p_n: B_n] -> T
- *
- * where
- * @param paramNames = p_1, ..., p_n
- * @param variances = v_1, ..., v_n
- * @param argBoundss = B_1, ..., B_n
- * @param body = T
- */
- def typeLambdaText(paramNames: List[String], variances: List[Int], argBoundss: List[TypeBounds], body: Type): Text = {
- def lambdaParamText(variance: Int, name: String, bounds: TypeBounds): Text =
- varianceString(variance) ~ name ~ toText(bounds)
- changePrec(GlobalPrec) {
- "[" ~ Text((variances, paramNames, argBoundss).zipped.map(lambdaParamText), ", ") ~
- "] -> " ~ toTextGlobal(body)
- }
- }
-
override def toText[T >: Untyped](tree: Tree[T]): Text = controlled {
import untpd.{modsDeco => _, _}
- /** Print modifiers form symbols if tree has type, overriding the untpd behavior. */
+ /** Print modifiers from symbols if tree has type, overriding the untpd behavior. */
implicit def modsDeco(mdef: untpd.MemberDef)(implicit ctx: Context): untpd.ModsDeco =
tpd.modsDeco(mdef.asInstanceOf[tpd.MemberDef]).asInstanceOf[untpd.ModsDeco]
@@ -264,6 +213,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
Text(mods.annotations.map(annotText), " ") ~~ flagsText ~~ (kw provided !suppressKw)
}
+ def varianceText(mods: untpd.Modifiers) =
+ if (mods is Covariant) "+"
+ else if (mods is Contravariant) "-"
+ else ""
+
def argText(arg: Tree): Text = arg match {
case arg: TypeBoundsTree => "_" ~ toTextGlobal(arg)
case arg: TypeTree =>
@@ -308,7 +262,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
}
def toTextPackageId(pid: Tree): Text =
- if (homogenizedView) toTextLocal(pid.tpe)
+ if (homogenizedView && pid.hasType) toTextLocal(pid.tpe)
else toTextLocal(pid)
var txt: Text = tree match {
@@ -345,7 +299,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
"new " ~ {
tpt match {
case tpt: Template => toTextTemplate(tpt, ofNew = true)
- case _ => toTextLocal(tpt.typeOpt.underlyingClassRef(refinementOK = false))
+ case _ =>
+ if (tpt.hasType)
+ toTextLocal(tpt.typeOpt.underlyingClassRef(refinementOK = false))
+ else
+ toTextLocal(tpt)
}
}
case Pair(l, r) =>
@@ -398,6 +356,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
toTextLocal(tpt) ~ " " ~ blockText(refines)
case AppliedTypeTree(tpt, args) =>
toTextLocal(tpt) ~ "[" ~ Text(args map argText, ", ") ~ "]"
+ case TypeLambdaTree(tparams, body) =>
+ changePrec(GlobalPrec) {
+ tparamsText(tparams) ~ " -> " ~ toText(body)
+ }
case ByNameTypeTree(tpt) =>
"=> " ~ toTextLocal(tpt)
case TypeBoundsTree(lo, hi) =>
@@ -431,7 +393,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
case tree @ TypeDef(name, rhs) =>
def typeDefText(rhsText: Text) =
dclTextOr {
- modText(tree.mods, "type") ~~ nameIdText(tree) ~
+ modText(tree.mods, "type") ~~ (varianceText(tree.mods) ~ nameIdText(tree)) ~
withEnclosingDef(tree) {
val rhsText1 = if (tree.hasType) toText(tree.symbol.info) else rhsText
tparamsText(tree.tparams) ~ rhsText1
diff --git a/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/src/dotty/tools/dotc/sbt/ExtractAPI.scala
index c0a3c3dfe..437e36bb9 100644
--- a/src/dotty/tools/dotc/sbt/ExtractAPI.scala
+++ b/src/dotty/tools/dotc/sbt/ExtractAPI.scala
@@ -174,9 +174,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder
val name = if (sym.is(ModuleClass)) sym.fullName.sourceModuleName else sym.fullName
- val tparams = sym.typeParams.map(tparam => apiTypeParameter(
- tparam.name.toString, tparam.variance,
- tparam.info.bounds.lo, tparam.info.bounds.lo))
+ val tparams = sym.typeParams.map(apiTypeParameter)
val structure = apiClassStructure(sym)
@@ -272,6 +270,9 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder
def apiDef(sym: TermSymbol): api.Def = {
def paramLists(t: Type, start: Int = 0): List[api.ParameterList] = t match {
+ case pt: PolyType =>
+ assert(start == 0)
+ paramLists(pt.resultType)
case mt @ MethodType(pnames, ptypes) =>
// TODO: We shouldn't have to work so hard to find the default parameters
// of a method, Dotty should expose a convenience method for that, see #1143
@@ -361,6 +362,10 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder
val apiTycon = simpleType(tycon)
val apiArgs = args.map(processArg)
new api.Parameterized(apiTycon, apiArgs.toArray)
+ case TypeLambda(tparams, res) =>
+ val apiTparams = tparams.map(apiTypeParameter)
+ val apiRes = apiType(res)
+ new api.Polymorphic(apiRes, apiTparams.toArray)
case rt: RefinedType =>
val name = rt.refinedName.toString
val parent = apiType(rt.parent)
@@ -382,6 +387,13 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder
Array()
}
new api.Structure(strict2lzy(Array(parent)), strict2lzy(decl), strict2lzy(Array()))
+ case tp: RecType =>
+ apiType(tp.parent)
+ case RecThis(recType) =>
+ // `tp` must be present inside `recType`, so calling `apiType` on
+ // `recType` would lead to an infinite recursion; we avoid this by
+ // computing the representation of `recType` lazily.
+ apiLazy(recType)
case tp: AndOrType =>
val parents = List(apiType(tp.tp1), apiType(tp.tp2))
@@ -403,9 +415,9 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder
apiType(tpe)
case tp: ThisType =>
apiThis(tp.cls)
- case RefinedThis(binder) =>
- apiThis(binder.typeSymbol)
case tp: ParamType =>
+ // TODO: Distinguishing parameters based on their names alone is not enough,
+ // the binder is also needed (at least for type lambdas).
new api.ParameterRef(tp.paramName.toString)
case tp: LazyRef =>
apiType(tp.ref)
@@ -427,12 +439,23 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder
Constants.emptyType
}
+ def apiLazy(tp: => Type): api.Type = {
+ // TODO: The sbt api needs a convenient way to make a lazy type.
+ // For now, we repurpose Structure for this.
+ val apiTp = lzy(Array(apiType(tp)))
+ new api.Structure(apiTp, strict2lzy(Array()), strict2lzy(Array()))
+ }
+
def apiThis(sym: Symbol): api.Singleton = {
val pathComponents = sym.ownersIterator.takeWhile(!_.isEffectiveRoot)
.map(s => new api.Id(s.name.toString))
new api.Singleton(new api.Path(pathComponents.toArray.reverse ++ Array(Constants.thisPath)))
}
+ def apiTypeParameter(tparam: TypeParamInfo): api.TypeParameter =
+ apiTypeParameter(tparam.paramName.toString, tparam.paramVariance,
+ tparam.paramBounds.lo, tparam.paramBounds.hi)
+
def apiTypeParameter(name: String, variance: Int, lo: Type, hi: Type): api.TypeParameter =
new api.TypeParameter(name, Array(), Array(), apiVariance(variance),
apiType(lo), apiType(hi))
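
Note: apiLazy breaks the cycle between a RecType and the RecThis inside it by putting the recursive occurrence behind a thunk that sbt only forces later. A minimal self-contained sketch of the same idea on a cyclic structure (the names are invented; sbt's api.Structure/lzy plumbing is only alluded to):

object LazyCycleDemo {
  // a node whose representation would recurse forever if the back edge were followed eagerly
  final class Node(val name: String) { var back: Node = null }

  // represent the back edge as a thunk instead of descending into it right away
  final case class Repr(name: String, back: () => Repr)

  def repr(n: Node): Repr = Repr(n.name, () => repr(n.back))

  def main(args: Array[String]): Unit = {
    val a = new Node("a"); a.back = a   // a refers back to itself
    val r = repr(a)                     // terminates: the cycle sits behind a thunk
    println(r.name)                     // a
    println(r.back().name)              // a, forced one step on demand
  }
}
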
diff --git a/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
index 181d6a2d7..026a518ce 100644
--- a/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
+++ b/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
@@ -163,8 +163,7 @@ private class ExtractDependenciesCollector(implicit val ctx: Context) extends tp
sym.eq(NoSymbol) ||
sym.isEffectiveRoot ||
sym.isAnonymousFunction ||
- sym.isAnonymousClass ||
- sym.isLambdaTrait
+ sym.isAnonymousClass
private def addInheritanceDependency(sym: Symbol): Unit =
_topLevelInheritanceDependencies += sym.topLevelClass
diff --git a/src/dotty/tools/dotc/transform/ElimRepeated.scala b/src/dotty/tools/dotc/transform/ElimRepeated.scala
index 30778267d..258b7f234 100644
--- a/src/dotty/tools/dotc/transform/ElimRepeated.scala
+++ b/src/dotty/tools/dotc/transform/ElimRepeated.scala
@@ -74,7 +74,7 @@ class ElimRepeated extends MiniPhaseTransform with InfoTransformer with Annotati
case SeqLiteral(elems, elemtpt) =>
JavaSeqLiteral(elems, elemtpt)
case _ =>
- val elemType = tree.tpe.firstBaseArgInfo(defn.SeqClass)
+ val elemType = tree.tpe.elemType
var elemClass = elemType.classSymbol
if (defn.PhantomClasses contains elemClass) elemClass = defn.ObjectClass
ref(defn.DottyArraysModule)
diff --git a/src/dotty/tools/dotc/transform/ElimStaticThis.scala b/src/dotty/tools/dotc/transform/ElimStaticThis.scala
index 3afcfa685..0601e0122 100644
--- a/src/dotty/tools/dotc/transform/ElimStaticThis.scala
+++ b/src/dotty/tools/dotc/transform/ElimStaticThis.scala
@@ -27,8 +27,8 @@ class ElimStaticThis extends MiniPhaseTransform {
override def transformIdent(tree: tpd.Ident)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
if (ctx.owner.enclosingMethod.is(JavaStatic)) {
tree.tpe match {
- case TermRef(thiz: ThisType, _) if thiz.underlying.typeSymbol.is(ModuleClass) =>
- ref(thiz.underlying.typeSymbol.sourceModule).select(tree.symbol)
+ case TermRef(thiz: ThisType, _) if thiz.cls.is(ModuleClass) =>
+ ref(thiz.cls.sourceModule).select(tree.symbol)
case TermRef(thiz: ThisType, _) =>
assert(tree.symbol.is(Flags.JavaStatic))
tree
diff --git a/src/dotty/tools/dotc/transform/FullParameterization.scala b/src/dotty/tools/dotc/transform/FullParameterization.scala
index be64df384..d2052d8cb 100644
--- a/src/dotty/tools/dotc/transform/FullParameterization.scala
+++ b/src/dotty/tools/dotc/transform/FullParameterization.scala
@@ -95,7 +95,7 @@ trait FullParameterization {
*/
def fullyParameterizedType(info: Type, clazz: ClassSymbol, abstractOverClass: Boolean = true, liftThisType: Boolean = false)(implicit ctx: Context): Type = {
val (mtparamCount, origResult) = info match {
- case info @ PolyType(mtnames) => (mtnames.length, info.resultType)
+ case info: PolyType => (info.paramNames.length, info.resultType)
case info: ExprType => (0, info.resultType)
case _ => (0, info)
}
@@ -111,18 +111,18 @@ trait FullParameterization {
}
/** Replace class type parameters by the added type parameters of the polytype `pt` */
- def mapClassParams(tp: Type, pt: PolyType): Type = {
+ def mapClassParams(tp: Type, pt: GenericType): Type = {
val classParamsRange = (mtparamCount until mtparamCount + ctparams.length).toList
tp.substDealias(ctparams, classParamsRange map (PolyParam(pt, _)))
}
/** The bounds for the added type parameters of the polytype `pt` */
- def mappedClassBounds(pt: PolyType): List[TypeBounds] =
+ def mappedClassBounds(pt: GenericType): List[TypeBounds] =
ctparams.map(tparam => mapClassParams(tparam.info, pt).bounds)
info match {
- case info @ PolyType(mtnames) =>
- PolyType(mtnames ++ ctnames)(
+ case info: PolyType =>
+ PolyType(info.paramNames ++ ctnames)(
pt =>
(info.paramBounds.map(mapClassParams(_, pt).bounds) ++
mappedClassBounds(pt)).mapConserve(_.subst(info, pt).bounds),
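
Note: fullyParameterizedType keeps the method's own type parameters and appends fresh parameters standing for the enclosing class's type parameters, so the resulting signature no longer depends on the class instance. A source-level sketch of the same transformation in plain Scala (Box, getIndexFP and the FP naming are made up for illustration):

class Box[A](val elems: List[A]) {
  def getIndex[B >: A](x: B): Int = elems.indexOf(x)
}

object BoxFP {
  // fully parameterized counterpart: the class type parameter A is appended after the
  // method's own type parameters, and `this` becomes an explicit parameter
  def getIndexFP[B >: A, A](self: Box[A], x: B): Int = self.elems.indexOf(x)

  def main(args: Array[String]): Unit =
    println(getIndexFP(new Box(List(1, 2, 3)), 2))  // 1
}
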
diff --git a/src/dotty/tools/dotc/transform/PostTyper.scala b/src/dotty/tools/dotc/transform/PostTyper.scala
index fcde59b24..b71284049 100644
--- a/src/dotty/tools/dotc/transform/PostTyper.scala
+++ b/src/dotty/tools/dotc/transform/PostTyper.scala
@@ -13,6 +13,7 @@ import Types._, Contexts._, Constants._, Names._, NameOps._, Flags._, DenotTrans
import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Scopes._, Denotations._
import util.Positions._
import Decorators._
+import config.Printers._
import Symbols._, TypeUtils._
/** A macro transform that runs immediately after typer and that performs the following functions:
@@ -115,6 +116,17 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
}
}
+ /** If the type of `tree` is a TermRefWithSignature with an underdefined
+ * signature, narrow the type by re-computing the signature (which should
+ * be fully-defined by now).
+ */
+ private def fixSignature[T <: Tree](tree: T)(implicit ctx: Context): T = tree.tpe match {
+ case tpe: TermRefWithSignature if tpe.signature.isUnderDefined =>
+ typr.println(i"fixing $tree with type ${tree.tpe.widen.toString} with sig ${tpe.signature} to ${tpe.widen.signature}")
+ tree.withType(TermRef.withSig(tpe.prefix, tpe.name, tpe.widen.signature)).asInstanceOf[T]
+ case _ => tree
+ }
+
class PostTyperTransformer extends Transformer {
private var inJavaAnnot: Boolean = false
@@ -178,9 +190,9 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
}
val (tycon, args) = decompose(tree)
tycon.tpe.widen match {
- case PolyType(pnames) =>
+ case tp: PolyType =>
val (namedArgs, otherArgs) = args.partition(isNamedArg)
- val args1 = reorderArgs(pnames, namedArgs.asInstanceOf[List[NamedArg]], otherArgs)
+ val args1 = reorderArgs(tp.paramNames, namedArgs.asInstanceOf[List[NamedArg]], otherArgs)
TypeApply(tycon, args1).withPos(tree.pos).withType(tree.tpe)
case _ =>
tree
@@ -192,10 +204,10 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
case tree: Ident =>
tree.tpe match {
case tpe: ThisType => This(tpe.cls).withPos(tree.pos)
- case _ => paramFwd.adaptRef(tree)
+ case _ => paramFwd.adaptRef(fixSignature(tree))
}
case tree: Select =>
- transformSelect(paramFwd.adaptRef(tree), Nil)
+ transformSelect(paramFwd.adaptRef(fixSignature(tree)), Nil)
case tree: TypeApply =>
val tree1 @ TypeApply(fn, args) = normalizeTypeArgs(tree)
Checking.checkBounds(args, fn.tpe.widen.asInstanceOf[PolyType])
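
Note: normalizeTypeArgs now takes the parameter names from the PolyType itself and merges named and positional type arguments back into declaration order. The following is a schematic, self-contained version of such a reordering over strings; it illustrates the idea only and is not dotc's reorderArgs.

object ReorderArgsDemo {
  // named arguments go to their parameter's slot; positional arguments fill the
  // remaining slots from left to right (assumes enough positional arguments remain)
  def reorderArgs(pnames: List[String], named: List[(String, String)], others: List[String]): List[String] =
    pnames match {
      case pname :: rest =>
        named.find(_._1 == pname) match {
          case Some((_, arg)) => arg :: reorderArgs(rest, named.filterNot(_._1 == pname), others)
          case None           => others.head :: reorderArgs(rest, named, others.tail)
        }
      case Nil => others
    }

  def main(args: Array[String]): Unit =
    println(reorderArgs(List("A", "B", "C"), List("C" -> "Int"), List("String", "Boolean")))
    // List(String, Boolean, Int)
}
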
diff --git a/src/dotty/tools/dotc/transform/SuperAccessors.scala b/src/dotty/tools/dotc/transform/SuperAccessors.scala
index ae9c493ae..6af991f27 100644
--- a/src/dotty/tools/dotc/transform/SuperAccessors.scala
+++ b/src/dotty/tools/dotc/transform/SuperAccessors.scala
@@ -167,12 +167,6 @@ class SuperAccessors(thisTransformer: DenotTransformer) {
val accName = sym.name.protectedAccessorName
- def isThisType(tpe: Type): Boolean = tpe match {
- case tpe: ThisType => !tpe.cls.is(PackageClass)
- case tpe: TypeProxy => isThisType(tpe.underlying)
- case _ => false
- }
-
// if the result type depends on the this type of an enclosing class, the accessor
// has to take an object of exactly this type, otherwise it's more general
val receiverType =
diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala
index a9184c7e5..aba073f3d 100644
--- a/src/dotty/tools/dotc/typer/Applications.scala
+++ b/src/dotty/tools/dotc/typer/Applications.scala
@@ -17,6 +17,7 @@ import Types._
import Decorators._
import ErrorReporting._
import Trees._
+import config.Config
import Names._
import StdNames._
import ProtoTypes._
@@ -66,7 +67,7 @@ object Applications {
if (extractorMemberType(unapplyResult, nme.isDefined, pos) isRef defn.BooleanClass) {
if (getTp.exists)
if (unapplyFn.symbol.name == nme.unapplySeq) {
- val seqArg = boundsToHi(getTp.firstBaseArgInfo(defn.SeqClass))
+ val seqArg = boundsToHi(getTp.elemType)
if (seqArg.exists) return args map Function.const(seqArg)
}
else return getUnapplySelectors(getTp, args, pos)
@@ -628,12 +629,11 @@ trait Applications extends Compatibility { self: Typer =>
def typedTypeApply(tree: untpd.TypeApply, pt: Type)(implicit ctx: Context): Tree = track("typedTypeApply") {
val isNamed = hasNamedArg(tree.args)
- var typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_))
+ val typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_))
val typedFn = typedExpr(tree.fun, PolyProto(typedArgs.tpes, pt))
typedFn.tpe.widen match {
case pt: PolyType =>
if (typedArgs.length <= pt.paramBounds.length && !isNamed)
- typedArgs = typedArgs.zipWithConserve(pt.paramBounds)(adaptTypeArg)
if (typedFn.symbol == defn.Predef_classOf && typedArgs.nonEmpty) {
val arg = typedArgs.head
checkClassType(arg.tpe, arg.pos, traitReq = false, stablePrefixReq = false)
@@ -643,9 +643,6 @@ trait Applications extends Compatibility { self: Typer =>
assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs)
}
- def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree =
- tree.withType(tree.tpe.etaExpandIfHK(bound))
-
/** Rewrite `new Array[T](....)` if T is an unbounded generic to calls to newGenericArray.
* It is performed during typer as creation of generic arrays needs a classTag.
* we rely on implicit search to find one.
@@ -741,7 +738,7 @@ trait Applications extends Compatibility { self: Typer =>
def isSubTypeOfParent(subtp: Type, tp: Type)(implicit ctx: Context): Boolean =
if (subtp <:< tp) true
else tp match {
- case RefinedType(parent, _) => isSubTypeOfParent(subtp, parent)
+ case tp: RefinedType => isSubTypeOfParent(subtp, tp.parent)
case _ => false
}
diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala
index 22d2407bc..b1cceea88 100644
--- a/src/dotty/tools/dotc/typer/Checking.scala
+++ b/src/dotty/tools/dotc/typer/Checking.scala
@@ -34,21 +34,45 @@ object Checking {
import tpd._
/** A general checkBounds method that can be used for TypeApply nodes as
- * well as for AppliedTypeTree nodes.
+ * well as for AppliedTypeTree nodes. Also checks that type arguments to
+ * *-type parameters are fully applied.
*/
- def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context) =
+ def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context) = {
+ (args, boundss).zipped.foreach { (arg, bound) =>
+ if (!bound.isHK && arg.tpe.isHK)
+ ctx.error(d"missing type parameter(s) for $arg", arg.pos)
+ }
for ((arg, which, bound) <- ctx.boundsViolations(args, boundss, instantiate))
ctx.error(
d"Type argument ${arg.tpe} does not conform to $which bound $bound ${err.whyNoMatchStr(arg.tpe, bound)}",
arg.pos)
+ }
/** Check that type arguments `args` conform to corresponding bounds in `poly`
* Note: This does not check the bounds of AppliedTypeTrees. These
* are handled by method checkBounds in FirstTransform
*/
- def checkBounds(args: List[tpd.Tree], poly: PolyType)(implicit ctx: Context): Unit =
+ def checkBounds(args: List[tpd.Tree], poly: GenericType)(implicit ctx: Context): Unit =
checkBounds(args, poly.paramBounds, _.substParams(poly, _))
+ /** If type is a higher-kinded application with wildcard arguments,
+ * check that it or one of its supertypes can be reduced to a normal application.
+ * Unreducible applications correspond to general existentials, and we
+ * cannot handle those.
+ */
+ def checkWildcardHKApply(tp: Type, pos: Position)(implicit ctx: Context): Unit = tp match {
+ case tp @ HKApply(tycon, args) if args.exists(_.isInstanceOf[TypeBounds]) =>
+ tycon match {
+ case tycon: TypeLambda =>
+ ctx.errorOrMigrationWarning(
+ d"unreducible application of higher-kinded type $tycon to wildcard arguments",
+ pos)
+ case _ =>
+ checkWildcardHKApply(tp.superType, pos)
+ }
+ case _ =>
+ }
+
/** Traverse type tree, performing the following checks:
* 1. All arguments of applied type trees must conform to their bounds.
* 2. Prefixes of type selections and singleton types must be realizable.
@@ -59,15 +83,20 @@ object Checking {
case AppliedTypeTree(tycon, args) =>
// If `args` is a list of named arguments, return corresponding type parameters,
// otherwise return type parameters unchanged
- def matchNamed(tparams: List[TypeSymbol], args: List[Tree]): List[Symbol] =
- if (hasNamedArg(args))
- for (NamedArg(name, _) <- args) yield tycon.tpe.member(name).symbol
- else
- tparams
- val tparams = matchNamed(tycon.tpe.typeSymbol.typeParams, args)
- val bounds = tparams.map(tparam =>
- tparam.info.asSeenFrom(tycon.tpe.normalizedPrefix, tparam.owner.owner).bounds)
- checkBounds(args, bounds, _.substDealias(tparams, _))
+ val tparams = tycon.tpe.typeParams
+ def argNamed(tparam: TypeParamInfo) = args.find {
+ case NamedArg(name, _) => name == tparam.paramName
+ case _ => false
+ }.getOrElse(TypeTree(tparam.paramRef))
+ val orderedArgs = if (hasNamedArg(args)) tparams.map(argNamed) else args
+ val bounds = tparams.map(_.paramBoundsAsSeenFrom(tycon.tpe))
+ def instantiate(bound: Type, args: List[Type]) =
+ bound.LambdaAbstract(tparams).appliedTo(args)
+ checkBounds(orderedArgs, bounds, instantiate)
+
+ def checkValidIfHKApply(implicit ctx: Context): Unit =
+ checkWildcardHKApply(tycon.tpe.appliedTo(args.map(_.tpe)), tree.pos)
+ checkValidIfHKApply(ctx.addMode(Mode.AllowLambdaWildcardApply))
case Select(qual, name) if name.isTypeName =>
checkRealizable(qual.tpe, qual.pos)
case SelectFromTypeTree(qual, name) if name.isTypeName =>
@@ -172,8 +201,12 @@ object Checking {
case tp: TermRef =>
this(tp.info)
mapOver(tp)
- case tp @ RefinedType(parent, name) =>
- tp.derivedRefinedType(this(parent), name, this(tp.refinedInfo, nestedCycleOK, nestedCycleOK))
+ case tp @ RefinedType(parent, name, rinfo) =>
+ tp.derivedRefinedType(this(parent), name, this(rinfo, nestedCycleOK, nestedCycleOK))
+ case tp: RecType =>
+ tp.rebind(this(tp.parent))
+ case tp @ HKApply(tycon, args) =>
+ tp.derivedAppliedType(this(tycon), args.map(this(_, nestedCycleOK, nestedCycleOK)))
case tp @ TypeRef(pre, name) =>
try {
// A prefix is interesting if it might contain (transitively) a reference
@@ -187,7 +220,7 @@ object Checking {
case SuperType(thistp, _) => isInteresting(thistp)
case AndType(tp1, tp2) => isInteresting(tp1) || isInteresting(tp2)
case OrType(tp1, tp2) => isInteresting(tp1) && isInteresting(tp2)
- case _: RefinedType => true
+ case _: RefinedOrRecType | _: HKApply => true
case _ => false
}
if (isInteresting(pre)) {
@@ -354,7 +387,7 @@ object Checking {
// try to dealias to avoid a leak error
val savedErrors = errors
errors = prevErrors
- val tp2 = apply(tp.info.bounds.hi)
+ val tp2 = apply(tp.superType)
if (errors eq prevErrors) tp1 = tp2
else errors = savedErrors
}
@@ -433,12 +466,14 @@ trait Checking {
}
/** Check that any top-level type arguments in this type are feasible, i.e. that
- * their lower bound conforms to their upper cound. If a type argument is
+ * their lower bound conforms to their upper bound. If a type argument is
   * infeasible, issue an error and continue with the upper bound.
*/
def checkFeasible(tp: Type, pos: Position, where: => String = "")(implicit ctx: Context): Type = tp match {
case tp: RefinedType =>
tp.derivedRefinedType(tp.parent, tp.refinedName, checkFeasible(tp.refinedInfo, pos, where))
+ case tp: RecType =>
+ tp.rebind(tp.parent)
case tp @ TypeBounds(lo, hi) if !(lo <:< hi) =>
ctx.error(d"no type exists between low bound $lo and high bound $hi$where", pos)
TypeAlias(hi)
@@ -500,17 +535,6 @@ trait Checking {
errorTree(tpt, d"missing type parameter for ${tpt.tpe}")
}
else tpt
-
- def checkLowerNotHK(sym: Symbol, tparams: List[Symbol], pos: Position)(implicit ctx: Context) =
- if (tparams.nonEmpty)
- sym.info match {
- case info: TypeAlias => // ok
- case TypeBounds(lo, _) =>
- for (tparam <- tparams)
- if (tparam.typeRef.occursIn(lo))
- ctx.error(i"type parameter ${tparam.name} may not occur in lower bound $lo", pos)
- case _ =>
- }
}
trait NoChecking extends Checking {
@@ -524,5 +548,4 @@ trait NoChecking extends Checking {
override def checkNoDoubleDefs(cls: Symbol)(implicit ctx: Context): Unit = ()
override def checkParentCall(call: Tree, caller: ClassSymbol)(implicit ctx: Context) = ()
override def checkSimpleKinded(tpt: Tree)(implicit ctx: Context): Tree = tpt
- override def checkLowerNotHK(sym: Symbol, tparams: List[Symbol], pos: Position)(implicit ctx: Context) = ()
}
diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala
index 7de40294d..feed398aa 100644
--- a/src/dotty/tools/dotc/typer/Implicits.scala
+++ b/src/dotty/tools/dotc/typer/Implicits.scala
@@ -284,17 +284,23 @@ trait ImplicitRunInfo { self: RunInfo =>
override implicit protected val ctx: Context = liftingCtx
override def stopAtStatic = true
def apply(tp: Type) = tp match {
- case tp: TypeRef if tp.symbol.isLambdaTrait =>
- defn.AnyType
case tp: TypeRef if tp.symbol.isAbstractOrAliasType =>
val pre = tp.prefix
def joinClass(tp: Type, cls: ClassSymbol) =
- if (cls.isLambdaTrait) tp
- else AndType.make(tp, cls.typeRef.asSeenFrom(pre, cls.owner))
+ AndType.make(tp, cls.typeRef.asSeenFrom(pre, cls.owner))
val lead = if (tp.prefix eq NoPrefix) defn.AnyType else apply(tp.prefix)
(lead /: tp.classSymbols)(joinClass)
case tp: TypeVar =>
apply(tp.underlying)
+ case tp: HKApply =>
+ def applyArg(arg: Type) = arg match {
+ case TypeBounds(lo, hi) => AndType.make(lo, hi)
+ case _: WildcardType => defn.AnyType
+ case _ => arg
+ }
+ (apply(tp.tycon) /: tp.args)((tc, arg) => AndType.make(tc, applyArg(arg)))
+ case tp: TypeLambda =>
+ apply(tp.resType)
case _ =>
mapOver(tp)
}
@@ -325,7 +331,7 @@ trait ImplicitRunInfo { self: RunInfo =>
}
def addParentScope(parent: TypeRef): Unit = {
iscopeRefs(parent) foreach addRef
- for (param <- parent.typeParams)
+ for (param <- parent.typeParamSymbols)
comps ++= iscopeRefs(tp.member(param.name).info)
}
val companion = cls.companionModule
@@ -766,7 +772,7 @@ class SearchHistory(val searchDepth: Int, val seen: Map[ClassSymbol, Int]) {
case tp: RefinedType =>
foldOver(n + 1, tp)
case tp: TypeRef if tp.info.isAlias =>
- apply(n, tp.info.bounds.hi)
+ apply(n, tp.superType)
case _ =>
foldOver(n, tp)
}
diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala
index f880b647e..c60f4c1f2 100644
--- a/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -175,8 +175,12 @@ object Inferencing {
/** Recursively widen and also follow type declarations and type aliases. */
def widenForMatchSelector(tp: Type)(implicit ctx: Context): Type = tp.widen match {
- case tp: TypeRef if !tp.symbol.isClass => widenForMatchSelector(tp.info.bounds.hi)
- case tp: AnnotatedType => tp.derivedAnnotatedType(widenForMatchSelector(tp.tpe), tp.annot)
+ case tp: TypeRef if !tp.symbol.isClass =>
+ widenForMatchSelector(tp.superType)
+ case tp: HKApply =>
+ widenForMatchSelector(tp.superType)
+ case tp: AnnotatedType =>
+ tp.derivedAnnotatedType(widenForMatchSelector(tp.tpe), tp.annot)
case tp => tp
}
diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala
index a8f3b8918..3b193d2db 100644
--- a/src/dotty/tools/dotc/typer/Namer.scala
+++ b/src/dotty/tools/dotc/typer/Namer.scala
@@ -14,6 +14,7 @@ import collection.mutable
import annotation.tailrec
import ErrorReporting._
import tpd.ListOfTreeDecorator
+import config.Config
import config.Printers._
import Annotations._
import Inferencing._
@@ -306,9 +307,14 @@ class Namer { typer: Typer =>
// have no implementation.
val cctx = if (tree.name == nme.CONSTRUCTOR && !(tree.mods is JavaDefined)) ctx.outer else ctx
+ val completer = tree match {
+ case tree: TypeDef => new TypeDefCompleter(tree)(cctx)
+ case _ => new Completer(tree)(cctx)
+ }
+
recordSym(ctx.newSymbol(
ctx.owner, name, flags | deferred | method | higherKinded | inSuperCall1,
- adjustIfModule(new Completer(tree)(cctx), tree),
+ adjustIfModule(completer, tree),
privateWithinClass(tree.mods), tree.pos), tree)
case tree: Import =>
recordSym(ctx.newSymbol(
@@ -488,32 +494,11 @@ class Namer { typer: Typer =>
}
/** The completer of a symbol defined by a member def or import (except ClassSymbols) */
- class Completer(val original: Tree)(implicit ctx: Context) extends TypeParamsCompleter {
+ class Completer(val original: Tree)(implicit ctx: Context) extends LazyType {
protected def localContext(owner: Symbol) = ctx.fresh.setOwner(owner).setTree(original)
- private var myTypeParams: List[TypeSymbol] = null
- private var nestedCtx: Context = null
-
- def completerTypeParams(sym: Symbol): List[TypeSymbol] = {
- if (myTypeParams == null) {
- //println(i"completing type params of $sym in ${sym.owner}")
- myTypeParams = original match {
- case tdef: TypeDef =>
- nestedCtx = localContext(sym).setNewScope
- locally {
- implicit val ctx: Context = nestedCtx
- completeParams(tdef.tparams)
- tdef.tparams.map(symbolOfTree(_).asType)
- }
- case _ =>
- Nil
- }
- }
- myTypeParams
- }
-
- private def typeSig(sym: Symbol): Type = original match {
+ protected def typeSig(sym: Symbol): Type = original match {
case original: ValDef =>
if (sym is Module) moduleValSig(sym)
else valOrDefDefSig(original, sym, Nil, Nil, identity)(localContext(sym).setNewScope)
@@ -521,9 +506,6 @@ class Namer { typer: Typer =>
val typer1 = ctx.typer.newLikeThis
nestedTyper(sym) = typer1
typer1.defDefSig(original, sym)(localContext(sym).setTyper(typer1))
- case original: TypeDef =>
- assert(!original.isClassDef)
- typeDefSig(original, sym, completerTypeParams(sym))(nestedCtx)
case imp: Import =>
try {
val expr1 = typedAheadExpr(imp.expr, AnySelectionProto)
@@ -568,6 +550,28 @@ class Namer { typer: Typer =>
}
}
+ class TypeDefCompleter(original: TypeDef)(ictx: Context) extends Completer(original)(ictx) with TypeParamsCompleter {
+ private var myTypeParams: List[TypeSymbol] = null
+ private var nestedCtx: Context = null
+ assert(!original.isClassDef)
+
+ def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol] = {
+ if (myTypeParams == null) {
+ //println(i"completing type params of $sym in ${sym.owner}")
+ nestedCtx = localContext(sym).setNewScope
+ myTypeParams = {
+ implicit val ctx: Context = nestedCtx
+ completeParams(original.tparams)
+ original.tparams.map(symbolOfTree(_).asType)
+ }
+ }
+ myTypeParams
+ }
+
+ override protected def typeSig(sym: Symbol): Type =
+ typeDefSig(original, sym, completerTypeParams(sym)(ictx))(nestedCtx)
+ }
+
class ClassCompleter(cls: ClassSymbol, original: TypeDef)(ictx: Context) extends Completer(original)(ictx) {
withDecls(newScope)
@@ -591,7 +595,7 @@ class Namer { typer: Typer =>
*/
def parentType(parent: untpd.Tree)(implicit ctx: Context): Type =
if (parent.isType) {
- typedAheadType(parent).tpe
+ typedAheadType(parent, AnyTypeConstructorProto).tpe
} else {
val (core, targs) = stripApply(parent) match {
case TypeApply(core, targs) => (core, targs)
@@ -934,7 +938,7 @@ class Namer { typer: Typer =>
//val toParameterize = tparamSyms.nonEmpty && !isDerived
//val needsLambda = sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived
def abstracted(tp: Type): Type =
- if (tparamSyms.nonEmpty && !isDerived) tp.LambdaAbstract(tparamSyms)
+ if (tparamSyms.nonEmpty && !tp.isHK) tp.LambdaAbstract(tparamSyms)
//else if (toParameterize) tp.parameterizeWith(tparamSyms)
else tp
@@ -972,28 +976,6 @@ class Namer { typer: Typer =>
}
ensureUpToDate(sym.typeRef, dummyInfo)
ensureUpToDate(sym.typeRef.appliedTo(tparamSyms.map(_.typeRef)), TypeBounds.empty)
-
- etaExpandArgs.apply(sym.info)
- }
-
- /** Eta expand all class types C appearing as arguments to a higher-kinded
- * type parameter to type lambdas, e.g. [HK0] => C[HK0]. This is necessary
- * because in `typedAppliedTypeTree` we might have missed some eta expansions
- * of arguments in F-bounds, because the recursive type was initialized with
- * TypeBounds.empty.
- */
- def etaExpandArgs(implicit ctx: Context) = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case tp: RefinedType =>
- val args = tp.argInfos.mapconserve(this)
- if (args.nonEmpty) {
- val tycon = tp.withoutArgs(args)
- val tycon1 = this(tycon)
- val tparams = tycon.typeParams
- val args1 = if (args.length == tparams.length) etaExpandIfHK(tparams, args) else args
- if ((tycon1 eq tycon) && (args1 eq args)) tp else tycon1.appliedTo(args1)
- } else mapOver(tp)
- case _ => mapOver(tp)
- }
+ sym.info
}
}
diff --git a/src/dotty/tools/dotc/typer/ProtoTypes.scala b/src/dotty/tools/dotc/typer/ProtoTypes.scala
index 740258821..3a13212a3 100644
--- a/src/dotty/tools/dotc/typer/ProtoTypes.scala
+++ b/src/dotty/tools/dotc/typer/ProtoTypes.scala
@@ -286,7 +286,7 @@ object ProtoTypes {
override def isMatchedBy(tp: Type)(implicit ctx: Context) = {
def isInstantiatable(tp: Type) = tp.widen match {
- case PolyType(paramNames) => paramNames.length == targs.length
+ case tp: GenericType => tp.paramNames.length == targs.length
case _ => false
}
isInstantiatable(tp) || tp.member(nme.apply).hasAltWith(d => isInstantiatable(d.info))
@@ -311,6 +311,9 @@ object ProtoTypes {
*/
@sharable object AnyFunctionProto extends UncachedGroundType with MatchAlways
+ /** A prototype for type constructors that are followed by a type application */
+ @sharable object AnyTypeConstructorProto extends UncachedGroundType with MatchAlways
+
/** Add all parameters in given polytype `pt` to the constraint's domain.
* If the constraint contains already some of these parameters in its domain,
* make a copy of the polytype and add the copy's type parameters instead.
@@ -402,6 +405,11 @@ object ProtoTypes {
WildcardType(TypeBounds.upper(wildApprox(mt.paramTypes(pnum))))
case tp: TypeVar =>
wildApprox(tp.underlying)
+ case tp @ HKApply(tycon, args) =>
+ wildApprox(tycon) match {
+ case _: WildcardType => WildcardType // this ensures we get a * type
+ case tycon1 => tp.derivedAppliedType(tycon1, args.mapConserve(wildApprox(_)))
+ }
case tp: AndType =>
val tp1a = wildApprox(tp.tp1)
val tp2a = wildApprox(tp.tp2)
diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala
index 995fa43ca..0344ae6c6 100644
--- a/src/dotty/tools/dotc/typer/TypeAssigner.scala
+++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala
@@ -96,14 +96,17 @@ trait TypeAssigner {
}
case tp @ AppliedType(tycon, args) if toAvoid(tycon) =>
val base = apply(tycon)
- val args = tp.baseArgInfos(base.typeSymbol)
- if (base.typeParams.length == args.length) base.appliedTo(args) else base
- case tp @ RefinedType(parent, name) if variance > 0 =>
+ var args = tp.baseArgInfos(base.typeSymbol)
+ if (base.typeParams.length != args.length)
+ args = base.typeParams.map(_.paramBounds)
+ base.appliedTo(args)
+ case tp @ RefinedType(parent, name, rinfo) if variance > 0 =>
val parent1 = apply(tp.parent)
- val refinedInfo1 = apply(tp.refinedInfo)
+ val refinedInfo1 = apply(rinfo)
if (toAvoid(refinedInfo1)) {
typr.println(s"dropping refinement from $tp")
- parent1
+ if (name.isTypeName) tp.derivedRefinedType(parent1, name, TypeBounds.empty)
+ else parent1
} else {
tp.derivedRefinedType(parent1, name, refinedInfo1)
}
@@ -144,7 +147,7 @@ trait TypeAssigner {
* which are accessible.
*
* Also performs the following normalizations on the type `tpe`.
- * (1) parameter accessors are alwys dereferenced.
+ * (1) parameter accessors are always dereferenced.
* (2) if the owner of the denotation is a package object, it is assured
* that the package object shows up as the prefix.
*/
@@ -409,11 +412,11 @@ trait TypeAssigner {
def refineNamed(tycon: Type, arg: Tree) = arg match {
case ast.Trees.NamedArg(name, argtpt) =>
// Dotty deviation: importing ast.Trees._ and matching on NamedArg gives a cyclic ref error
- val tparam = tparams.find(_.name == name) match {
+ val tparam = tparams.find(_.paramName == name) match {
case Some(tparam) => tparam
case none => ntparams.find(_.name == name).getOrElse(NoSymbol)
}
- if (tparam.exists) RefinedType(tycon, name, argtpt.tpe.toBounds(tparam))
+ if (tparam.isTypeParam) RefinedType(tycon, name, argtpt.tpe.toBounds(tparam))
else errorType(i"$tycon does not have a parameter or abstract type member named $name", arg.pos)
case _ =>
errorType(s"named and positional type arguments may not be mixed", arg.pos)
@@ -425,6 +428,9 @@ trait TypeAssigner {
tree.withType(ownType)
}
+ def assignType(tree: untpd.TypeLambdaTree, tparamDefs: List[TypeDef], body: Tree)(implicit ctx: Context) =
+ tree.withType(TypeLambda.fromSymbols(tparamDefs.map(_.symbol), body.tpe))
+
def assignType(tree: untpd.ByNameTypeTree, result: Tree)(implicit ctx: Context) =
tree.withType(ExprType(result.tpe))
diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala
index 268020ec5..4e2842da7 100644
--- a/src/dotty/tools/dotc/typer/Typer.scala
+++ b/src/dotty/tools/dotc/typer/Typer.scala
@@ -17,7 +17,6 @@ import SymDenotations._
import Annotations._
import Names._
import NameOps._
-import Applications._
import Flags._
import Decorators._
import ErrorReporting._
@@ -914,20 +913,21 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
typr.println(s"adding refinement $refinement")
checkRefinementNonCyclic(refinement, refineCls, seen)
val rsym = refinement.symbol
- if ((rsym.is(Method) || rsym.isType) && rsym.allOverriddenSymbols.isEmpty)
+ if (rsym.is(Method) && rsym.allOverriddenSymbols.isEmpty)
ctx.error(i"refinement $rsym without matching type in parent $parent", refinement.pos)
val rinfo = if (rsym is Accessor) rsym.info.resultType else rsym.info
- RefinedType(parent, rsym.name, rt => rinfo.substThis(refineCls, RefinedThis(rt)))
+ RefinedType(parent, rsym.name, rinfo)
// todo later: check that refinement is within bounds
}
- val res = cpy.RefinedTypeTree(tree)(tpt1, refinements1) withType
- (tpt1.tpe /: refinements1)(addRefinement)
+ val refined = (tpt1.tpe /: refinements1)(addRefinement)
+ val res = cpy.RefinedTypeTree(tree)(tpt1, refinements1).withType(
+ RecType.closeOver(rt => refined.substThis(refineCls, RecThis(rt))))
typr.println(i"typed refinement: ${res.tpe}")
res
}
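For illustration only (not part of the patch), a minimal sketch of a refinement whose member refers back to the refined type itself; C, T, f and g are hypothetical names. Under the new scheme such a self-reference is bound once by the enclosing RecType (through RecThis) rather than by a per-refinement RefinedThis:
    // illustrative sketch; the T in `def f: T` refers to the refinement's own T
    trait C { type T; def f: T }
    def g(x: C { type T = String; def f: T }): String = x.f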
def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(implicit ctx: Context): Tree = track("typedAppliedTypeTree") {
- val tpt1 = typed(tree.tpt)(ctx retractMode Mode.Pattern)
+ val tpt1 = typed(tree.tpt, AnyTypeConstructorProto)(ctx.retractMode(Mode.Pattern))
val tparams = tpt1.tpe.typeParams
if (tparams.isEmpty) {
ctx.error(d"${tpt1.tpe} does not take type parameters", tree.pos)
@@ -942,14 +942,13 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos)
args = args.take(tparams.length)
}
- def typedArg(arg: untpd.Tree, tparam: Symbol) = {
+ def typedArg(arg: untpd.Tree, tparam: TypeParamInfo) = {
val (desugaredArg, argPt) =
if (ctx.mode is Mode.Pattern)
- (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.info)
+ (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.paramBounds)
else
(arg, WildcardType)
- val arg1 = typed(desugaredArg, argPt)
- adaptTypeArg(arg1, tparam.info)
+ typed(desugaredArg, argPt)
}
args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]]
}
@@ -958,6 +957,14 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
}
+ def typedTypeLambdaTree(tree: untpd.TypeLambdaTree)(implicit ctx: Context): Tree = track("typedTypeLambdaTree") {
+ val TypeLambdaTree(tparams, body) = tree
+ index(tparams)
+ val tparams1 = tparams.mapconserve(typed(_).asInstanceOf[TypeDef])
+ val body1 = typedType(tree.body)
+ assignType(cpy.TypeLambdaTree(tree)(tparams1, body1), tparams1, body1)
+ }
+
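For illustration only (not part of the patch), a minimal sketch of the direct type-lambda syntax this node types; TypeLambdaSketch, TwoLists and f are hypothetical names, and the [X] -> ... form is the one used in the new tests below:
    // illustrative sketch; a named type lambda of kind * -> * passed to a higher-kinded parameter
    object TypeLambdaSketch {
      type TwoLists = [X] -> List[List[X]]
      def f[F[_]] = ()
      f[TwoLists]
    }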
def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(implicit ctx: Context): ByNameTypeTree = track("typedByNameTypeTree") {
val result1 = typed(tree.result)
assignType(cpy.ByNameTypeTree(tree)(result1), result1)
@@ -1033,7 +1040,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val tparams1 = tparams mapconserve (typed(_).asInstanceOf[TypeDef])
val vparamss1 = vparamss nestedMapconserve (typed(_).asInstanceOf[ValDef])
if (sym is Implicit) checkImplicitParamsNotSingletons(vparamss1)
- val tpt1 = checkSimpleKinded(typedType(tpt))
+ var tpt1 = checkSimpleKinded(typedType(tpt))
var rhsCtx = ctx
if (sym.isConstructor && !sym.isPrimaryConstructor && tparams1.nonEmpty) {
@@ -1045,13 +1052,18 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
rhsCtx.gadt.setBounds(tdef.symbol, TypeAlias(tparam.typeRef)))
}
val rhs1 = typedExpr(ddef.rhs, tpt1.tpe)(rhsCtx)
+ if (sym.isAnonymousFunction) {
+ // If we define an anonymous function, make sure the return type does not
+ // refer to parameters. This is necessary because closure types are
+ // function types so no dependencies on parameters are allowed.
+ tpt1 = tpt1.withType(avoid(tpt1.tpe, vparamss1.flatMap(_.map(_.symbol))))
+ }
assignType(cpy.DefDef(ddef)(name, tparams1, vparamss1, tpt1, rhs1), sym)
//todo: make sure dependent method types do not depend on implicits or by-name params
}
def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(implicit ctx: Context): Tree = track("typedTypeDef") {
val TypeDef(name, rhs) = tdef
- checkLowerNotHK(sym, tdef.tparams.map(symbolOfTree), tdef.pos)
completeAnnotations(tdef, sym)
assignType(cpy.TypeDef(tdef)(name, typedType(rhs), Nil), sym)
}
@@ -1272,6 +1284,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case tree: untpd.OrTypeTree => typedOrTypeTree(tree)
case tree: untpd.RefinedTypeTree => typedRefinedTypeTree(tree)
case tree: untpd.AppliedTypeTree => typedAppliedTypeTree(tree)
+ case tree: untpd.TypeLambdaTree => typedTypeLambdaTree(tree)(localContext(tree, NoSymbol).setNewScope)
case tree: untpd.ByNameTypeTree => typedByNameTypeTree(tree)
case tree: untpd.TypeBoundsTree => typedTypeBoundsTree(tree)
case tree: untpd.Alternative => typedAlternative(tree, pt)
@@ -1662,6 +1675,14 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
}
+ def adaptType(tp: Type): Tree = {
+ val tree1 =
+ if ((pt eq AnyTypeConstructorProto) || tp.typeParamSymbols.isEmpty) tree
+ else tree.withType(tree.tpe.EtaExpand(tp.typeParamSymbols))
+ if ((ctx.mode is Mode.Pattern) || tree1.tpe <:< pt) tree1
+ else err.typeMismatch(tree1, pt)
+ }
+
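For illustration only (not part of the patch), a minimal sketch of the eta-expansion performed by adaptType; the object and method names are invented, while the classes and error text mirror tests/neg/hk-bounds.scala below:
    // illustrative sketch; Bar is eta-expanded to [X0] -> Bar[X0] before the bounds check
    object EtaExpandSketch {
      class Foo[A]; class Bar[B]
      def foo[F[X] <: Foo[X]] = ()
      foo[Bar]  // error: Type argument [X0] -> Bar[X0] does not conform to upper bound [X0] -> Foo[X0]
    }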
tree match {
case _: MemberDef | _: PackageDef | _: Import | _: WithoutTypeOrPos[_] => tree
case _ => tree.tpe.widen match {
@@ -1695,9 +1716,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
(_, _) => tree // error will be reported in typedTypeApply
}
case _ =>
- if (ctx.mode is Mode.Type)
- if ((ctx.mode is Mode.Pattern) || tree.tpe <:< pt) tree
- else err.typeMismatch(tree, pt)
+ if (ctx.mode is Mode.Type) adaptType(tree.tpe)
else adaptNoArgs(wtp)
}
}
diff --git a/src/dotty/tools/dotc/typer/Variances.scala b/src/dotty/tools/dotc/typer/Variances.scala
index 55e6b5232..a8abe5e30 100644
--- a/src/dotty/tools/dotc/typer/Variances.scala
+++ b/src/dotty/tools/dotc/typer/Variances.scala
@@ -75,13 +75,26 @@ object Variances {
case tp @ TypeBounds(lo, hi) =>
if (lo eq hi) compose(varianceInType(hi)(tparam), tp.variance)
else flip(varianceInType(lo)(tparam)) & varianceInType(hi)(tparam)
- case tp @ RefinedType(parent, _) =>
- varianceInType(parent)(tparam) & varianceInType(tp.refinedInfo)(tparam)
+ case tp @ RefinedType(parent, _, rinfo) =>
+ varianceInType(parent)(tparam) & varianceInType(rinfo)(tparam)
+ case tp: RecType =>
+ varianceInType(tp.parent)(tparam)
case tp @ MethodType(_, paramTypes) =>
flip(varianceInTypes(paramTypes)(tparam)) & varianceInType(tp.resultType)(tparam)
case ExprType(restpe) =>
varianceInType(restpe)(tparam)
- case tp @ PolyType(_) =>
+ case tp @ HKApply(tycon, args) =>
+ def varianceInArgs(v: Variance, args: List[Type], tparams: List[TypeParamInfo]): Variance =
+ args match {
+ case arg :: args1 =>
+ varianceInArgs(
+ v & compose(varianceInType(arg)(tparam), tparams.head.paramVariance),
+ args1, tparams.tail)
+ case nil =>
+ v
+ }
+ varianceInArgs(varianceInType(tycon)(tparam), args, tycon.typeParams)
+ case tp: GenericType =>
flip(varianceInTypes(tp.paramBounds)(tparam)) & varianceInType(tp.resultType)(tparam)
case AnnotatedType(tp, annot) =>
varianceInType(tp)(tparam) & varianceInAnnot(annot)(tparam)
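For illustration only (not part of the patch), a minimal sketch of the composition performed for HKApply arguments; Producer, Consumer, Wrap and F are hypothetical names:
    // illustrative sketch; the variance of X inside F[X] is composed with F's declared parameter variance
    trait Producer[F[+_]] {
      class Wrap[+X](val get: F[X])  // ok: covariant X under a covariant parameter stays covariant
    }
    trait Consumer[F[-_]] {
      class Wrap[+X](val get: F[X])  // error: covariant X is flipped by the contravariant parameter
    }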
diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala
index bac443735..4361ccc13 100644
--- a/test/dotc/tests.scala
+++ b/test/dotc/tests.scala
@@ -29,7 +29,6 @@ class tests extends CompilerTest {
else List("-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef")
}
-
val testPickling = List("-Xprint-types", "-Ytest-pickler", "-Ystop-after:pickler")
val twice = List("#runs", "2")
@@ -149,6 +148,7 @@ class tests extends CompilerTest {
@Test def compileMixed = compileLine(
"""tests/pos/B.scala
|./scala-scala/src/library/scala/collection/immutable/Seq.scala
+ |./scala-scala/src/library/scala/collection/parallel/ParSeq.scala
|./scala-scala/src/library/scala/package.scala
|./scala-scala/src/library/scala/collection/GenSeqLike.scala
|./scala-scala/src/library/scala/collection/SeqLike.scala
diff --git a/tests/neg/named-params.scala b/tests/disabled/neg/named-params.scala
index 5a2375b15..5a2375b15 100644
--- a/tests/neg/named-params.scala
+++ b/tests/disabled/neg/named-params.scala
diff --git a/tests/disabled/not-representable/pos/t2066.scala b/tests/disabled/not-representable/pos/t2066.scala
index 30cb99d45..d175769fa 100644
--- a/tests/disabled/not-representable/pos/t2066.scala
+++ b/tests/disabled/not-representable/pos/t2066.scala
@@ -3,7 +3,7 @@ trait A1 {
}
trait B1 extends A1 {
- override def f[T[_]] = ()
+ override def f[T[+_]] = ()
}
@@ -12,12 +12,12 @@ trait A2 {
}
trait B2 extends A2 {
- override def f[T[_]] = ()
+ override def f[T[-_]] = ()
}
trait A3 {
- def f[T[X[_]]] = ()
+ def f[T[X[+_]]] = ()
}
trait B3 extends A3 {
diff --git a/tests/pos/CollectionStrawMan3.scala b/tests/disabled/pos/CollectionStrawMan3.scala
index c21a73f00..c21a73f00 100644
--- a/tests/pos/CollectionStrawMan3.scala
+++ b/tests/disabled/pos/CollectionStrawMan3.scala
diff --git a/tests/pos/flowops.scala b/tests/disabled/pos/flowops.scala
index 6aead26be..6aead26be 100644
--- a/tests/pos/flowops.scala
+++ b/tests/disabled/pos/flowops.scala
diff --git a/tests/pos/flowops1.scala b/tests/disabled/pos/flowops1.scala
index 649a9b18c..649a9b18c 100644
--- a/tests/pos/flowops1.scala
+++ b/tests/disabled/pos/flowops1.scala
diff --git a/tests/pos/hk-named.scala b/tests/disabled/pos/hk-named.scala
index 5f2cb6c74..5f2cb6c74 100644
--- a/tests/pos/hk-named.scala
+++ b/tests/disabled/pos/hk-named.scala
diff --git a/tests/pos/named-params.scala b/tests/disabled/pos/named-params.scala
index 3fab24cd2..3fab24cd2 100644
--- a/tests/pos/named-params.scala
+++ b/tests/disabled/pos/named-params.scala
diff --git a/tests/neg/boundspropagation.scala b/tests/neg/boundspropagation.scala
index b545b09da..dd4ebf513 100644
--- a/tests/neg/boundspropagation.scala
+++ b/tests/neg/boundspropagation.scala
@@ -40,5 +40,5 @@ object test4 {
}
class Test5 {
-"": ({ type U = this.type })#U // error // error
+"": ({ type U = this.type })#U // error
}
diff --git a/tests/neg/existentials.scala b/tests/neg/existentials.scala
new file mode 100644
index 000000000..4798504d9
--- /dev/null
+++ b/tests/neg/existentials.scala
@@ -0,0 +1,61 @@
+object TestList {
+
+ var x: ([X] -> List[List[X]])[_] = List(List(1)) // error: unreducible
+ var y: ([X] -> List[Seq[X]])[_] = List(List(1)) // error: unreducible
+
+ x = x
+ y = y
+ y = x
+
+ val h = x.head
+ val x1: List[_] = h
+
+ var z: List[_] = x
+
+}
+object TestSet {
+
+ var x: ([Y] -> Set[Set[Y]])[_] = Set(Set("a")) // error: unreducible
+ var y: ([Y] -> Set[Iterable[Y]])[_] = Set(Set("a")) // error: unreducible
+
+ x = x
+ y = y
+
+ val h = x.head
+ val h1: Set[_] = h
+
+ // val p = x.+ // infinite loop in implicit search
+
+ var z: Set[_] = x
+
+}
+class TestX {
+
+ class C[T](x: T) {
+ def get: T = x
+ def cmp: T => Boolean = (x == _)
+ }
+
+ val x: ([Y] -> C[C[Y]])[_] = new C(new C("a")) // error: unreducible
+
+ type CC[X] = C[C[X]]
+ val y: CC[_] = ??? // error: unreducible
+
+ type D[X] <: C[X]
+
+ type DD = [X] -> D[D[X]]
+ val z: DD[_] = ??? // error: unreducible
+
+ val g = x.get
+
+ val c = x.cmp
+}
+
+object Test6014 {
+ case class CC[T](key: T)
+ type Alias[T] = Seq[CC[T]]
+
+ def f(xs: Seq[CC[_]]) = xs map { case CC(x) => CC(x) } // ok
+ def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // error: unreducible application
+}
+
diff --git a/tests/neg/hk-bounds.scala b/tests/neg/hk-bounds.scala
new file mode 100644
index 000000000..db6712d72
--- /dev/null
+++ b/tests/neg/hk-bounds.scala
@@ -0,0 +1,31 @@
+class Foo[A]
+class Bar[B]
+class Baz[C] extends Bar[C]
+
+object Test1 {
+ type Alias[F[X] <: Foo[X]] = F[Int]
+
+ val x: Alias[Bar] = new Bar[Int] // error: Type argument [X0] -> Bar[X0] does not conform to upper bound [X0] -> Foo[X0]
+
+ def foo[F[X] <: Foo[X]] = ()
+ foo[Bar] // error: Type argument [X0] -> Bar[X0] does not conform to upper bound [X0] -> Foo[X0]
+
+ def bar[B[X] >: Bar[X]] = ()
+ bar[Bar] // ok
+  bar[Baz] // error: Type argument [X0] -> Baz[X0] does not conform to lower bound [X0] -> Bar[X0]
+ bar[Foo] // error: Type argument [X0] -> Foo[X0] does not conform to lower bound [X0] -> Bar[X0]
+
+ def baz[B[X] >: Baz[X]] = ()
+ baz[Bar] //ok
+ baz[Baz] //ok
+ baz[Foo] // error: Type argument [X0] -> Foo[X0] does not conform to lower bound [X0] -> Baz[X0]
+
+}
+object Test2 {
+ type Alias[F[X] <: Foo[X]] = F[Int]
+
+ def foo[M[_[_]], A[_]]: M[A] = null.asInstanceOf[M[A]]
+
+ val x = foo[Alias, Bar] // error: Type argument Test2.Alias does not conform to upper bound [X0 <: [X0] -> Any] -> Any
+
+}
diff --git a/tests/neg/hk-variance.scala b/tests/neg/hk-variance.scala
new file mode 100644
index 000000000..fec5cc366
--- /dev/null
+++ b/tests/neg/hk-variance.scala
@@ -0,0 +1,11 @@
+object Test {
+
+ def f[C[+X]] = ()
+
+ class D[X] {}
+
+ f[D] // error
+
+ def g[E[-Y]] = f[E] // error
+
+}
diff --git a/tests/neg/hklower.scala b/tests/neg/hklower.scala
deleted file mode 100644
index 5c1ba27ba..000000000
--- a/tests/neg/hklower.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-class Test { // error conflicting bounds
-
- type T[X] // OK
- type U[X] = T[X] // OK
-
- type V[X] >: T[X] // error
- type W[X] >: T[X] <: T[X] // error
-
- def f[C[X] >: T[X]]() = ??? // error
-
-}
diff --git a/tests/neg/hklower2.scala b/tests/neg/hklower2.scala
new file mode 100644
index 000000000..8268bf09f
--- /dev/null
+++ b/tests/neg/hklower2.scala
@@ -0,0 +1,4 @@
+class Test { // error: conflicting bounds
+ trait T[X]
+ type Z[X] >: String <: T[X]
+}
diff --git a/tests/neg/i39.scala b/tests/neg/i39.scala
index df53d9816..8a13a7d06 100644
--- a/tests/neg/i39.scala
+++ b/tests/neg/i39.scala
@@ -1,7 +1,7 @@
object i39neg {
trait B {
- type D <: { type T } // error
+ type D <: { type T }
def d: D
}
diff --git a/tests/neg/i50-volatile.scala b/tests/neg/i50-volatile.scala
index f6fa3466d..fcfc9592b 100644
--- a/tests/neg/i50-volatile.scala
+++ b/tests/neg/i50-volatile.scala
@@ -3,10 +3,10 @@ class Test {
class Inner
}
type A <: Base {
- type X = String // error
+ type X = String // old-error
}
type B <: {
- type X = Int // error
+ type X = Int // old-error
}
lazy val o: A & B = ???
diff --git a/tests/neg/kinds.scala b/tests/neg/kinds.scala
new file mode 100644
index 000000000..312c5d45e
--- /dev/null
+++ b/tests/neg/kinds.scala
@@ -0,0 +1,18 @@
+object Test {
+
+ class C[T]
+ class C2[T[X]]
+
+ class B
+
+ val x: C[C] = ??? // error: missing type parameter(s)
+ val y: C2[C] = ???
+
+ def f[T] = ???
+
+ def f2[T[X]] = ???
+
+ f[C] // error: missing type parameter(s)
+ f2[C]
+
+}
diff --git a/tests/neg/ski.scala b/tests/neg/ski.scala
index b192dc9e2..90a43039a 100644
--- a/tests/neg/ski.scala
+++ b/tests/neg/ski.scala
@@ -17,8 +17,8 @@ trait S2[x <: Term, y <: Term] extends Term {
type eval = S2[x, y]
}
trait S3[x <: Term, y <: Term, z <: Term] extends Term {
- type ap[v <: Term] = eval#ap[v] // error
- type eval = x#ap[z]#ap[y#ap[z]]#eval // error // error
+ type ap[v <: Term] = eval#ap[v] // error: not a legal path
+ type eval = x#ap[z]#ap[y#ap[z]]#eval // error: not a legal path // error: not a legal path
}
// The K combinator
@@ -31,8 +31,8 @@ trait K1[x <: Term] extends Term {
type eval = K1[x]
}
trait K2[x <: Term, y <: Term] extends Term {
- type ap[z <: Term] = eval#ap[z] // error
- type eval = x#eval // error
+ type ap[z <: Term] = eval#ap[z] // error: not a legal path
+ type eval = x#eval // error: not a legal path
}
// The I combinator
@@ -41,8 +41,8 @@ trait I extends Term {
type eval = I
}
trait I1[x <: Term] extends Term {
- type ap[y <: Term] = eval#ap[y] // error
- type eval = x#eval // error
+ type ap[y <: Term] = eval#ap[y] // error: not a legal path
+ type eval = x#eval // error: not a legal path
}
// Constants
@@ -64,9 +64,10 @@ case class Equals[A >: B <:B , B]()
object Test {
type T1 = Equals[Int, Int] // compiles fine
- type T2 = Equals[String, Int] // error
+ type T2 = Equals[String, Int] // error: Type argument String does not conform to upper bound Int
+
type T3 = Equals[I#ap[c]#eval, c]
- type T3a = Equals[I#ap[c]#eval, d] // error
+ type T3a = Equals[I#ap[c]#eval, d] // error: Type argument I1[c]#eval does not conform to upper bound d
// Ic -> c
type T4 = Equals[I#ap[c]#eval, c]
@@ -106,11 +107,11 @@ object Test {
type eval = A0
}
trait A1 extends Term {
- type ap[x <: Term] = x#ap[A0]#eval // error
+ type ap[x <: Term] = x#ap[A0]#eval // error: not a legal path
type eval = A1
}
trait A2 extends Term {
- type ap[x <: Term] = x#ap[A1]#eval // error
+ type ap[x <: Term] = x#ap[A1]#eval // error: not a legal path
type eval = A2
}
@@ -126,7 +127,7 @@ object Test {
type T15 = Equals[NN3#eval, c]
trait An extends Term {
- type ap[x <: Term] = x#ap[An]#eval // error
+ type ap[x <: Term] = x#ap[An]#eval // error: not a legal path
type eval = An
}
diff --git a/tests/neg/subtyping.scala b/tests/neg/subtyping.scala
index 27cc0568e..351fa0ecd 100644
--- a/tests/neg/subtyping.scala
+++ b/tests/neg/subtyping.scala
@@ -8,7 +8,7 @@ object Test {
implicitly[B#X <:< A#X] // error: no implicit argument
}
def test2(): Unit = {
- val a : { type T; type U } = ??? // error // error
+ val a : { type T; type U } = ???
implicitly[a.T <:< a.U] // error: no implicit argument
}
}
diff --git a/tests/neg/t2994.scala b/tests/neg/t2994.scala
index 23a3b6a8b..e19397a3d 100644
--- a/tests/neg/t2994.scala
+++ b/tests/neg/t2994.scala
@@ -7,7 +7,7 @@ object Naturals {
type a[s[_ <: NAT] <: NAT, z <: NAT] = z
}
final class SUCC[n <: NAT] extends NAT {
- type a[s[_ <: NAT] <: NAT, z <: NAT] = s[n#a[s, z]] // old-error: not a legal path
+ type a[s[_ <: NAT] <: NAT, z <: NAT] = s[n#a[s, z]] // error: not a legal path
}
type _0 = ZERO
type _1 = SUCC[_0]
@@ -20,8 +20,8 @@ object Naturals {
// crashes scala-2.8.0 beta1
trait MUL[n <: NAT, m <: NAT] extends NAT {
- trait curry[n[_[_], _], s[_]] { type f[z <: NAT] = n[s, z] } // can't do double param lists: // error: `]' expected but `[` found. // error: wrong number of type arguments
- type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z] // old-error: not a legal path // old-error: not a legal path
+ trait curry[n[_[_], _], s[_]] { type f[z <: NAT] = n[s, z] }
+ type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z] // error: not a legal path // error: not a legal path // error: arg does not conform to bound // error: arg does not conform to bound
}
}
diff --git a/tests/neg/t7278.scala b/tests/neg/t7278.scala
index 7aafbb76f..643a3c858 100644
--- a/tests/neg/t7278.scala
+++ b/tests/neg/t7278.scala
@@ -13,8 +13,8 @@ object Test {
def fail1(): Unit = {
val b = new B
- var x1: EE[A] = null
- var x2: EE[B] = new b.E // old-error: found: B#E, required: A#E
+ var x1: EE[A] = null // error: Type argument A does not conform to upper bound EC
+ var x2: EE[B] = new b.E // error: Type argument B does not conform to upper bound EC
// x1 = x2 // gives a prior type error: B#E, required: A#E, masked to get at the real thing.
}
@@ -27,8 +27,8 @@ object Test {
}
*/
def fail3(): Unit = {
- var x1: EE[C] = 5
- var x2: EE[C & D] = ""
+ var x1: EE[C] = 5 // error: Type argument C does not conform to upper bound EC
+ var x2: EE[C & D] = "" // error: Type argument C & D does not conform to upper bound EC
x1 = x2
}
diff --git a/tests/neg/zoo.scala b/tests/neg/zoo.scala
index 3d9b77b72..19efcc1d7 100644
--- a/tests/neg/zoo.scala
+++ b/tests/neg/zoo.scala
@@ -1,23 +1,23 @@
object Test {
type Meat = {
- type IsMeat = Any // error
+ type IsMeat = Any
}
type Grass = {
- type IsGrass = Any // error
+ type IsGrass = Any
}
type Animal = {
- type Food // error
+ type Food
def eats(food: Food): Unit // error
def gets: Food // error
}
type Cow = {
- type IsMeat = Any // error
- type Food <: Grass // error
+ type IsMeat = Any
+ type Food <: Grass
def eats(food: Grass): Unit // error
def gets: Grass // error
}
type Lion = {
- type Food = Meat // error
+ type Food = Meat
def eats(food: Meat): Unit // error
def gets: Meat // error
}
diff --git a/tests/pos/apply-equiv.scala b/tests/pending/pos/apply-equiv.scala
index f53b8b5ab..f53b8b5ab 100644
--- a/tests/pos/apply-equiv.scala
+++ b/tests/pending/pos/apply-equiv.scala
diff --git a/tests/pickling/i94-nada.scala b/tests/pickling/i94-nada.scala
index ce8dc98ad..cf39ee2ae 100644
--- a/tests/pickling/i94-nada.scala
+++ b/tests/pickling/i94-nada.scala
@@ -27,7 +27,7 @@ trait Test1 {
case class Left[A,B](x: A) extends Either[A,B] with Monad[A]
case class Right[A,B](x: B) extends Either[A,B] with Monad[B]
def flatMap[X,Y,M[X]<:Monad[X]](m: M[X], f: X => M[Y]): M[Y] = f(m.x)
- println(flatMap(Left(1), {x: Int => Left(x)}))
+ println(flatMap(Right(1), {x: Int => Right(x)}))
}
trait Test2 {
trait Monad[X] {
@@ -37,7 +37,7 @@ trait Test2 {
case class Left[A,B](x: A) extends Either[A,B] with Monad[A]
case class Right[A,B](x: B) extends Either[A,B] with Monad[B]
def flatMap[X,Y,M[X]](m: M[X], f: X => M[Y]): M[Y]
- println(flatMap(Left(1), {x: Int => Left(x)}))
+ println(flatMap(Right(1), {x: Int => Right(x)}))
}
trait Test3 {
def flatMap[X,Y,M[X]](m: M[X], f: X => M[Y]): M[Y]
diff --git a/tests/pos/GenTraversableFactory.scala b/tests/pos-scala2/GenTraversableFactory.scala
index 2f93ab27b..2f93ab27b 100644
--- a/tests/pos/GenTraversableFactory.scala
+++ b/tests/pos-scala2/GenTraversableFactory.scala
diff --git a/tests/pos-scala2/t2994.scala b/tests/pos-scala2/t2994.scala
index c7421c42a..f3009b12f 100644
--- a/tests/pos-scala2/t2994.scala
+++ b/tests/pos-scala2/t2994.scala
@@ -20,7 +20,7 @@ object Naturals {
// crashes scala-2.8.0 beta1
trait MUL[n <: NAT, m <: NAT] extends NAT {
- trait curry[n[_, _], s[_]] { type f[z <: NAT] = n[s, z] }
+ trait curry[n[_[_], _], s[_]] { type f[z <: NAT] = n[s, z] }
type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z]
}
diff --git a/tests/pos/t6014.scala b/tests/pos-scala2/t6014.scala
index 26e258a27..02535f377 100644
--- a/tests/pos/t6014.scala
+++ b/tests/pos-scala2/t6014.scala
@@ -3,7 +3,7 @@ object Test {
type Alias[T] = Seq[CC[T]]
def f(xs: Seq[CC[_]]) = xs map { case CC(x) => CC(x) } // ok
- def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // fails
+ def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // migration warning: unreducible application
// ./a.scala:11: error: missing parameter type for expanded function
// The argument types of an anonymous function must be fully known. (SLS 8.5)
// Expected type was: ?
diff --git a/tests/pos/hk-subtyping.scala b/tests/pos/hk-subtyping.scala
new file mode 100644
index 000000000..a004c2618
--- /dev/null
+++ b/tests/pos/hk-subtyping.scala
@@ -0,0 +1,13 @@
+object Test {
+
+ def compare[S <: T, T] = ()
+
+ compare[Int, Int]
+ compare[Int, Any]
+
+ def f[C <: List] = {
+ compare[C[Int], List[Int]]
+ }
+
+
+}
diff --git a/tests/pos/hklower.scala b/tests/pos/hklower.scala
new file mode 100644
index 000000000..90aa343ba
--- /dev/null
+++ b/tests/pos/hklower.scala
@@ -0,0 +1,41 @@
+class Test {
+
+ type T[X]
+ type U[X] = T[X]
+
+ type V[X] >: T[X]
+ type W[X] >: T[X] <: T[X]
+
+ def f[C[X] >: T[X]](x: C[Int]) = ???
+
+ val v: V[Int] = ???
+ val t: T[Int] = ???
+
+ f[V](v)
+
+ f[V](t)
+
+
+}
+class Test2 {
+
+ class T[X]
+ type U[X] = T[X]
+
+ type V[X] >: T[X]
+ type W[X] >: T[X] <: T[X]
+
+ def f[C[X] >: T[X]](x: C[Int]) = ???
+
+ val v: V[Int] = ???
+ val t: T[Int] = ???
+
+ f[V](v)
+
+ f[V](t)
+
+ var x: V[Int] = _
+ x = t
+
+
+}
diff --git a/tests/pending/pos/i1181.scala b/tests/pos/i1181.scala
index 057c938d3..057c938d3 100644
--- a/tests/pending/pos/i1181.scala
+++ b/tests/pos/i1181.scala
diff --git a/tests/pos/i94-nada.scala b/tests/pos/i94-nada.scala
index f8263ccf2..2c3cf895c 100644
--- a/tests/pos/i94-nada.scala
+++ b/tests/pos/i94-nada.scala
@@ -25,7 +25,7 @@ trait Test1 {
case class Left[A,B](x: A) extends Either[A,B] with Monad[A]
case class Right[A,B](x: B) extends Either[A,B] with Monad[B]
def flatMap[X,Y,M[X]<:Monad[X]](m: M[X], f: X => M[Y]): M[Y] = f(m.x)
- println(flatMap(Left(1), {x: Int => Left(x)}))
+ println(flatMap(Right(1), {x: Int => Right(x)}))
}
trait Test2 {
trait Monad[X] {
diff --git a/tests/pos/jon.scala b/tests/pos/jon.scala
index d4ea74f02..224486945 100644
--- a/tests/pos/jon.scala
+++ b/tests/pos/jon.scala
@@ -4,5 +4,5 @@ object Test {
val x = List(List, Vector)
- val y: List[scala.collection.generic.SeqFactory] = x
+ val y: List[scala.collection.generic.SeqFactory[_]] = x
}
diff --git a/tests/pos/lookuprefined.scala b/tests/pos/lookuprefined.scala
index f7e7f7337..9dd2b4abb 100644
--- a/tests/pos/lookuprefined.scala
+++ b/tests/pos/lookuprefined.scala
@@ -2,7 +2,9 @@ class C { type T; type U }
trait Test {
- val x: (C { type U = T } { type T = String }) # U
- val y: String = x
+ val x1: (C { type U = T; type T = String }) # U
+ val x2: (C { type U = T } {type T = String }) # U
+ val y1: String = x1
+ val y2: String = x2
}
diff --git a/tests/pos/range.scala b/tests/pos/range.scala
index 9e7b5d1c9..a33f7fcee 100644
--- a/tests/pos/range.scala
+++ b/tests/pos/range.scala
@@ -1,8 +1,8 @@
import scala.math._
import collection.immutable.NumericRange
object Test {
- val r1: scala.collection.immutable.Range.Partial = ???
- val r2: scala.Range.Partial = r1
+ val r1: scala.collection.immutable.Range.Partial[_, _] = ???
+ val r2: scala.Range.Partial[_, _] = r1
def until(d: BigDecimal, end: BigDecimal): Range.Partial[BigDecimal, NumericRange.Exclusive[BigDecimal]] =
new Range.Partial(until(d, end, _))
def until(d: BigDecimal, end: BigDecimal, step: BigDecimal) = Range.BigDecimal(d, end, step)
diff --git a/tests/pos/t2066.scala b/tests/pos/t2066.scala
new file mode 100644
index 000000000..d175769fa
--- /dev/null
+++ b/tests/pos/t2066.scala
@@ -0,0 +1,25 @@
+trait A1 {
+ def f[T[+_]] = ()
+}
+
+trait B1 extends A1 {
+ override def f[T[+_]] = ()
+}
+
+
+trait A2 {
+ def f[T[-_]] = ()
+}
+
+trait B2 extends A2 {
+ override def f[T[-_]] = ()
+}
+
+
+trait A3 {
+ def f[T[X[+_]]] = ()
+}
+
+trait B3 extends A3 {
+ override def f[T[X[+_]]] = ()
+}
diff --git a/tests/pos/t2613.scala b/tests/pos/t2613.scala
index c234d4c0d..17ebe2d7e 100644
--- a/tests/pos/t2613.scala
+++ b/tests/pos/t2613.scala
@@ -5,7 +5,7 @@ object Test {
abstract class MyRelation [R <: Row, +Relation <: MyRelation[R, Relation]]
- type M = MyRelation[_ <: Row, _ <: MyRelation]
+ type M = MyRelation[_ <: Row, _ <: MyRelation[_, _]]
val (x,y): (String, M) = null
}
diff --git a/tests/pos/t2712-1.scala b/tests/pos/t2712-1.scala
new file mode 100644
index 000000000..4f84c9df5
--- /dev/null
+++ b/tests/pos/t2712-1.scala
@@ -0,0 +1,9 @@
+package test
+
+// Original test case from,
+//
+// https://issues.scala-lang.org/browse/SI-2712
+object Test {
+ def meh[M[_], A](x: M[A]): M[A] = x
+ meh{(x: Int) => x} // solves ?M = [X] Int => X and ?A = Int ...
+}
diff --git a/tests/pos/t2712-2.scala b/tests/pos/t2712-2.scala
new file mode 100644
index 000000000..95172545d
--- /dev/null
+++ b/tests/pos/t2712-2.scala
@@ -0,0 +1,25 @@
+package test
+
+// See: https://github.com/milessabin/si2712fix-demo/issues/3
+object Test {
+ trait A[T1, T2] { }
+ trait B[T1, T2] { }
+ class C[T] extends A[T, Long] with B[T, Double]
+ class CB extends A[Boolean, Long] with B[Boolean, Double]
+
+ trait A2[T]
+ trait B2[T]
+ class C2[T] extends A2[T] with B2[T]
+ class CB2 extends A2[Boolean] with B2[Boolean]
+
+ def meh[M[_], A](x: M[A]): M[A] = x
+
+ val m0 = meh(new C[Boolean])
+ m0: C[Boolean]
+ val m1 = meh(new CB)
+  m1: B[Boolean, Double] // note: different order in which parents are visited for hk type inference. Dotty picks linearization order.
+ val m2 = meh(new C2[Boolean])
+ m2: C2[Boolean]
+ val m3 = meh(new CB2)
+  m3: B2[Boolean] // note: different order in which parents are visited for hk type inference. Dotty picks linearization order.
+}
diff --git a/tests/pos/t2712-3.scala b/tests/pos/t2712-3.scala
new file mode 100644
index 000000000..dd599f40f
--- /dev/null
+++ b/tests/pos/t2712-3.scala
@@ -0,0 +1,24 @@
+package test
+
+object Test1 {
+ class Foo[T, F[_]]
+ def meh[M[_[_]], F[_]](x: M[F]): M[F] = x
+ meh(new Foo[Int, List]) // solves ?M = [X[_]]Foo[Int, X[_]] ?A = List ...
+}
+
+object Test2 {
+ trait TC[T]
+ class Foo[F[_], G[_]]
+ def meh[GG[_[_]]](g: GG[TC]) = ???
+ meh(new Foo[TC, TC]) // solves ?G = [X[_]]Foo[TC, X]
+}
+
+object Test3 {
+ trait TC[F[_]]
+ trait TC2[F[_]]
+ class Foo[F[_[_]], G[_[_]]]
+ new Foo[TC, TC2]
+
+ def meh[G[_[_[_]]]](g: G[TC2]) = ???
+ meh(new Foo[TC, TC2]) // solves ?G = [X[_[_]]]Foo[TC, X]
+}
diff --git a/tests/pos/t2712-4.scala b/tests/pos/t2712-4.scala
new file mode 100644
index 000000000..3e2e5cdda
--- /dev/null
+++ b/tests/pos/t2712-4.scala
@@ -0,0 +1,17 @@
+package test
+
+object Test1 {
+ trait X
+ trait Y extends X
+ class Foo[T, U <: X]
+ def meh[M[_ <: A], A](x: M[A]): M[A] = x
+ meh(new Foo[Int, Y])
+}
+
+object Test2 {
+ trait X
+ trait Y extends X
+ class Foo[T, U >: Y]
+ def meh[M[_ >: A], A](x: M[A]): M[A] = x
+ meh(new Foo[Int, X])
+}
diff --git a/tests/pos/t2712-5.scala b/tests/pos/t2712-5.scala
new file mode 100644
index 000000000..ed96d4c06
--- /dev/null
+++ b/tests/pos/t2712-5.scala
@@ -0,0 +1,29 @@
+package test
+
+import scala.language.higherKinds
+
+trait Functor[F[_]] {
+ def map[A, B](f: A => B, fa: F[A]): F[B]
+}
+
+object Functor {
+ implicit def function[A]: Functor[({ type l[B] = A => B })#l] =
+ new Functor[({ type l[B] = A => B })#l] {
+ def map[C, B](cb: C => B, ac: A => C): A => B = cb compose ac
+ }
+}
+
+object FunctorSyntax {
+ implicit class FunctorOps[F[_], A](fa: F[A])(implicit F: Functor[F]) {
+ def map[B](f: A => B): F[B] = F.map(f, fa)
+ }
+}
+
+object Test {
+
+ val f: Int => String = _.toString
+
+ import FunctorSyntax._
+
+ f.map((s: String) => s.reverse)
+}
diff --git a/tests/pos/t2712-6.scala b/tests/pos/t2712-6.scala
new file mode 100644
index 000000000..dbba60472
--- /dev/null
+++ b/tests/pos/t2712-6.scala
@@ -0,0 +1,12 @@
+package test
+
+object Tags {
+ type Tagged[A, T] = {type Tag = T; type Self = A}
+
+ type @@[T, Tag] = Tagged[T, Tag]
+
+ trait Disjunction
+
+ def meh[M[_], A](ma: M[A]): M[A] = ma
+ meh(null: Int @@ Disjunction)//.asInstanceOf[Int @@ Disjunction])
+}
diff --git a/tests/pos/t2712-7.scala b/tests/pos/t2712-7.scala
new file mode 100644
index 000000000..d9c5243f1
--- /dev/null
+++ b/tests/pos/t2712-7.scala
@@ -0,0 +1,15 @@
+package test
+
+// Cats Xor, Scalaz \/, scala.util.Either
+sealed abstract class Xor[+A, +B] extends Product with Serializable
+object Xor {
+ final case class Left[+A](a: A) extends (A Xor Nothing)
+ final case class Right[+B](b: B) extends (Nothing Xor B)
+}
+
+object TestXor {
+ import Xor._
+ def meh[F[_], A, B](fa: F[A])(f: A => B): F[B] = ???
+ meh(new Right(23): Xor[Boolean, Int])(_ < 13)
+ meh(new Left(true): Xor[Boolean, Int])(_ < 13)
+}
diff --git a/tests/pos/t5683.scala b/tests/pos/t5683.scala
new file mode 100644
index 000000000..05ab03579
--- /dev/null
+++ b/tests/pos/t5683.scala
@@ -0,0 +1,23 @@
+object Test {
+ trait NT[X]
+ trait W[W, A] extends NT[Int]
+ type StringW[T] = W[String, T]
+ trait K[M[_], A, B]
+
+ def k[M[_], B](f: Int => M[B]): K[M, Int, B] = null
+
+ val okay1: K[StringW,Int,Int] = k{ (y: Int) => null: StringW[Int] }
+ val okay2 = k[StringW,Int]{ (y: Int) => null: W[String, Int] }
+
+ val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] }
+
+ // remove `extends NT[Int]`, and the last line gives an inference error
+ // rather than a crash.
+ // test/files/pos/t5683.scala:12: error: no type parameters for method k: (f: Int => M[B])Test.K[M,Int,B] exist so that it can be applied to arguments (Int => Test.W[String,Int])
+ // --- because ---
+ // argument expression's type is not compatible with formal parameter type;
+ // found : Int => Test.W[String,Int]
+ // required: Int => ?M[?B]
+ // val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] }
+ // ^
+}
diff --git a/tests/pos/tycons.scala b/tests/pos/tycons.scala
deleted file mode 100644
index 1ed4d2855..000000000
--- a/tests/pos/tycons.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-class TypeConstructor {
- type TypeArg
-}
-
-trait List[+T] extends TypeConstructor { type TypeArg <: T }
-
-trait Set[T] extends TypeConstructor { type TypeArg <: T }
-
-object obj extends List[Number] with Set[Exception] {
- val x: TypeArg = ???
- val n: Number = x
- val e: Exception = x
-}
-
-abstract class Functor[F <: TypeConstructor] {
- def map[A, B](f: F { type TypeArg <: A }): F { type TypeArg <: B }
-}
-
-object ListFunctor extends Functor[List] {
- override def map[A, B](f: List { type TypeArg <: A }): List { type TypeArg <: B } = ???
-}
-