author     Adriaan Moors <adriaan.moors@epfl.ch>   2010-09-16 23:39:21 +0000
committer  Adriaan Moors <adriaan.moors@epfl.ch>   2010-09-16 23:39:21 +0000
commit     7b4176c5be11085d0fdf2f1b0624e1829a1638ea (patch)
tree       a2a70dc2bbc49feb139e654a3cb46d454ea4151e
parent     ce16324ee09e17239423fe1665ade27af51b19c0 (diff)
Merged revisions 23010-23012 via svnmerge from
https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk

........
r23010 | moors | 2010-09-17 00:26:04 +0200 (Fri, 17 Sep 2010) | 8 lines

closes #1569, #3731: refactored dependent method types to get rid of debruijn
indices and use singleton types instead.
this is the core of the dependent types refactoring, no implicit or inference changes
(one baffling discovery: resultType should drop annotations that don't subclass
TypeConstraint, even in the trivial case... wow -- thanks to Tiark for helping me
figure it out on a terrace in Barcelona
TODO: probably need a more principled approach to the propagation of plugin type-annotations)
review by odersky
........
r23011 | moors | 2010-09-17 00:26:24 +0200 (Fri, 17 Sep 2010) | 9 lines

part 2 of the dependent method refactoring: improved interaction with implicit search
(needed for oopsla paper)
more to come in this area, see e.g. #3346 (stanford edsl stuff)
reopens #13, which wasn't fixed properly before imo, anyway (have a look at
-Xprint:typer output before this commit: a type that's not expressible in surface
syntax is inferred -- also removed duplicate test file)
closes #3731: co-evolve type alias type symbols when their rhs is updated and they
are referenced by type selections (see typemap)
review by odersky
........
r23012 | moors | 2010-09-17 01:01:19 +0200 (Fri, 17 Sep 2010) | 1 line

argh -- merge failure?? -- this was fixed
........
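
For orientation, a minimal sketch of the feature this series is about (an illustration, not part of the commit; it assumes the -Ydependent-method-types flag added in ScalaSettings below, or -Xexperimental, which now implies it):

// Illustration only. The result type mentions the parameter e: before this commit
// such a reference was encoded as a DeBruijnIndex, afterwards it is represented
// via the singleton type e.type underlying the type selection e.Value.
object DepMethSketch {
  trait Entry { type Value; def value: Value }
  def extract(e: Entry): e.Value = e.value
}
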
-rw-r--r--  src/compiler/scala/tools/nsc/CompilationUnits.scala             |  10
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala                       |  11
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala                  |  23
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala           |   6
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala       |   9
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Scopes.scala                |   2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Types.scala                 | 365
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala     |  10
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala   |  16
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala            |   5
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala        |   9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala            |  39
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala           | 111
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala        |   2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala  |   2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala           |  86
-rw-r--r--  src/compiler/scala/tools/nsc/util/ShowPickled.scala             |   2
-rw-r--r--  test/files/pos/bug1279a.scala                                   |   6
-rw-r--r--  test/files/run/constrained-types.check                          |   4
-rw-r--r--  test/files/run/constrained-types.scala                          |   2

20 files changed, 460 insertions, 260 deletions
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index a9300ff304..de48ff9931 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -29,6 +29,16 @@ trait CompilationUnits { self: Global =>
/** all comments found in this compilation unit */
val comments = new ListBuffer[Comment]
+// def parseSettings() = {
+// val argsmarker = "SCALAC_ARGS"
+// if(comments nonEmpty) {
+// val pragmas = comments find (_.text.startsWith("//#")) // only parse first one
+// pragmas foreach { p =>
+// val i = p.text.indexOf(argsmarker)
+// if(i > 0)
+// }
+// }
+// }
/** Note: depends now contains toplevel classes.
* To get their sourcefiles, you need to dereference with .sourcefile
*/
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 4afa9433c1..94d3245d7a 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -517,10 +517,12 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
phasesSet += analyzer.namerFactory // note: types are there because otherwise
phasesSet += analyzer.packageObjects // consistency check after refchecks would fail.
phasesSet += analyzer.typerFactory
- phasesSet += superAccessors // add super accessors
- phasesSet += pickler // serialize symbol tables
- phasesSet += refchecks // perform reference and override checking, translate nested objects
- // phasesSet += devirtualize // Desugar virtual classes
+ phasesSet += superAccessors // add super accessors
+ phasesSet += pickler // serialize symbol tables
+ phasesSet += refchecks // perform reference and override checking, translate nested objects
+
+// if (false && settings.YvirtClasses)
+// phasesSet += devirtualize // Desugar virtual classes4
phasesSet += uncurry // uncurry, translate function values to anonymous classes
phasesSet += tailCalls // replace tail calls by jumps
@@ -691,6 +693,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/** add unit to be compiled in this run */
private def addUnit(unit: CompilationUnit) {
+// unit.parseSettings()
unitbuf += unit
compiledFiles += unit.source.file.path
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 41e93ae386..f43a1a6ae3 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -118,17 +118,20 @@ abstract class TreeGen {
else mkAttributedIdent(sym)
/** Replaces tree type with a stable type if possible */
- def stabilize(tree: Tree): Tree = tree match {
- case Ident(_) =>
- if (tree.symbol.isStable) tree.setType(singleType(tree.symbol.owner.thisType, tree.symbol))
- else tree
- case Select(qual, _) =>
- assert((tree.symbol ne null) && (qual.tpe ne null))
- if (tree.symbol.isStable && qual.tpe.isStable)
- tree.setType(singleType(qual.tpe, tree.symbol))
- else tree
+ def stabilize(tree: Tree): Tree = {
+ for(tp <- stableTypeFor(tree)) tree.tpe = tp
+ tree
+ }
+
+ /** Computes stable type for a tree if possible */
+ def stableTypeFor(tree: Tree): Option[Type] = tree match {
+ case Ident(_) if tree.symbol.isStable =>
+ Some(singleType(tree.symbol.owner.thisType, tree.symbol))
+ case Select(qual, _) if {assert((tree.symbol ne null) && (qual.tpe ne null));
+ tree.symbol.isStable && qual.tpe.isStable} =>
+ Some(singleType(qual.tpe, tree.symbol))
case _ =>
- tree
+ None
}
/** Cast `tree' to type `pt' */
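
The new stableTypeFor only ever produces singleton types for stable references; a rough source-level picture of what "stable" means here (an illustration in plain Scala, not from the patch):

// Illustration only: vals are stable identifiers, vars are not.
object StableSketch {
  val a: String = "a"
  var b: String = "b"
  val sa: a.type = a     // ok: a val is stable, so a.type is a legal singleton type
  // val sb: b.type = b  // rejected: a var is not stable, there is no b.type
}
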
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 131b0d0a03..61d45114a9 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1794,7 +1794,7 @@ self =>
val name = ident()
var bynamemod = 0
val tpt =
- if (settings.Xexperimental.value && !owner.isTypeName && in.token != COLON) {
+ if (settings.YmethodInfer.value && !owner.isTypeName && in.token != COLON) {
TypeTree()
} else { // XX-METHOD-INFER
accept(COLON)
@@ -2344,7 +2344,7 @@ self =>
else (accessModifierOpt(), paramClauses(name, classContextBounds, mods.hasFlag(Flags.CASE)))
var mods1 = mods
if (mods hasFlag Flags.TRAIT) {
- if (settings.Xexperimental.value && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
+ if (settings.YvirtClasses && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
} else if (in.token == SUBTYPE) {
syntaxError("classes are not allowed to be virtual", false)
}
@@ -2435,7 +2435,7 @@ self =>
def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers,
vparamss: List[List[ValDef]], tstart: Int): Template = {
val (parents0, argss, self, body) =
- if (in.token == EXTENDS || settings.Xexperimental.value && (mods hasFlag Flags.TRAIT) && in.token == SUBTYPE) {
+ if (in.token == EXTENDS || settings.YvirtClasses && (mods hasFlag Flags.TRAIT) && in.token == SUBTYPE) {
in.nextToken()
template(mods hasFlag Flags.TRAIT)
} else if ((in.token == SUBTYPE) && (mods hasFlag Flags.TRAIT)) {
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 01f1d2c211..b175cb24ee 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -55,7 +55,6 @@ trait ScalaSettings extends AbsScalaSettings with StandardScalaSettings {
val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions and assumptions")
val elidebelow = IntSetting ("-Xelide-below", "Generate calls to @elidable-marked methods only if method priority is greater than argument.",
elidable.ASSERTION, None, elidable.byName.get(_))
- val Xexperimental = BooleanSetting ("-Xexperimental", "Enable experimental extensions")
val noForwarders = BooleanSetting ("-Xno-forwarders", "Do not generate static forwarders in mirror classes")
val future = BooleanSetting ("-Xfuture", "Turn on future language features")
val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot", "")
@@ -79,10 +78,18 @@ trait ScalaSettings extends AbsScalaSettings with StandardScalaSettings {
val Xshowobj = StringSetting ("-Xshow-object", "object", "Show object info", "")
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files", "scala.tools.nsc.io.SourceReader")
+
val Xwarnfatal = BooleanSetting ("-Xfatal-warnings", "Fail the compilation if there are any warnings.")
val Xwarninit = BooleanSetting ("-Xwarninit", "Warn about possible changes in initialization semantics")
val Xchecknull = BooleanSetting ("-Xcheck-null", "Emit warning on selection of nullable reference")
+ // Experimental Extensions
+ val Xexperimental = BooleanSetting ("-Xexperimental", "Enable experimental extensions") .
+ withPostSetHook(_ => List(YdepMethTpes, YmethodInfer) foreach (_.value = true)) //YvirtClasses,
+ val YdepMethTpes = BooleanSetting ("-Ydependent-method-types", "Allow dependent method types")
+ val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods")
+ val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
+
/** Compatibility stubs for options whose value name did
* not previously match the option name.
*/
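
-Xexperimental thus becomes an umbrella that just switches on the finer-grained -Y options. A usage sketch (an assumption about what -Yinfer-argument-types enables, based on the Parsers change above that lets a parameter omit its type annotation):

// Compile with:  scalac -Yinfer-argument-types Loud.scala   (or -Xexperimental)
trait Greeter { def greet(name: String): String }
class Loud extends Greeter {
  def greet(name) = name.toUpperCase   // parameter type inferred from Greeter.greet
}
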
diff --git a/src/compiler/scala/tools/nsc/symtab/Scopes.scala b/src/compiler/scala/tools/nsc/symtab/Scopes.scala
index bc8b93ac2c..b5e23d61f0 100644
--- a/src/compiler/scala/tools/nsc/symtab/Scopes.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Scopes.scala
@@ -217,7 +217,7 @@ trait Scopes {
if (e eq null) NoSymbol else e.sym
}
- /** Returns an iterator eidling every symbol with given name in this scope.
+ /** Returns an iterator yielding every symbol with given name in this scope.
*/
def lookupAll(name: Name): Iterator[Symbol] = new Iterator[Symbol] {
var e = lookupEntry(name)
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
index b3d2c56285..deffa1d852 100644
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Types.scala
@@ -264,9 +264,17 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
/** Is this type a structural refinement type (it 'refines' members that have not been inherited) */
def isStructuralRefinement: Boolean = false
+ /** Does this type depend immediately on an enclosing method parameter?
+ * i.e., is it a singleton type whose termSymbol refers to an argument of the symbol's owner (which is a method)
+ */
+ def isImmediatelyDependent: Boolean = false
+
/** Does this depend on an enclosing method parameter? */
def isDependent: Boolean = IsDependentCollector.collect(this)
+ /** True for WildcardType or BoundedWildcardType */
+ def isWildcard = false
+
/** The term symbol associated with the type
* Note that the symbol of the normalized type is returned (@see normalize)
*/
@@ -355,7 +363,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
*/
def remove(clazz: Symbol): Type = this
- def resultApprox: Type = ApproximateDeBruijnMap(resultType)
+ def resultApprox: Type = if(settings.YdepMethTpes.value) ApproximateDependentMap(resultType) else resultType
/** For a curried method or poly type its non-method result type,
* the type itself for all other types */
@@ -799,12 +807,18 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
*/
//TODO: use narrow only for modules? (correct? efficiency gain?)
def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
- // if this type contains type variables, get rid of them;
+ val suspension = TypeVar.Suspension
+ // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
+ // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
// without this, the matchesType call would lead to type variables on both sides
// of a subtyping/equality judgement, which can lead to recursive types being constructed.
// See (t0851) for a situation where this happens.
- if (!this.isGround)
- return typeVarToOriginMap(this).findMember(name, excludedFlags, requiredFlags, stableOnly)
+ if (!this.isGround) {
+ // make each type var in this type use its original type for comparisons instead of collecting constraints
+ for(tv@TypeVar(_, _) <- this) {
+ suspension suspend tv
+ }
+ }
incCounter(findMemberCount)
val start = startTimer(findMemberNanos)
@@ -835,6 +849,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
(bcs0.head.hasTransOwner(bcs.head)))) {
if (name.isTypeName || stableOnly && sym.isStable) {
stopTimer(findMemberNanos, start)
+ suspension.resumeAll
return sym
} else if (member == NoSymbol) {
member = sym
@@ -876,6 +891,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
excluded = excludedFlags
} // while (continue)
stopTimer(findMemberNanos, start)
+ suspension.resumeAll
if (members eq null) {
if (member == NoSymbol) incCounter(noMemberCount)
member
@@ -1012,12 +1028,14 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
/** An object representing an unknown type */
case object WildcardType extends Type {
+ override def isWildcard = true
override def safeToString: String = "?"
// override def isNullable: Boolean = true
override def kind = "WildcardType"
}
case class BoundedWildcardType(override val bounds: TypeBounds) extends Type {
+ override def isWildcard = true
override def safeToString: String = "?" + bounds
override def kind = "BoundedWildcardType"
}
@@ -1071,13 +1089,13 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
else sym.tpe
}
- case class DeBruijnIndex(level: Int, paramId: Int) extends Type {
- override def isTrivial = true
- override def isStable = true
- override def safeToString = "<param "+level+"."+paramId+">"
- override def kind = "DeBruijnIndex"
- // todo: this should be a subtype, which forwards to underlying
- }
+ // case class DeBruijnIndex(level: Int, paramId: Int) extends Type {
+ // override def isTrivial = true
+ // override def isStable = true
+ // override def safeToString = "<param "+level+"."+paramId+">"
+ // override def kind = "DeBruijnIndex"
+ // // todo: this should be a subtype, which forwards to underlying
+ // }
/** A class for singleton types of the form &lt;prefix&gt;.&lt;sym.name&gt;.type.
* Cannot be created directly; one should always use
@@ -1101,6 +1119,9 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
underlyingCache
}
+ // more precise conceptually, but causes cyclic errors: (paramss exists (_ contains sym))
+ override def isImmediatelyDependent = (sym ne NoSymbol) && (sym.owner.isMethod && sym.isValueParameter)
+
override def isVolatile : Boolean = underlying.isVolatile && (!sym.isStable)
/*
override def narrow: Type = {
@@ -1190,9 +1211,8 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
case tv: TypeVar => tvs += tv
case _ =>
}
- val varToParamMap: Map[Type, Symbol] = tvs map (tv => tv -> tv.origin.typeSymbol.cloneSymbol) toMap
- val paramToVarMap = varToParamMap map (_.swap)
-
+ val varToParamMap: Map[Type, Symbol] = tvs map (tv => tv -> tv.origin.typeSymbol.cloneSymbol) toMap
+ val paramToVarMap = varToParamMap map (_.swap)
val varToParam = new TypeMap {
def apply(tp: Type) = varToParamMap get tp match {
case Some(sym) => sym.tpe
@@ -1559,7 +1579,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
* Cannot be created directly; one should always use `typeRef'
* for creation. (@M: Otherwise hashing breaks)
*
- * @M: Higher-kinded types are represented as TypeRefs with a symbol that has type parameters, but with args==List()
+ * @M: a higher-kinded type is represented as a TypeRef with sym.info.typeParams.nonEmpty, but args.isEmpty
* @param pre ...
* @param sym ...
* @param args ...
@@ -1717,32 +1737,41 @@ A type's typeSymbol should never be inspected directly.
private var normalized: Type = null
+ @inline private def betaReduce: Type = {
+ assert(sym.info.typeParams.length == typeArgs.length, this)
+ // isHKSubType0 introduces synthetic type params so that betaReduce can first apply sym.info to typeArgs before calling asSeenFrom
+ // asSeenFrom then skips synthetic type params, which are used to reduce HO subtyping to first-order subtyping, but which can't be instantiated from the given prefix and class
+ // appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner) // this crashes pos/depmet_implicit_tpbetareduce.scala
+ transform(sym.info.resultType)
+ }
+
+ // @M TODO: should not use PolyType, as that's the type of a polymorphic value -- we really want a type *function*
+ // @M: initialize (by sym.info call) needed (see test/files/pos/ticket0137.scala)
+ @inline private def etaExpand: Type = {
+ val tpars = sym.info.typeParams // must go through sym.info for typeParams to initialise symbol
+ PolyType(tpars, typeRef(pre, sym, tpars map (_.tpeHK))) // todo: also beta-reduce?
+ }
+
override def dealias: Type =
if (sym.isAliasType && sym.info.typeParams.length == args.length) {
- val xform = transform(sym.info.resultType)
- assert(xform ne this, this)
- xform.dealias
+ betaReduce.dealias
} else this
- override def remove(clazz: Symbol): Type =
- if (sym == clazz && !args.isEmpty) args.head else this
-
def normalize0: Type =
- if (isHigherKinded) {
- // @M TODO: should not use PolyType, as that's the type of a polymorphic value -- we really want a type *function*
- // @M: initialize (by sym.info call) needed (see test/files/pos/ticket0137.scala)
- PolyType(sym.info.typeParams, typeRef(pre, sym, dummyArgs)) // must go through sym.info for typeParams
- } else if (sym.isAliasType) { // beta-reduce
- if(sym.info.typeParams.length == args.length) // don't do partial application
- transform(sym.info.resultType).normalize // cycles have been checked in typeRef
- else
- ErrorType
- } else if (sym.isRefinementClass) {
- sym.info.normalize // @MO to AM: OK?
- //@M I think this is okay, but changeset 12414 (which fixed #1241) re-introduced another bug (#2208)
- // see typedTypeConstructor in Typers
- } else {
- super.normalize
+ if (pre eq WildcardType) WildcardType // arises when argument-dependent types are approximated (see def depoly in implicits)
+ else if (isHigherKinded) etaExpand // eta-expand, subtyping relies on eta-expansion of higher-kinded types
+ else if (sym.isAliasType && sym.info.typeParams.length == args.length)
+ betaReduce.normalize // beta-reduce, but don't do partial application -- cycles have been checked in typeRef
+ else if (sym.isRefinementClass)
+ sym.info.normalize // I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
+ // else if (args nonEmpty) {
+ // val argsNorm = args mapConserve (_.dealias)
+ // if(argsNorm ne args) TypeRef(pre, sym, argsNorm)
+ // else this
+ // }
+ else {
+ if(sym.isAliasType) ErrorType //println("!!error: "+(pre, sym, sym.info, sym.info.typeParams, args))
+ else super.normalize
}
// track number of type parameters that we saw when caching normalization,
@@ -1750,8 +1779,7 @@ A type's typeSymbol should never be inspected directly.
// TODO: this would not be necessary if we could replace the call to sym.unsafeTypeParams in typeParamsDirect
// by a call to sym.typeParams, but need to verify that that does not lead to spurious "illegal cycle" errors
// the need for refreshing the cache is illustrated by #2278
- // TODO: no test case in the suite because don't know how to tell partest to compile in different runs,
- // and in a specific order
+ // TODO: test case that is compiled in a specific order and in different runs
private var normalizeTyparCount = -1
override def normalize: Type = {
@@ -1887,8 +1915,11 @@ A type's typeSymbol should never be inspected directly.
*/
case class MethodType(override val params: List[Symbol],
override val resultType: Type) extends Type {
- override val isTrivial: Boolean =
- params.forall(_.tpe.isTrivial) && resultType.isTrivial
+ override def isTrivial: Boolean = isTrivial0
+ private lazy val isTrivial0 =
+ resultType.isTrivial && params.forall{p => p.tpe.isTrivial && (
+ !settings.YdepMethTpes.value || !(params.exists(_.tpe.contains(p)) || resultType.contains(p)))
+ }
def isImplicit = params.nonEmpty && params.head.isImplicit
def isJava = false // can we do something like for implicits? I.e. do Java methods without parameters need to be recognized?
@@ -1902,30 +1933,36 @@ A type's typeSymbol should never be inspected directly.
override def boundSyms = params ::: resultType.boundSyms
- override def resultType(actuals: List[Type]) = {
- val map = new InstantiateDeBruijnMap(actuals)
- val rawResTpe = map.apply(resultType)
-
- if (phase.erasedTypes)
- rawResTpe
- else
- existentialAbstraction(map.existentialsNeeded, rawResTpe)
+ // this is needed for plugins to work correctly, only TypeConstraint annotations are supposed to be carried over
+ // TODO: this should probably be handled in a more structured way in adapt -- remove this map in resultType and watch the continuations tests fail
+ object dropNonContraintAnnotations extends TypeMap {
+ override val dropNonConstraintAnnotations = true
+ def apply(x: Type) = mapOver(x)
}
- override def finalResultType: Type = resultType.finalResultType
-
- private def dependentToString(base: Int): String = {
- val params = for ((pt, n) <- paramTypes.zipWithIndex) yield "x$"+n+":"+pt
- val res = resultType match {
- case mt: MethodType => mt.dependentToString(base + params.length)
- case rt => rt.toString
+ override def resultType(actuals: List[Type]) =
+ if(isTrivial) dropNonContraintAnnotations(resultType)
+ else {
+ if(actuals.length == params.length) {
+ val idm = new InstantiateDependentMap(params, actuals)
+ val res = idm(resultType)
+ // println("resultTypeDep "+(params, actuals, resultType, idm.existentialsNeeded, "\n= "+ res))
+ existentialAbstraction(idm.existentialsNeeded, res)
+ } else {
+ // Thread.dumpStack()
+ // println("resultType "+(params, actuals, resultType))
+ if (phase.erasedTypes) resultType
+ else existentialAbstraction(params, resultType)
+ }
}
- params.mkString("(", ",", ")")+res
- }
+
+ // implicit args can only be depended on in result type: TODO this may be generalised so that the only constraint is dependencies are acyclic
+ def approximate: MethodType = MethodType(params, resultApprox)
+
+ override def finalResultType: Type = resultType.finalResultType
override def safeToString: String =
- if (resultType.isDependent) dependentToString(0)
- else params.map(_.defString).mkString("(", ",", ")") + resultType
+ params.map(_.defString).mkString("(", ",", ")") + resultType
override def cloneInfo(owner: Symbol) = {
val vparams = cloneSymbols(params, owner)
@@ -2145,9 +2182,25 @@ A type's typeSymbol should never be inspected directly.
// then, constr became mutable (to support UndoLog, I guess), but pattern-matching returned the original constr0 (a bug)
// now, pattern-matching returns the most recent constr
object TypeVar {
+ // encapsulate suspension so we can automatically link the suspension of cloned typevars to their original if this turns out to be necessary
+ def Suspension = new Suspension
+ class Suspension {
+ private val suspended = collection.mutable.HashSet[TypeVar]()
+ def suspend(tv: TypeVar): Unit = {
+ tv.suspended = true
+ suspended += tv
+ }
+ def resumeAll: Unit = {
+ for(tv <- suspended) {
+ tv.suspended = false
+ }
+ suspended.clear
+ }
+ }
+
def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr))
def apply(origin: Type, constr: TypeConstraint) = new TypeVar(origin, constr, List(), List())
- def apply(tparam: Symbol) = new TypeVar(tparam.tpeHK, new TypeConstraint, List(), tparam.typeParams)
+ def apply(tparam: Symbol) = new TypeVar(tparam.tpeHK, new TypeConstraint, List(), tparam.typeParams) // TODO why not initialise TypeConstraint with bounds of tparam?
def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]) = new TypeVar(origin, constr, args, params)
}
@@ -2206,6 +2259,9 @@ A type's typeSymbol should never be inspected directly.
constr.addHiBound(tp, numBound)
}
+ // ignore subtyping&equality checks while true -- see findMember
+ private[TypeVar] var suspended = false
+
/** Called from isSubtype0 when a TypeVar is involved in a subtyping check.
* if isLowerBound is true,
* registerBound returns whether this TypeVar could plausibly be a supertype of tp and,
@@ -2238,7 +2294,8 @@ A type's typeSymbol should never be inspected directly.
if(isLowerBound) isSubArgs(args1, args2, params)
else isSubArgs(args2, args1, params)
- if (constr.instValid) // type var is already set
+ if (suspended) checkSubtype(tp, origin)
+ else if (constr.instValid) // type var is already set
checkSubtype(tp, constr.inst)
else isRelatable(tp) && {
if(params.isEmpty) { // type var has kind *
@@ -2263,7 +2320,8 @@ A type's typeSymbol should never be inspected directly.
if(typeVarLHS) constr.inst =:= tp
else tp =:= constr.inst
- if (constr.instValid) checkIsSameType(tp)
+ if (suspended) tp =:= origin
+ else if (constr.instValid) checkIsSameType(tp)
else isRelatable(tp) && {
undoLog record this
@@ -2275,6 +2333,20 @@ A type's typeSymbol should never be inspected directly.
}
}
+ /**
+ * ?A.T =:= tp is rewritten as the constraint ?A <: {type T = tp}
+ *
+ * TODO: make these constraints count (incorporate them into implicit search in applyImplicitArgs)
+ * (T corresponds to @param sym)
+ */
+ def registerTypeSelection(sym: Symbol, tp: Type): Boolean = {
+ val bound = refinedType(List(WildcardType), NoSymbol)
+ val bsym = bound.typeSymbol.newAliasType(NoPosition, sym.name)
+ bsym setInfo tp
+ bound.decls enter bsym
+ registerBound(bound, false)
+ }
+
/** Can this variable be related in a constraint to type `tp'?
* This is not the case if `tp' contains type skolems whose
* skolemization level is higher than the level of this variable.
@@ -2312,7 +2384,10 @@ A type's typeSymbol should never be inspected directly.
override def isVolatile = origin.isVolatile
override def kind = "TypeVar"
- def cloneInternal = TypeVar(origin, constr cloneInternal, typeArgs, params) // @M TODO: clone args/params?
+ def cloneInternal = {
+ assert(!suspended) // cloning a suspended type variable when it's suspended will cause the clone to never be resumed with the current implementation
+ TypeVar(origin, constr cloneInternal, typeArgs, params) // @M TODO: clone args/params?
+ }
}
/** A type carrying some annotations. Created by the typechecker
@@ -2485,9 +2560,17 @@ A type's typeSymbol should never be inspected directly.
* todo: see how we can clean this up a bit
*/
def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = {
- var sym1 = if (sym.isAbstractType) rebind(pre, sym) else sym
+ def rebindTR(pre: Type, sym: Symbol): Symbol = {
+ if(sym.isAbstractType) rebind(pre, sym) else sym
+ // type alias selections are rebound in TypeMap ("coevolved", actually -- see #3731)
+ // e.g., when type parameters that are referenced by the alias are instantiated in the prefix
+ // see pos/depmet_rebind_typealias
+ }
+ val sym1 = rebindTR(pre, sym)
+
def transform(tp: Type): Type =
tp.resultType.asSeenFrom(pre, sym1.owner).instantiateTypeParams(sym1.typeParams, args)
+
if (sym1.isAliasType && sym1.info.typeParams.length == args.length) {
if (!sym1.lockOK)
throw new TypeError("illegal cyclic reference involving " + sym1)
@@ -2504,8 +2587,7 @@ A type's typeSymbol should never be inspected directly.
} else {
val pre1 = removeSuper(pre, sym1)
if (pre1 ne pre) {
- if (sym1.isAbstractType) sym1 = rebind(pre1, sym1)
- typeRef(pre1, sym1, args)
+ typeRef(pre1, rebindTR(pre1, sym1), args)
}
else if (sym1.isClass && pre.isInstanceOf[CompoundType]) {
// sharpen prefix so that it is maximal and still contains the class.
@@ -2813,6 +2895,28 @@ A type's typeSymbol should never be inspected directly.
case _ => false
}
+ // #3731: return sym1 for which holds: pre bound sym.name to sym and pre1 now binds sym.name to sym1, conceptually exactly the same symbol as sym
+ // the selection of sym on pre must be updated to the selection of sym1 on pre1,
+ // since sym's info was probably updated by the TypeMap to yield a new symbol sym1 with transformed info
+ // @returns sym1
+ protected def coevolveSym(pre: Type, pre1: Type, sym: Symbol): Symbol =
+ if((pre ne pre1) && sym.isAliasType) // only need to rebind type aliases here, as typeRef already handles abstract types (they are allowed to be rebound more liberally)
+ (pre, pre1) match {
+ case (RefinedType(_, decls), RefinedType(_, decls1)) => // don't look at parents -- it would be an error to override alias types anyway
+ //val sym1 =
+ decls1.lookup(sym.name)
+// assert(decls.lookupAll(sym.name).toList.length == 1)
+// assert(decls1.lookupAll(sym.name).toList.length == 1)
+// assert(sym1.isAliasType)
+// println("coevolved "+ sym +" : "+ sym.info +" to "+ sym1 +" : "+ sym1.info +" with "+ pre +" -> "+ pre1)
+// sym1
+ case _ => // TODO: is there another way a typeref's symbol can refer to a symbol defined in its pre?
+// val sym1 = pre1.nonPrivateMember(sym.name).suchThat(sym => sym.isAliasType)
+// println("??coevolve "+ sym +" : "+ sym.info +" to "+ sym1 +" : "+ sym1.info +" with "+ pre +" -> "+ pre1)
+ sym
+ }
+ else sym
+
/** Map this function over given type */
def mapOver(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
@@ -2825,7 +2929,7 @@ A type's typeSymbol should never be inspected directly.
else mapOverArgs(args, tparams)
}
if ((pre1 eq pre) && (args1 eq args)) tp
- else typeRef(pre1, sym, args1)
+ else typeRef(pre1, coevolveSym(pre, pre1, sym), args1)
case ThisType(_) => tp
case SingleType(pre, sym) =>
if (sym.isPackageClass) tp // short path
@@ -2850,7 +2954,7 @@ A type's typeSymbol should never be inspected directly.
if ((tparams1 eq tparams) && (result1 eq result)) tp
else PolyType(tparams1, result1.substSym(tparams, tparams1))
case ConstantType(_) => tp
- case DeBruijnIndex(_, _) => tp
+ // case DeBruijnIndex(_, _) => tp
case SuperType(thistp, supertp) =>
val thistp1 = this(thistp)
val supertp1 = this(supertp)
@@ -3212,6 +3316,7 @@ A type's typeSymbol should never be inspected directly.
/** A base class to compute all substitutions */
abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
+ assert(from.length == to.length, "Unsound substitution from "+ from +" to "+ to)
/** Are `sym' and `sym1' the same.
* Can be tuned by subclasses.
@@ -3227,7 +3332,7 @@ A type's typeSymbol should never be inspected directly.
else if (matches(from.head, sym)) toType(tp, to.head)
else subst(tp, sym, from.tail, to.tail)
- private def renameBoundSyms(tp: Type): Type = tp match {
+ protected def renameBoundSyms(tp: Type): Type = tp match {
case MethodType(ps, restp) =>
val ps1 = cloneSymbols(ps)
copyMethodType(tp, ps1, renameBoundSyms(restp.substSym(ps, ps1)))
@@ -3374,11 +3479,35 @@ A type's typeSymbol should never be inspected directly.
}
}
+// dependent method types
+ object IsDependentCollector extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if(tp isImmediatelyDependent) result = true
+ else if (!result) mapOver(tp)
+ }
+ }
+
+ object ApproximateDependentMap extends TypeMap {
+ def apply(tp: Type): Type =
+ if(tp isImmediatelyDependent) WildcardType
+ else mapOver(tp)
+ }
+
+/*
/** Most of the implementation for MethodType.resultType. The
* caller also needs to existentially quantify over the
* variables in existentialsNeeded.
*/
class InstantiateDeBruijnMap(actuals: List[Type]) extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case DeBruijnIndex(level, pid) =>
+ if (level == 1)
+ if (pid < actuals.length) actuals(pid) else tp
+ else DeBruijnIndex(level - 1, pid)
+ case _ =>
+ mapOver(tp)
+ }
+
override val dropNonConstraintAnnotations = true
private var existSyms = immutable.Map.empty[Int, Symbol]
@@ -3401,15 +3530,6 @@ A type's typeSymbol should never be inspected directly.
sym
}
- def apply(tp: Type): Type = tp match {
- case DeBruijnIndex(level, pid) =>
- if (level == 1)
- if (pid < actuals.length) actuals(pid) else tp
- else DeBruijnIndex(level - 1, pid)
- case _ =>
- mapOver(tp)
- }
-
override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
object treeTrans extends TypeMapTransformer {
override def transform(tree: Tree): Tree =
@@ -3442,16 +3562,71 @@ A type's typeSymbol should never be inspected directly.
treeTrans.transform(arg)
}
}
+*/
- object ApproximateDeBruijnMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case DeBruijnIndex(level, pid) =>
- WildcardType
- case _ =>
- mapOver(tp)
+ class InstantiateDependentMap(params: List[Symbol], actuals: List[Type]) extends SubstTypeMap(params, actuals) {
+ override protected def renameBoundSyms(tp: Type): Type = tp match {
+ case MethodType(ps, restp) => tp // the whole point of this substitution is to instantiate these args
+ case _ => super.renameBoundSyms(tp)
+ }
+ // TODO: should we optimise this? only need to consider singletontypes
+
+ override val dropNonConstraintAnnotations = true
+
+ def existentialsNeeded: List[Symbol] = existSyms.filter(_ ne null).toList
+
+ private val existSyms: Array[Symbol] = new Array(actuals.length)
+ private def haveExistential(i: Int) = {assert((i >= 0) && (i <= actuals.length)); existSyms(i) ne null}
+
+ /* Return the type symbol for referencing a parameter inside the existential quantifier.
+ * (Only needed if the actual is unstable.)
+ */
+ def existSymFor(actualIdx: Int) =
+ if (haveExistential(actualIdx)) existSyms(actualIdx)
+ else {
+ val oldSym = params(actualIdx)
+ val symowner = oldSym.owner
+ val bound = singletonBounds(actuals(actualIdx))
+
+ val sym = symowner.newExistential(oldSym.pos, oldSym.name+".type")
+ sym.setInfo(bound)
+ sym.setFlag(oldSym.flags)
+
+ existSyms(actualIdx) = sym
+ sym
+ }
+
+ //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
+ override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
+ object treeTrans extends Transformer {
+ override def transform(tree: Tree): Tree = {
+ tree match {
+ case RefParamAt(pid) =>
+ if(actuals(pid) isStable) mkAttributedQualifier(actuals(pid), tree.symbol)
+ else {
+ val sym = existSymFor(pid)
+ (Ident(sym.name)
+ copyAttrs tree
+ setType typeRef(NoPrefix, sym, Nil))
+ }
+ case _ => super.transform(tree)
+ }
+ }
+ object RefParamAt {
+ def unapply(tree: Tree): Option[(Int)] = tree match {
+ case Ident(_) =>
+ val pid = params indexOf tree.symbol
+ if(pid != -1) Some((pid)) else None
+ case _ => None
+ }
+ }
+ }
+
+ treeTrans.transform(arg)
}
}
+
object StripAnnotationsMap extends TypeMap {
def apply(tp: Type): Type = tp match {
case AnnotatedType(_, atp, _) =>
@@ -3556,15 +3731,6 @@ A type's typeSymbol should never be inspected directly.
}
}
- object IsDependentCollector extends TypeCollector(false) {
- def traverse(tp: Type) {
- tp match {
- case DeBruijnIndex(_, _) => result = true
- case _ => if (!result) mapOver(tp)
- }
- }
- }
-
/** A map to compute the most deeply nested owner that contains all the symbols
* of thistype or prefixless typerefs/singletype occurrences in given type.
*/
@@ -4005,9 +4171,14 @@ A type's typeSymbol should never be inspected directly.
case tr1: TypeRef =>
tp2 match {
case tr2: TypeRef =>
- return equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
+ return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- isSameTypes(tr1.args, tr2.args))
+ isSameTypes(tr1.args, tr2.args))) ||
+ ((tr1.pre, tr2.pre) match {
+ case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
+ case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
+ case _ => false
+ })
case _ =>
}
case tt1: ThisType =>
@@ -4055,7 +4226,7 @@ A type's typeSymbol should never be inspected directly.
case mt1: MethodType =>
tp2 match {
case mt2: MethodType =>
- // new dependent types: probably fix this, use substSym as done for PolyType
+ // DEPMETTODO new dependent types: probably fix this, use substSym as done for PolyType
return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
mt1.resultType =:= mt2.resultType &&
mt1.isImplicit == mt2.isImplicit
@@ -4231,7 +4402,7 @@ A type's typeSymbol should never be inspected directly.
res1 <:< res2.substSym(tparams2, tparams1)
} else { // normalized higher-kinded type
//@M for an example of why we need to generate fresh symbols, see neg/tcpoly_ticket2101.scala
- val tpsFresh = cloneSymbols(tparams1) // @M cloneSymbols(tparams2) should be equivalent -- TODO: check
+ val tpsFresh = cloneSymbols(tparams1)
(tparams1 corresponds tparams2)((p1, p2) =>
p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) && // @PP: corresponds
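
The normalize0 rewrite above separates two cases the old code interleaved; roughly, at the source level (an illustration in plain Scala, not from the patch): a fully applied type alias is beta-reduced, while an alias passed as a bare type constructor is eta-expanded so that higher-kinded subtyping can compare it.

// Illustration only.
object NormalizeSketch {
  type Pair[A] = (A, A)
  val p: Pair[Int] = (1, 2)        // fully applied alias: beta-reduces to (Int, Int)
  def size[F[_], A](fa: F[A]) = 1
  val n = size[Pair, Int](p)       // bare alias as a type constructor: eta-expanded
}
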
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 91141ce4d6..e3bd67ec81 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -164,7 +164,7 @@ abstract class Pickler extends SubComponent {
*/
private def putType(tp: Type): Unit = if (putEntry(tp)) {
tp match {
- case NoType | NoPrefix | DeBruijnIndex(_, _) =>
+ case NoType | NoPrefix /*| DeBruijnIndex(_, _) */ =>
;
case ThisType(sym) =>
putSymbol(sym)
@@ -592,8 +592,8 @@ abstract class Pickler extends SubComponent {
writeRef(restpe); writeRefs(tparams); POLYtpe
case ExistentialType(tparams, restpe) =>
writeRef(restpe); writeRefs(tparams); EXISTENTIALtpe
- case DeBruijnIndex(l, i) =>
- writeNat(l); writeNat(i); DEBRUIJNINDEXtpe
+ // case DeBruijnIndex(l, i) =>
+ // writeNat(l); writeNat(i); DEBRUIJNINDEXtpe
case c @ Constant(_) =>
if (c.tag == BooleanTag) writeLong(if (c.booleanValue) 1 else 0)
else if (ByteTag <= c.tag && c.tag <= LongTag) writeLong(c.longValue)
@@ -1042,8 +1042,8 @@ abstract class Pickler extends SubComponent {
case ExistentialType(tparams, restpe) =>
print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams);
print("||| "+entry)
- case DeBruijnIndex(l, i) =>
- print("DEBRUIJNINDEXtpe "); print(l+" "+i)
+ // case DeBruijnIndex(l, i) =>
+ // print("DEBRUIJNINDEXtpe "); print(l+" "+i)
case c @ Constant(_) =>
print("LITERAL ")
if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0))
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
index a94838e163..05ffc6c7b7 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
@@ -30,14 +30,14 @@ abstract class UnPickler extends reflect.generic.UnPickler {
protected override def debug = settings.debug.value
- override def noSuchTypeTag(tag: Int, end: Int): Type = {
- tag match {
- case DEBRUIJNINDEXtpe =>
- DeBruijnIndex(readNat(), readNat())
- case _ =>
- super.noSuchTypeTag(tag, end)
- }
- }
+ // override def noSuchTypeTag(tag: Int, end: Int): Type = {
+ // tag match {
+ // case DEBRUIJNINDEXtpe =>
+ // DeBruijnIndex(readNat(), readNat())
+ // case _ =>
+ // super.noSuchTypeTag(tag, end)
+ // }
+ // }
override protected def errorMissingRequirement(name: Name, owner: Symbol) =
errorMissingRequirement(
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 814434d46c..635a1983e2 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -165,8 +165,9 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
cloneSymbols(params) map (p => p.setInfo(apply(p.tpe))),
if (restpe.typeSymbol == UnitClass)
erasedTypeRef(UnitClass)
- else if (settings.Xexperimental.value)
- apply(mt.resultType(params map (_.tpe))) // this gets rid of DeBruijnTypes
+ else if (settings.YdepMethTpes.value)
+ // this replaces each typeref that refers to an argument by the type `p.tpe` of the actual argument p (p in params)
+ apply(mt.resultType(params map (_.tpe)))
else
apply(restpe))
case RefinedType(parents, decls) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index adcae7d9b1..392223e1e2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -234,11 +234,14 @@ self: Analyzer =>
private def tparamsToWildcards(tp: Type, tparams: List[Symbol]) =
tp.instantiateTypeParams(tparams, tparams map (t => WildcardType))
- /* Map a polytype to one in which all type parameters are replaced by wildcards.
+ /* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards.
+ * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate DebruijnIndex types
+ * when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`,
+ * so we have to approximate (otherwise it is excluded a priori).
*/
private def depoly(tp: Type): Type = tp match {
- case PolyType(tparams, restpe) => tparamsToWildcards(restpe, tparams)
- case _ => tp
+ case PolyType(tparams, restpe) => tparamsToWildcards(ApproximateDependentMap(restpe), tparams)
+ case _ => ApproximateDependentMap(tp)
}
/** Does type `dtor` dominate type `dted`?
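
Spelling out the example from the comment above as a compilable sketch (A and T are the names assumed by that comment; needs -Ydependent-method-types): the candidate b can only be kept during implicit search if its argument-dependent result type x.T is first approximated to a wildcard.

// Illustration only, expanding the comment's example.
trait A { type T }
object DepolySketch {
  // The result type depends on the implicit argument x, which has not been searched
  // yet when b itself is considered as a candidate -- hence ApproximateDependentMap in depoly.
  implicit def b(implicit x: A): x.T = error("")
}
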
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 86faddab37..98bb1828b2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -30,11 +30,6 @@ trait Infer {
def isVarArgs(params: List[Symbol]) = !params.isEmpty && isRepeatedParamType(params.last.tpe)
def isVarArgTpes(formals: List[Type]) = !formals.isEmpty && isRepeatedParamType(formals.last)
- def isWildcard(tp: Type) = tp match {
- case WildcardType | BoundedWildcardType(_) => true
- case _ => false
- }
-
/** The formal parameter types corresponding to <code>formals</code>.
* If <code>formals</code> has a repeated last parameter, a list of
* (nargs - params.length + 1) copies of its type is returned.
@@ -176,9 +171,10 @@ trait Infer {
* This method seems to be performance critical.
*/
def normalize(tp: Type): Type = tp match {
- case mt @ MethodType(params, restpe) if (!restpe.isDependent) =>
- if (mt.isImplicit) normalize(restpe)
- else functionType(params map (_.tpe), normalize(restpe))
+ case mt @ MethodType(params, restpe) if mt.isImplicit =>
+ normalize(restpe)
+ case mt @ MethodType(params, restpe) if !restpe.isDependent =>
+ functionType(params map (_.tpe), normalize(restpe))
case PolyType(List(), restpe) => // nullary method type
normalize(restpe)
case ExistentialType(tparams, qtpe) =>
@@ -566,7 +562,7 @@ trait Infer {
(tparams, targs).zipped.map{ (tparam, targ) =>
if (targ.typeSymbol == NothingClass &&
- (isWildcard(restpe) || notCovariantIn(tparam, restpe))) {
+ (restpe.isWildcard || notCovariantIn(tparam, restpe))) {
tparam -> None
} else {
tparam -> Some(
@@ -1078,20 +1074,31 @@ trait Infer {
substExpr(tree, undetparams, targs, lenientPt)
}
- /** Substitute free type variables `undetparams; of polymorphic expression
- * <code>tree</code>, given prototype <code>pt</code>.
- *
- * @param tree ...
- * @param undetparams ...
- * @param pt ...
+ /** Infer type arguments for `tparams` of polymorphic expression in `tree`, given prototype `pt`.
*/
def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type, keepNothings: Boolean): List[Symbol] = {
if (inferInfo)
println("infer expr instance "+tree+":"+tree.tpe+"\n"+
" tparams = "+tparams+"\n"+
" pt = "+pt)
- val targs = exprTypeArgs(tparams, tree.tpe, pt)
+ substAdjustedArgs(tree, tparams, pt, exprTypeArgs(tparams, tree.tpe, pt), keepNothings)
+ }
+
+ /** Infer type arguments for `tparams` of polymorphic expression in `tree`, given prototype `pt`.
+ * Use specified type `treeTp` instead of `tree.tp`
+ */
+ def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type, treeTp: Type, keepNothings: Boolean): List[Symbol] = {
+ if (inferInfo)
+ println("infer expr instance "+tree+":"+tree.tpe+"\n"+
+ " tparams = "+tparams+"\n"+
+ " pt = "+pt)
+ substAdjustedArgs(tree, tparams, pt, exprTypeArgs(tparams, treeTp, pt), keepNothings)
+ }
+ /** Substitute tparams to targs, after adjustment by adjustTypeArgs,
+ * return tparams that were not determined
+ */
+ def substAdjustedArgs(tree: Tree, tparams: List[Symbol], pt: Type, targs: List[Type], keepNothings: Boolean): List[Symbol] = {
if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226
substExpr(tree, tparams, targs, pt)
List()
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index b5d3a939e1..262a760958 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -767,99 +767,68 @@ trait Namers { self: Analyzer =>
val tparamSyms = typer.reenterTypeParams(tparams)
// since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams
var vparamSymss = enterValueParams(meth, vparamss)
+ // DEPMETTODO: do we need to skolemize value parameter symbols?
if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
tpt defineType context.enclClass.owner.tpe
tpt setPos meth.pos.focus
}
- def convertToDeBruijn(vparams: List[Symbol], level: Int): TypeMap = new TypeMap {
- def debruijnFor(param: Symbol) =
- DeBruijnIndex(level, vparams indexOf param)
- def apply(tp: Type) = {
- tp match {
- case SingleType(_, sym) =>
- if (settings.Xexperimental.value && sym.owner == meth && (vparams contains sym)) {
-/*
- if (sym hasFlag IMPLICIT) {
- context.error(sym.pos, "illegal type dependence on implicit parameter")
- ErrorType
- } else
-*/
- debruijnFor(sym)
- } else tp
- case MethodType(params, restpe) =>
- val params1 = this.mapOver(params)
- val restpe1 = convertToDeBruijn(vparams, level + 1)(restpe)
- if ((params1 eq params) && (restpe1 eq restpe)) tp
- else copyMethodType(tp, params1, restpe1)
- case _ =>
- mapOver(tp)
- }
- }
-
- // AnnotatedTypes can contain trees in the annotation arguments. When accessing a
- // parameter in an annotation, set the type of the Ident to the DeBruijnIndex
- object treeTrans extends TypeMapTransformer {
- override def transform(tree: Tree): Tree =
- tree match {
- case Ident(name) if (vparams contains tree.symbol) =>
- val dtpe = debruijnFor(tree.symbol)
- val dsym =
- context.owner.newLocalDummy(tree.symbol.pos)
- .newValue(tree.symbol.pos, name)
-
- dsym.setFlag(PARAM)
- dsym.setInfo(dtpe)
- Ident(name).setSymbol(dsym).copyAttrs(tree).setType(dtpe)
- case tree => super.transform(tree)
- }
- }
-
- // for type annotations (which may contain trees)
- override def mapOver(arg: Tree) = Some(treeTrans.transform(arg))
- }
-
- val checkDependencies: TypeTraverser = new TypeTraverser {
- def traverse(tp: Type) = {
- tp match {
- case SingleType(_, sym) =>
- if (sym.owner == meth && (vparamSymss exists (_ contains sym)))
- context.error(
- sym.pos,
- "illegal dependent method type"+
- (if (settings.Xexperimental.value)
- ": parameter appears in the type of another parameter in the same section or an earlier one"
- else ""))
- case _ =>
- mapOver(tp)
- }
- this
- }
- }
/** Called for all value parameter lists, right to left
* @param vparams the symbols of one parameter list
* @param restpe the result type (possibly a MethodType)
*/
def makeMethodType(vparams: List[Symbol], restpe: Type) = {
+ // TODODEPMET: check that we actually don't need to do anything here
// new dependent method types: probably OK already, since 'enterValueParams' above
// enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
// check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
// so re-use / adapt that)
val params = vparams map (vparam =>
if (meth hasFlag JAVA) vparam.setInfo(objToAny(vparam.tpe)) else vparam)
- val restpe1 = convertToDeBruijn(vparams, 1)(restpe) // new dependent types: replace symbols in restpe with the ones in vparams
- if (meth hasFlag JAVA) JavaMethodType(params, restpe1)
- else MethodType(params, restpe1)
+ // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
+ if (meth hasFlag JAVA) JavaMethodType(params, restpe)
+ else MethodType(params, restpe)
}
- def thisMethodType(restpe: Type) =
+ def thisMethodType(restpe: Type) = {
+ import scala.collection.mutable.ListBuffer
+ val okParams = ListBuffer[Symbol]()
+ // can we relax these restrictions? see test/files/pos/depmet_implicit_oopsla_session_2.scala and neg/depmet_try_implicit.scala for motivation
+ // should allow forward references since type selections on implicit args are like type parameters:
+ // def foo[T](a: T, x: w.T2)(implicit w: ComputeT2[T])
+ // is more compact than: def foo[T, T2](a: T, x: T2)(implicit w: ComputeT2[T, T2])
+ // moreover, the latter is not an encoding of the former, which hides type inference of T2, so you can specify T while T2 is purely computed
+ val checkDependencies: TypeTraverser = new TypeTraverser {
+ def traverse(tp: Type) = {
+ tp match {
+ case SingleType(_, sym) =>
+ if (sym.owner == meth && sym.isValueParameter && !(okParams contains sym))
+ context.error(
+ sym.pos,
+ "illegal dependent method type"+
+ (if (settings.YdepMethTpes.value)
+ ": parameter appears in the type of another parameter in the same section or an earlier one"
+ else ""))
+ case _ =>
+ mapOver(tp)
+ }
+ this
+ }
+ }
+ for(vps <- vparamSymss) {
+ for(p <- vps) checkDependencies(p.info)
+ if(settings.YdepMethTpes.value) okParams ++= vps // can only refer to symbols in earlier parameter sections (if the extension is enabled)
+ }
+ checkDependencies(restpe) // DEPMETTODO: check not needed when they become on by default
+
polyType(
- tparamSyms, // deSkolemized symbols
- if (vparamSymss.isEmpty) PolyType(List(), restpe)
+ tparamSyms, // deSkolemized symbols -- TODO: check that their infos don't refer to method args?
+ if (vparamSymss.isEmpty) PolyType(List(), restpe) // nullary method type
// vparamss refer (if they do) to skolemized tparams
- else checkDependencies((vparamSymss :\ restpe) (makeMethodType)))
+ else (vparamSymss :\ restpe) (makeMethodType))
+ }
var resultPt = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
val site = meth.owner.thisType
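
With -Ydependent-method-types the rule enforced here is: a parameter's type may only refer to parameters of strictly earlier sections (the comment above notes that forward references into implicit sections would be nice, but are not yet allowed). A sketch of what passes and what triggers the "illegal dependent method type" error (an illustration; ComputeT2 echoes the name used in the comment):

// Illustration only.
object DepCheckSketch {
  trait ComputeT2[T] { type T2 }
  // ok: the result type may refer to w -- result types are checked after all sections
  def bar[T](a: T)(implicit w: ComputeT2[T]): w.T2 = null.asInstanceOf[w.T2]
  // rejected (w is in a later section, and not even in scope at x):
  // def foo[T](a: T, x: w.T2)(implicit w: ComputeT2[T]) = ()
}
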
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 3a989a6549..da818e7cea 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -683,7 +683,7 @@ abstract class RefChecks extends InfoTransform {
case NoPrefix => ;
case ThisType(_) => ;
case ConstantType(_) => ;
- case DeBruijnIndex(_, _) => ;
+ // case DeBruijnIndex(_, _) => ;
case SingleType(pre, sym) =>
validateVariance(pre, variance)
case TypeRef(pre, sym, args) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 1166f62ddb..be576289f6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -139,7 +139,7 @@ trait TypeDiagnostics {
def applyErrorMsg(tree: Tree, msg: String, argtpes: List[Type], pt: Type) = {
def asParams(xs: List[Any]) = xs.mkString("(", ", ", ")")
- def resType = if (isWildcard(pt)) "" else " with expected result type " + pt
+ def resType = if (pt isWildcard) "" else " with expected result type " + pt
def allTypes = (alternatives(tree) flatMap (_.paramTypes)) ++ argtpes :+ pt
withDisambiguation(allTypes: _*) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 8c1ac9a816..d58f881200 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -178,18 +178,12 @@ trait Typers { self: Analyzer =>
*/
def applyImplicitArgs(fun: Tree): Tree = fun.tpe match {
case MethodType(params, _) =>
- var positional = true
val argResultsBuff = new ListBuffer[SearchResult]()
+ val argBuff = new ListBuffer[Tree]()
- // apply the substitutions (undet type param -> type) that were determined
- // by implicit resolution of implicit arguments on the left of this argument
- for(param <- params) {
- var paramTp = param.tpe
- for(ar <- argResultsBuff)
- paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
-
- argResultsBuff += inferImplicit(fun, paramTp, true, false, context)
- }
+ def mkPositionalArg(argTree: Tree, paramName: Name) = argTree
+ def mkNamedArg(argTree: Tree, paramName: Name) = atPos(argTree.pos)(new AssignOrNamedArg(Ident(paramName), (argTree)))
+ var mkArg: (Tree, Name) => Tree = mkPositionalArg
def errorMessage(paramName: Name, paramTp: Type) =
paramTp.typeSymbol match {
@@ -200,23 +194,40 @@ trait Typers { self: Analyzer =>
else "parameter "+paramName+": ")+paramTp
}
- val argResults = argResultsBuff.toList
- val args = argResults.zip(params) flatMap {
- case (arg, param) =>
- if (arg != SearchFailure) {
- if (positional) List(arg.tree)
- else List(atPos(arg.tree.pos)(new AssignOrNamedArg(Ident(param.name), (arg.tree))))
- } else {
- if (!param.hasFlag(DEFAULTPARAM))
- context.error(fun.pos, errorMessage(param.name, param.tpe))
- positional = false
- Nil
- }
+ // DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1))
+ //
+ // apply the substitutions (undet type param -> type) that were determined
+ // by implicit resolution of implicit arguments on the left of this argument
+ for(param <- params) {
+ var paramTp = param.tpe
+ for(ar <- argResultsBuff)
+ paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
+
+ val res = inferImplicit(fun, paramTp, true, false, context)
+ argResultsBuff += res
+
+ if (res != SearchFailure) {
+ argBuff += mkArg(res.tree, param.name)
+ } else {
+ mkArg = mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
+ if (!param.hasFlag(DEFAULTPARAM))
+ context.error(fun.pos, errorMessage(param.name, param.tpe))
+ /* else {
+ TODO: alternative (to expose implicit search failure more) -->
+ resolve argument, do type inference, keep emitting positional args, infer type params based on default value for arg
+ for (ar <- argResultsBuff) ar.subst traverse defaultVal
+ val targs = exprTypeArgs(context.undetparams, defaultVal.tpe, paramTp)
+ substExpr(tree, tparams, targs, pt)
+ }*/
+ }
}
- for (s <- argResults map (_.subst)) {
- s traverse fun
- for (arg <- args) s traverse arg
+
+ val args = argBuff.toList
+ for (ar <- argResultsBuff) {
+ ar.subst traverse fun
+ for (arg <- args) ar.subst traverse arg
}
+
new ApplyToImplicitArgs(fun, args) setPos fun.pos
case ErrorType =>
fun
@@ -814,10 +825,20 @@ trait Typers { self: Analyzer =>
context.undetparams = context.undetparams ::: tparams1
adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
case mt: MethodType if mt.isImplicit && ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
- if (context.undetparams nonEmpty) // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
- // dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
- // needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
- context.undetparams = inferExprInstance(tree, context.extractUndetparams(), pt, false) // false: retract Nothing's that indicate failure, ambiguities in manifests are dealt with in manifestOfType
+ if (context.undetparams nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
+ // dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
+ // needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
+ context.undetparams =
+ inferExprInstance(tree, context.extractUndetparams(), pt,
+ // approximate types that depend on arguments since dependency on implicit argument is like dependency on type parameter
+ if(settings.YdepMethTpes.value) mt.approximate else mt,
+ // if we are looking for a manifest, instantiate type to Nothing anyway,
+ // as we would get ambiguity errors otherwise. Example
+ // Looking for a manifest of Nil: This has many potential types,
+ // so we need to instantiate to minimal type List[Nothing].
+ false) // false: retract Nothing's that indicate failure, ambiguities in manifests are dealt with in manifestOfType
+ }
+
val typer1 = constrTyperIf(treeInfo.isSelfOrSuperConstrCall(tree))
if (original != EmptyTree && pt != WildcardType)
typer1.silent(tpr => tpr.typed(tpr.applyImplicitArgs(tree), mode, pt)) match {
@@ -1746,7 +1767,7 @@ trait Typers { self: Analyzer =>
error(vparam1.pos, "*-parameter must come last")
var tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
- if (!settings.Xexperimental.value) {
+ if (!settings.YdepMethTpes.value) {
for (vparams <- vparamss1; vparam <- vparams) {
checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
}
@@ -2402,7 +2423,10 @@ trait Typers { self: Analyzer =>
val tparams = context.extractUndetparams()
if (tparams.isEmpty) { // all type params are defined
val args1 = typedArgs(args, argMode(fun, mode), paramTypes, formals)
- val restpe = mt.resultType(args1 map (_.tpe)) // instantiate dependent method types
+ // instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
+ // val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
+ // precise(foo) : foo.type => foo.type
+ val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe))
def ifPatternSkipFormals(tp: Type) = tp match {
case MethodType(_, rtp) if ((mode & PATTERNmode) != 0) => rtp
case _ => tp
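
Written out as a compilable sketch (the body elided as {...} in the comment above is filled in with = y purely for illustration; assumes -Ydependent-method-types): the stable argument's singleton type survives into the instantiated result type, which is what routing arguments through gen.stableTypeFor achieves.

// Illustration only, expanding the comment's example.
object PreciseSketch {
  val foo = "foo"
  def precise(x: String)(y: x.type): x.type = y   // body chosen for illustration
  val bar: foo.type = precise(foo)(foo)           // precise(foo) : foo.type => foo.type
}
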
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index 82911892d9..458116845d 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -96,7 +96,7 @@ object ShowPickled extends Names {
case ANNOTATEDtpe => "ANNOTATEDtpe"
case ANNOTINFO => "ANNOTINFO"
case ANNOTARGARRAY => "ANNOTARGARRAY"
- case DEBRUIJNINDEXtpe => "DEBRUIJNINDEXtpe"
+ // case DEBRUIJNINDEXtpe => "DEBRUIJNINDEXtpe"
case EXISTENTIALtpe => "EXISTENTIALtpe"
case TREE => "TREE"
case MODIFIERS => "MODIFIERS"
diff --git a/test/files/pos/bug1279a.scala b/test/files/pos/bug1279a.scala
index 7568d3afcd..9212b583d4 100644
--- a/test/files/pos/bug1279a.scala
+++ b/test/files/pos/bug1279a.scala
@@ -1,3 +1,4 @@
+// see #13
// providing the type parameter in the recursive call to all4Impl
// avoids the problem
@@ -31,8 +32,9 @@ abstract class M
object Unrelated
{
- def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl(first.next))
+ // TODO!!! fix this bug for real, it compiles successfully, but weird types are inferred
+ // def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl(first.next))
// compiles successfully
-// def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl[U](first.next))
+ def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl[U](first.next))
}
diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check
index cc00a7c46b..8050017659 100644
--- a/test/files/run/constrained-types.check
+++ b/test/files/run/constrained-types.check
@@ -52,8 +52,8 @@ val y = b.y // should keep the annotation
y: Int @Annot(Stuff.x) = 10
-----
-def m(x: String): String @Annot(x) = x // m should be annotated with a debruijn
-m: (x$0:String)String @Annot(x)
+def m(x: String): String @Annot(x) = x
+m: (x: String)String @Annot(x)
-----
val three = "three"
diff --git a/test/files/run/constrained-types.scala b/test/files/run/constrained-types.scala
index 59fd0b1b8c..c03c144ad1 100644
--- a/test/files/run/constrained-types.scala
+++ b/test/files/run/constrained-types.scala
@@ -45,7 +45,7 @@ object Test {
"val y = b.y // should keep the annotation",
- "def m(x: String): String @Annot(x) = x // m should be annotated with a debruijn",
+ "def m(x: String): String @Annot(x) = x",
"val three = \"three\"",
"val three2 = m(three:three.type) // should change x to three",
"var four = \"four\"",