diff options
29 files changed, 957 insertions, 108 deletions
diff --git a/docs/SyntaxSummary.txt b/docs/SyntaxSummary.txt index 11f23da94..d4f7ceade 100644 --- a/docs/SyntaxSummary.txt +++ b/docs/SyntaxSummary.txt @@ -103,7 +103,7 @@ grammar. RefinedType ::= WithType {[nl] Refinement} RefinedTypeTree(t, ds) WithType ::= AnnotType {`with' AnnotType} (deprecated) AnnotType ::= SimpleType {Annotation} Annotated(t, annot) - SimpleType ::= SimpleType TypeArgs AppliedTypeTree(t, args) + SimpleType ::= SimpleType (TypeArgs | NamedTypeArgs) AppliedTypeTree(t, args) | SimpleType `#' id SelectFromTypeTree(t, name) | StableId | Path `.' `type' SingletonTypeTree(p) @@ -118,6 +118,8 @@ grammar. ParamType ::= [`=>'] ParamValueType ParamValueType ::= Type [`*'] PostfixOp(t, "*") TypeArgs ::= `[' ArgTypes `]' ts + NamedTypeArg ::= id `=' ArgType NamedArg(id, t) + NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]' nts Refinement ::= `{' [Dcl] {semi [Dcl]} `}' ds TypeBounds ::= [`>:' Type] [`<: Type] | INT TypeBoundsTree(lo, hi) TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type} ContextBounds(typeBounds, tps) @@ -160,7 +162,7 @@ grammar. | `_' | `(' ExprsInParens `)' Parens(exprs) | SimpleExpr `.' 
id Select(expr, id) - | SimpleExpr TypeArgs TypeApply(expr, args) + | SimpleExpr (TypeArgs | NamedTypeArgs) TypeApply(expr, args) | SimpleExpr1 ArgumentExprs Apply(expr, args) | XmlExpr ExprsInParens ::= ExprInParens {`,' ExprInParens} diff --git a/src/dotty/tools/dotc/ast/Desugar.scala b/src/dotty/tools/dotc/ast/Desugar.scala index 991940f10..340b4e671 100644 --- a/src/dotty/tools/dotc/ast/Desugar.scala +++ b/src/dotty/tools/dotc/ast/Desugar.scala @@ -67,7 +67,7 @@ object desugar { val defctx = ctx.outersIterator.dropWhile(_.scope eq ctx.scope).next var local = defctx.denotNamed(tp.name).suchThat(_ is ParamOrAccessor).symbol if (local.exists) (defctx.owner.thisType select local).dealias - else throw new Error(s"no matching symbol for ${sym.showLocated} in ${defctx.owner} / ${defctx.effectiveScope}") + else throw new Error(s"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope}") case _ => mapOver(tp) } diff --git a/src/dotty/tools/dotc/ast/TreeInfo.scala b/src/dotty/tools/dotc/ast/TreeInfo.scala index 2b0f0aee3..c1efd0b0b 100644 --- a/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -215,6 +215,10 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => case _ => false } + /** Does this list contain a named argument tree? */ + def hasNamedArg(args: List[Any]) = args exists isNamedArg + val isNamedArg = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[_]] + /** Is this pattern node a catch-all (wildcard or variable) pattern? 
*/ def isDefaultCase(cdef: CaseDef) = cdef match { case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat) diff --git a/src/dotty/tools/dotc/ast/tpd.scala b/src/dotty/tools/dotc/ast/tpd.scala index b78e4c79f..8e52d695b 100644 --- a/src/dotty/tools/dotc/ast/tpd.scala +++ b/src/dotty/tools/dotc/ast/tpd.scala @@ -67,7 +67,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Typed(expr: Tree, tpt: Tree)(implicit ctx: Context): Typed = ta.assignType(untpd.Typed(expr, tpt), tpt) - def NamedArg(name: Name, arg: Tree)(implicit ctx: Context) = + def NamedArg(name: Name, arg: Tree)(implicit ctx: Context): NamedArg = ta.assignType(untpd.NamedArg(name, arg), arg) def Assign(lhs: Tree, rhs: Tree)(implicit ctx: Context): Assign = diff --git a/src/dotty/tools/dotc/core/CheckRealizable.scala b/src/dotty/tools/dotc/core/CheckRealizable.scala index 6e6efa549..11fd6786a 100644 --- a/src/dotty/tools/dotc/core/CheckRealizable.scala +++ b/src/dotty/tools/dotc/core/CheckRealizable.scala @@ -63,23 +63,6 @@ class CheckRealizable(implicit ctx: Context) { */ private def isLateInitialized(sym: Symbol) = sym.is(Lazy, butNot = Module) - /** Is this type a path with some part that is initialized on use? 
- */ - private def isLateInitialized(tp: Type): Boolean = tp.dealias match { - case tp: TermRef => - isLateInitialized(tp.symbol) || isLateInitialized(tp.prefix) - case _: SingletonType | NoPrefix => - false - case tp: TypeRef => - true - case tp: TypeProxy => - isLateInitialized(tp.underlying) - case tp: AndOrType => - isLateInitialized(tp.tp1) || isLateInitialized(tp.tp2) - case _ => - true - } - /** The realizability status of given type `tp`*/ def realizability(tp: Type): Realizability = tp.dealias match { case tp: TermRef => @@ -121,13 +104,17 @@ class CheckRealizable(implicit ctx: Context) { } } - /** `Realizable` if `tp` all of `tp`'s non-struct fields have realizable types, + /** `Realizable` if all of `tp`'s non-struct fields have realizable types, * a `HasProblemField` instance pointing to a bad field otherwise. */ private def memberRealizability(tp: Type) = { def checkField(sofar: Realizability, fld: SingleDenotation): Realizability = sofar andAlso { if (checkedFields.contains(fld.symbol) || fld.symbol.is(Private | Mutable | Lazy)) + // if field is private it cannot be part of a visible path + // if field is mutable it cannot be part of a path + // if field is lazy it does not need to be initialized when the owning object is + // so in all cases the field does not influence realizability of the enclosing object. Realizable else { checkedFields += fld.symbol diff --git a/src/dotty/tools/dotc/core/Flags.scala b/src/dotty/tools/dotc/core/Flags.scala index 02b341649..a16901fa1 100644 --- a/src/dotty/tools/dotc/core/Flags.scala +++ b/src/dotty/tools/dotc/core/Flags.scala @@ -594,6 +594,9 @@ object Flags { /** A private parameter accessor */ final val PrivateParamAccessor = allOf(Private, ParamAccessor) + /** A type parameter introduced with [type ... 
] */ + final val NamedTypeParam = allOf(TypeParam, ParamAccessor) + /** A local parameter */ final val ParamAndLocal = allOf(Param, Local) diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala index 705e1a4fa..bde8cc10a 100644 --- a/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1516,6 +1516,20 @@ object SymDenotations { if (myMemberCache != null) myMemberCache invalidate sym.name } + /** Make sure the type parameters of this class are `tparams`, reorder definitions + * in scope if necessary. + * @pre All type parameters in `tparams` are entered in class scope `info.decls`. + */ + def updateTypeParams(tparams: List[Symbol])(implicit ctx: Context): Unit = + if (!typeParams.corresponds(tparams)(_.name == _.name)) { + val decls = info.decls + val decls1 = newScope + for (tparam <- tparams) decls1.enter(decls.lookup(tparam.name)) + for (sym <- decls) if (!typeParams.contains(sym)) decls1.enter(sym) + info = classInfo.derivedClassInfo(decls = decls1) + myTypeParams = null + } + /** All members of this class that have the given name. * The elements of the returned pre-denotation all * have existing symbols. 
diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala index e09328205..8f8a7dbdd 100644 --- a/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/src/dotty/tools/dotc/core/TypeApplications.scala @@ -244,8 +244,7 @@ class TypeApplications(val self: Type) extends AnyVal { else tsym.infoOrCompleter match { case completer: TypeParamsCompleter => val tparams = completer.completerTypeParams(tsym) - if (tsym.isClass) tparams - else defn.LambdaTrait(tparams.map(_.variance)).typeParams + defn.LambdaTrait(tparams.map(_.variance)).typeParams case _ => if (!tsym.isCompleting || tsym.isAliasType) tsym.info.typeParams else @@ -548,9 +547,12 @@ class TypeApplications(val self: Type) extends AnyVal { self case _ => val v = tparam.variance + /* Not neeeded. if (v > 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.upper(self) else if (v < 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.lower(self) - else TypeAlias(self, v) + else + */ + TypeAlias(self, v) } /** The type arguments of this type's base type instance wrt. `base`. diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala index 6ec605466..4e7a4a75d 100644 --- a/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/src/dotty/tools/dotc/core/TypeComparer.scala @@ -371,7 +371,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { case _ => compareRefinedSlow || fourthTry(tp1, tp2) || - compareHkLambda(tp2, tp1, inOrder = false) + compareHkLambda(tp2, tp1, inOrder = false) || + compareAliasedRefined(tp2, tp1, inOrder = false) } else // fast path, in particular for refinements resulting from parameterization. 
isSubType(tp1, skipped2) && @@ -491,7 +492,9 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { } isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths case tp1: RefinedType => - isNewSubType(tp1.parent, tp2) || compareHkLambda(tp1, tp2, inOrder = true) + isNewSubType(tp1.parent, tp2) || + compareHkLambda(tp1, tp2, inOrder = true) || + compareAliasedRefined(tp1, tp2, inOrder = true) case AndType(tp11, tp12) => // Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 | T12) <: T2 // and analogously for T11 & (T121 | T122) & T12 <: T2 @@ -614,6 +617,35 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { false } + /** Say we are comparing a refined type `P{type M = U}` or `P{type M >: L <: U}`. + * If P#M refers to a BaseTypeArg aliased to some other typeref P#N, + * do the same comparison with `P{type N = U}` or `P{type N >: L <: U}`, respectively. + * This allows to handle situations involving named type params like this one: + * + * trait Lambda[type Elem] + * trait Lst[T] extends Lambda[T] + * + * compareAliasedRefined is necessary so we establish that + * + * Lst[Int] = Lst[Elem = Int] + */ + private def compareAliasedRefined(rt: RefinedType, other: Type, inOrder: Boolean) = { + val mbr = refinedSymbol(rt) + mbr.is(BaseTypeArg) && { + mbr.info match { + case TypeAlias(TypeRef(_, aliasName)) => + val rt1 = rt.derivedRefinedType(rt.parent, aliasName, rt.refinedInfo) + subtyping.println(i"rewiring $rt to $rt1 in comparison with $other") + if (inOrder) isSubType(rt1, other) else isSubType(other, rt1) + case _ => + false + } + } + } + + /** The symbol referred to in the refinement of `rt` */ + private def refinedSymbol(rt: RefinedType) = rt.parent.member(rt.refinedName).symbol + /** Returns true iff either `tp11 <:< tp21` or `tp12 <:< tp22`, trying at the same time * to keep the constraint as wide as possible. 
Specifically, if * @@ -742,11 +774,14 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling { /** A type has been covered previously in subtype checking if it * is some combination of TypeRefs that point to classes, where the * combiners are RefinedTypes, AndTypes or AnnotatedTypes. + * One exception: Refinements referring to basetype args are never considered + * to be already covered. This is necessary because such refined types might + * still need to be compared with a compareAliasRefined. */ private def isCovered(tp: Type): Boolean = tp.dealias.stripTypeVar match { case tp: TypeRef => tp.symbol.isClass && tp.symbol != NothingClass && tp.symbol != NullClass case tp: ProtoType => false - case tp: RefinedType => isCovered(tp.parent) + case tp: RefinedType => isCovered(tp.parent) && !refinedSymbol(tp).is(BaseTypeArg) case tp: AnnotatedType => isCovered(tp.underlying) case AndType(tp1, tp2) => isCovered(tp1) && isCovered(tp2) case _ => false diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala index 4251648a3..734d31858 100644 --- a/src/dotty/tools/dotc/core/TypeOps.scala +++ b/src/dotty/tools/dotc/core/TypeOps.scala @@ -444,10 +444,16 @@ trait TypeOps { this: Context => // TODO: Make standalone object. case tp: TypeRef => tp case tp @ RefinedType(tp1, name: TypeName) => - val prevInfo = refinements(name) - refinements = refinements.updated(name, - if (prevInfo == null) tp.refinedInfo else prevInfo & tp.refinedInfo) - formals = formals.updated(name, tp1.typeParamNamed(name)) + tp.refinedInfo match { + case TypeAlias(TypeRef(pre, name1)) if name1 == name && (pre =:= cls.thisType) => + // Don't record refinements of the form X = this.X (These can arise using named parameters). 
+ typr.println(s"dropping refinement $tp") + case _ => + val prevInfo = refinements(name) + refinements = refinements.updated(name, + if (prevInfo == null) tp.refinedInfo else prevInfo & tp.refinedInfo) + formals = formals.updated(name, tp1.typeParamNamed(name)) + } normalizeToRef(tp1) case ErrorType => defn.AnyType diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 8595da640..887b69712 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -485,7 +485,13 @@ object Types { } def goThis(tp: ThisType) = { val d = go(tp.underlying) - if (d.exists) d + if (d.exists) + if ((pre eq tp) && d.symbol.is(NamedTypeParam) && (d.symbol.owner eq tp.cls)) + // If we look for a named type parameter `P` in `C.this.P`, looking up + // the fully applied self type of `C` will give as an info the alias type + // `P = this.P`. We need to return a denotation with the underlying bounds instead. + d.symbol.denot + else d else // There is a special case to handle: // trait Super { this: Sub => private class Inner {} println(this.Inner) } @@ -854,11 +860,9 @@ object Types { else NoType case tp: AnnotatedType => tp.underlying.underlyingClassRef(refinementOK) case tp: RefinedType => - if (refinementOK) tp.underlying.underlyingClassRef(refinementOK) - else { - val tycon = tp.withoutArgs(tp.argInfos) - if (tycon eq tp) NoType else tycon.underlyingClassRef(refinementOK) - } + def isParamName = tp.classSymbol.typeParams.exists(_.name == tp.refinedName) + if (refinementOK || isParamName) tp.underlying.underlyingClassRef(refinementOK) + else NoType case _ => NoType } diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index b1b229768..7a13388ae 100644 --- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -132,18 +132,10 @@ object 
Scala2Unpickler { } else { registerCompanionPair(scalacCompanion, denot.classSymbol) } - val declsTypeParams = denot.typeParams - val declsInRightOrder = - if (declsTypeParams.corresponds(tparams)(_.name == _.name)) decls - else { // create new scope with type parameters in right order - val decls1 = newScope - for (tparam <- tparams) decls1.enter(decls.lookup(tparam.name)) - for (sym <- decls) if (!declsTypeParams.contains(sym)) decls1.enter(sym) - decls1 - } - denot.info = ClassInfo( // final info - denot.owner.thisType, denot.classSymbol, parentRefs, declsInRightOrder, ost) + denot.info = ClassInfo( // final info, except possibly for typeparams ordering + denot.owner.thisType, denot.classSymbol, parentRefs, decls, ost) + denot.updateTypeParams(tparams) } } diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala index bad6b95dc..bb8fbe08b 100644 --- a/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/src/dotty/tools/dotc/parsing/Parsers.scala @@ -740,7 +740,7 @@ object Parsers { private def simpleTypeRest(t: Tree): Tree = in.token match { case HASH => simpleTypeRest(typeProjection(t)) - case LBRACKET => simpleTypeRest(atPos(t.pos.start) { AppliedTypeTree(t, typeArgs()) }) + case LBRACKET => simpleTypeRest(atPos(t.pos.start) { AppliedTypeTree(t, typeArgs(namedOK = true)) }) case _ => t } @@ -759,9 +759,38 @@ object Parsers { } else typ() + /** NamedTypeArg ::= id `=' ArgType + */ + val namedTypeArg = () => { + val name = ident() + accept(EQUALS) + NamedArg(name.toTypeName, argType()) + } + /** ArgTypes ::= ArgType {`,' ArgType} + * NamedTypeArg {`,' NamedTypeArg} */ - def argTypes() = commaSeparated(argType) + def argTypes(namedOK: Boolean = false) = { + def otherArgs(first: Tree, arg: () => Tree): List[Tree] = { + val rest = + if (in.token == COMMA) { + in.nextToken() + commaSeparated(arg) + } + else Nil + first :: rest + } + if (namedOK && in.token == IDENTIFIER) + argType() match { + case Ident(name) if in.token == 
EQUALS => + in.nextToken() + otherArgs(NamedArg(name, argType()), namedTypeArg) + case firstArg => + if (in.token == EQUALS) println(s"??? $firstArg") + otherArgs(firstArg, argType) + } + else commaSeparated(argType) + } /** FunArgType ::= ArgType | `=>' ArgType */ @@ -785,9 +814,10 @@ object Parsers { } else t } - /** TypeArgs ::= `[' ArgType {`,' ArgType} `]' - */ - def typeArgs(): List[Tree] = inBrackets(argTypes()) + /** TypeArgs ::= `[' ArgType {`,' ArgType} `]' + * NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]' + */ + def typeArgs(namedOK: Boolean = false): List[Tree] = inBrackets(argTypes(namedOK)) /** Refinement ::= `{' RefineStatSeq `}' */ @@ -1045,7 +1075,7 @@ object Parsers { * | Path * | `(' [ExprsInParens] `)' * | SimpleExpr `.' Id - * | SimpleExpr TypeArgs + * | SimpleExpr (TypeArgs | NamedTypeArgs) * | SimpleExpr1 ArgumentExprs */ def simpleExpr(): Tree = { @@ -1094,7 +1124,7 @@ object Parsers { in.nextToken() simpleExprRest(selector(t), canApply = true) case LBRACKET => - val tapp = atPos(t.pos.start, in.offset) { TypeApply(t, typeArgs()) } + val tapp = atPos(t.pos.start, in.offset) { TypeApply(t, typeArgs(namedOK = true)) } simpleExprRest(tapp, canApply = true) case LPAREN | LBRACE if canApply => val app = atPos(t.pos.start, in.offset) { Apply(t, argumentExprs()) } @@ -1493,7 +1523,7 @@ object Parsers { atPos(modStart, in.offset) { if (in.token == TYPE) { in.nextToken() - mods | Param + mods | Param | ParamAccessor } else { if (mods.hasFlags) syntaxError("`type' expected") mods | Param | PrivateLocal diff --git a/src/dotty/tools/dotc/transform/Constructors.scala b/src/dotty/tools/dotc/transform/Constructors.scala index 265ad3217..b6ebd7d90 100644 --- a/src/dotty/tools/dotc/transform/Constructors.scala +++ b/src/dotty/tools/dotc/transform/Constructors.scala @@ -246,7 +246,7 @@ class Constructors extends MiniPhaseTransform with SymTransformer { thisTransfor // Drop accessors that are not retained from class scope if (dropped.nonEmpty) { - 
val clsInfo = cls.classInfo // TODO investigate: expand clsInfo to cls.info => dotty type error + val clsInfo = cls.classInfo cls.copy( info = clsInfo.derivedClassInfo( decls = clsInfo.decls.filteredScope(!dropped.contains(_)))) diff --git a/src/dotty/tools/dotc/transform/PostTyper.scala b/src/dotty/tools/dotc/transform/PostTyper.scala index 14edaa7b5..d552c16f7 100644 --- a/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/src/dotty/tools/dotc/transform/PostTyper.scala @@ -68,7 +68,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran // TODO fill in } - /** Check bounds of AppliedTypeTrees. + /** Check bounds of AppliedTypeTrees and TypeApplys. * Replace type trees with TypeTree nodes. * Replace constant expressions with Literal nodes. * Note: Demanding idempotency instead of purity in literalize is strictly speaking too loose. @@ -99,6 +99,9 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran */ private def normalizeTree(tree: Tree)(implicit ctx: Context): Tree = tree match { case tree: TypeTree => tree + case TypeApply(fn, args) => + Checking.checkBounds(args, fn.tpe.widen.asInstanceOf[PolyType]) + tree case _ => if (tree.isType) { Checking.typeChecker.traverse(tree) @@ -140,6 +143,41 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran } } + private def normalizeTypeArgs(tree: TypeApply)(implicit ctx: Context): TypeApply = tree.tpe match { + case pt: PolyType => // wait for more arguments coming + tree + case _ => + def decompose(tree: TypeApply): (Tree, List[Tree]) = tree.fun match { + case fun: TypeApply => + val (tycon, args) = decompose(fun) + (tycon, args ++ tree.args) + case _ => + (tree.fun, tree.args) + } + def reorderArgs(pnames: List[Name], namedArgs: List[NamedArg], otherArgs: List[Tree]): List[Tree] = pnames match { + case pname :: pnames1 => + namedArgs.partition(_.name == pname) match { + case (NamedArg(_, arg) :: _, namedArgs1) => + arg :: 
reorderArgs(pnames1, namedArgs1, otherArgs) + case _ => + val otherArg :: otherArgs1 = otherArgs + otherArg :: reorderArgs(pnames1, namedArgs, otherArgs1) + } + case nil => + assert(namedArgs.isEmpty && otherArgs.isEmpty) + Nil + } + val (tycon, args) = decompose(tree) + tycon.tpe.widen match { + case PolyType(pnames) => + val (namedArgs, otherArgs) = args.partition(isNamedArg) + val args1 = reorderArgs(pnames, namedArgs.asInstanceOf[List[NamedArg]], otherArgs) + TypeApply(tycon, args1).withPos(tree.pos).withType(tree.tpe) + case _ => + tree + } + } + override def transform(tree: Tree)(implicit ctx: Context): Tree = try normalizeTree(tree) match { case tree: Ident => @@ -149,15 +187,16 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran } case tree: Select => transformSelect(paramFwd.adaptRef(tree), Nil) - case tree @ TypeApply(fn, args) => + case tree: TypeApply => + val tree1 @ TypeApply(fn, args) = normalizeTypeArgs(tree) Checking.checkBounds(args, fn.tpe.widen.asInstanceOf[PolyType]) fn match { case sel: Select => val args1 = transform(args) val sel1 = transformSelect(sel, args1) - if (superAcc.isProtectedAccessor(sel1)) sel1 else cpy.TypeApply(tree)(sel1, args1) + if (superAcc.isProtectedAccessor(sel1)) sel1 else cpy.TypeApply(tree1)(sel1, args1) case _ => - super.transform(tree) + super.transform(tree1) } case tree @ Assign(sel: Select, _) => superAcc.transformAssign(super.transform(tree)) diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala index 649b8088f..098385d4b 100644 --- a/src/dotty/tools/dotc/typer/Applications.scala +++ b/src/dotty/tools/dotc/typer/Applications.scala @@ -29,8 +29,6 @@ import language.implicitConversions object Applications { import tpd._ - private val isNamedArg = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[_]] - def hasNamedArg(args: List[Any]) = args exists isNamedArg def extractorMemberType(tp: Type, name: Name, errorPos: Position = 
NoPosition)(implicit ctx:Context) = { val ref = tp.member(name).suchThat(_.info.isParameterless) @@ -263,7 +261,12 @@ trait Applications extends Compatibility { self: Typer => case Select(receiver, _) => receiver case mr => mr.tpe.normalizedPrefix match { case mr: TermRef => ref(mr) - case _ => EmptyTree + case mr => + if (this.isInstanceOf[TestApplication[_]]) + // In this case it is safe to skolemize now; we will produce a stable prefix for the actual call. + ref(mr.narrow) + else + EmptyTree } } val getterPrefix = @@ -600,12 +603,19 @@ trait Applications extends Compatibility { self: Typer => protected def handleUnexpectedFunType(tree: untpd.Apply, fun: Tree)(implicit ctx: Context): Tree = throw new Error(s"unexpected type.\n fun = $fun,\n methPart(fun) = ${methPart(fun)},\n methPart(fun).tpe = ${methPart(fun).tpe},\n tpe = ${fun.tpe}") + def typedNamedArgs(args: List[untpd.Tree])(implicit ctx: Context) = + for (arg @ NamedArg(id, argtpt) <- args) yield { + val argtpt1 = typedType(argtpt) + cpy.NamedArg(arg)(id, argtpt1).withType(argtpt1.tpe) + } + def typedTypeApply(tree: untpd.TypeApply, pt: Type)(implicit ctx: Context): Tree = track("typedTypeApply") { - var typedArgs = tree.args mapconserve (typedType(_)) + val isNamed = hasNamedArg(tree.args) + var typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_)) val typedFn = typedExpr(tree.fun, PolyProto(typedArgs.tpes, pt)) typedFn.tpe.widen match { case pt: PolyType => - if (typedArgs.length <= pt.paramBounds.length) + if (typedArgs.length <= pt.paramBounds.length && !isNamed) typedArgs = typedArgs.zipWithConserve(pt.paramBounds)(adaptTypeArg) case _ => } @@ -1056,10 +1066,8 @@ trait Applications extends Compatibility { self: Typer => def narrowByShapes(alts: List[TermRef]): List[TermRef] = { if (normArgs exists (_.isInstanceOf[untpd.Function])) - if (args exists (_.isInstanceOf[Trees.NamedArg[_]])) - narrowByTrees(alts, args map treeShape, resultType) - else - 
narrowByTypes(alts, normArgs map typeShape, resultType) + if (hasNamedArg(args)) narrowByTrees(alts, args map treeShape, resultType) + else narrowByTypes(alts, normArgs map typeShape, resultType) else alts } @@ -1100,10 +1108,13 @@ trait Applications extends Compatibility { self: Typer => // the arguments (which are constants) to be adapted to Byte. If we had picked // `candidates` instead, no solution would have been found. case alts => -// overload.println(i"ambiguous $alts%, %") - val deepPt = pt.deepenProto - if (deepPt ne pt) resolveOverloaded(alts, deepPt, targs) - else alts + val noDefaults = alts.filter(!_.symbol.hasDefaultParams) + if (noDefaults.length == 1) noDefaults // return unique alternative without default parameters if it exists + else { + val deepPt = pt.deepenProto + if (deepPt ne pt) resolveOverloaded(alts, deepPt, targs) + else alts + } } } diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala index 437902d05..414d8952d 100644 --- a/src/dotty/tools/dotc/typer/Checking.scala +++ b/src/dotty/tools/dotc/typer/Checking.scala @@ -49,12 +49,22 @@ object Checking { def checkBounds(args: List[tpd.Tree], poly: PolyType)(implicit ctx: Context): Unit = checkBounds(args, poly.paramBounds, _.substParams(poly, _)) - /** Check all AppliedTypeTree nodes in this tree for legal bounds */ + /** Traverse type tree, performing the following checks: + * 1. All arguments of applied type trees must conform to their bounds. + * 2. Prefixes of type selections and singleton types must be realizable. 
+ */ val typeChecker = new TreeTraverser { def traverse(tree: Tree)(implicit ctx: Context) = { tree match { case AppliedTypeTree(tycon, args) => - val tparams = tycon.tpe.typeSymbol.typeParams + // If `args` is a list of named arguments, return corresponding type parameters, + // otherwise return type parameters unchanged + def matchNamed(tparams: List[TypeSymbol], args: List[Tree]): List[Symbol] = + if (hasNamedArg(args)) + for (NamedArg(name, _) <- args) yield tycon.tpe.member(name).symbol + else + tparams + val tparams = matchNamed(tycon.tpe.typeSymbol.typeParams, args) val bounds = tparams.map(tparam => tparam.info.asSeenFrom(tycon.tpe.normalizedPrefix, tparam.owner.owner).bounds) checkBounds(args, bounds, _.substDealias(tparams, _)) diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index c8d2b3418..13ed96249 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -576,8 +576,8 @@ class Namer { typer: Typer => /** The type signature of a ClassDef with given symbol */ override def completeInCreationContext(denot: SymDenotation): Unit = { - /** The type of a parent constructor. Types constructor arguments - * only if parent type contains uninstantiated type parameters. + /* The type of a parent constructor. Types constructor arguments + * only if parent type contains uninstantiated type parameters. */ def parentType(parent: untpd.Tree)(implicit ctx: Context): Type = if (parent.isType) { @@ -594,7 +594,12 @@ class Namer { typer: Typer => else typedAheadExpr(parent).tpe } - def checkedParentType(parent: untpd.Tree): Type = { + /* Check parent type tree `parent` for the following well-formedness conditions: + * (1) It must be a class type with a stable prefix (@see checkClassTypeWithStablePrefix) + * (2) If may not derive from itself + * (3) Overriding type parameters must be correctly forwarded. 
(@see checkTypeParamOverride) + */ + def checkedParentType(parent: untpd.Tree, paramAccessors: List[Symbol]): Type = { val ptype = parentType(parent)(ctx.superCallContext) if (cls.isRefinementClass) ptype else { @@ -608,10 +613,52 @@ class Namer { typer: Typer => ctx.error(i"cyclic inheritance: $cls extends itself$addendum", parent.pos) defn.ObjectType } + else if (!paramAccessors.forall(checkTypeParamOverride(pt, _))) + defn.ObjectType else pt } } + /* Check that every parameter with the same name as a visible named parameter in the parent + * class satisfies the following two conditions: + * (1) The overriding parameter is also named (i.e. not local/name mangled). + * (2) The overriding parameter is passed on directly to the parent parameter, or the + * parent parameter is not fully defined. + * @return true if conditions are satisfied, false otherwise. + */ + def checkTypeParamOverride(parent: Type, paramAccessor: Symbol): Boolean = { + var ok = true + val pname = paramAccessor.name + + def illegal(how: String): Unit = { + ctx.error(d"Illegal override of public type parameter $pname in $parent$how", paramAccessor.pos) + ok = false + } + + def checkAlias(tp: Type): Unit = tp match { + case tp: RefinedType => + if (tp.refinedName == pname) + tp.refinedInfo match { + case TypeAlias(alias) => + alias match { + case TypeRef(pre, name1) if name1 == pname && (pre =:= cls.thisType) => + // OK, parameter is passed on directly + case _ => + illegal(d".\nParameter is both redeclared and instantiated with $alias.") + } + case _ => // OK, argument is not fully defined + } + else checkAlias(tp.parent) + case _ => + } + if (parent.nonPrivateMember(paramAccessor.name).symbol.is(Param)) + if (paramAccessor is Private) + illegal("\nwith private parameter. 
Parameter definition needs to be prefixed with `type'.") + else + checkAlias(parent) + ok + } + val selfInfo = if (self.isEmpty) NoType else if (cls.is(Module)) { @@ -634,7 +681,8 @@ class Namer { typer: Typer => index(constr) symbolOfTree(constr).ensureCompleted() - val parentTypes = ensureFirstIsClass(parents map checkedParentType) + val tparamAccessors = decls.filter(_ is TypeParamAccessor).toList + val parentTypes = ensureFirstIsClass(parents.map(checkedParentType(_, tparamAccessors))) val parentRefs = ctx.normalizeToClassRefs(parentTypes, cls, decls) typr.println(s"completing $denot, parents = $parents, parentTypes = $parentTypes, parentRefs = $parentRefs") diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala index ac46ee723..fd4f0011c 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -8,7 +8,9 @@ import Scopes._, Contexts._, Constants._, Types._, Symbols._, Names._, Flags._, import ErrorReporting._, Annotations._, Denotations._, SymDenotations._, StdNames._, TypeErasure._ import util.Positions._ import config.Printers._ +import ast.Trees._ import NameOps._ +import collection.mutable trait TypeAssigner { import tpd._ @@ -184,8 +186,11 @@ trait TypeAssigner { ErrorType } } - else if (d.symbol is TypeParamAccessor) // always dereference type param accessors - ensureAccessible(d.info.bounds.hi, superAccess, pos) + else if (d.symbol is TypeParamAccessor) + if (d.info.isAlias) + ensureAccessible(d.info.bounds.hi, superAccess, pos) + else // It's a named parameter, use the non-symbolic representation to pick up inherited versions as well + d.symbol.owner.thisType.select(d.symbol.name) else ctx.makePackageObjPrefixExplicit(tpe withDenot d) case _ => @@ -307,9 +312,44 @@ trait TypeAssigner { def assignType(tree: untpd.TypeApply, fn: Tree, args: List[Tree])(implicit ctx: Context) = { val ownType = fn.tpe.widen match { case pt: PolyType => - val argTypes 
= args.tpes - if (sameLength(argTypes, pt.paramNames)|| ctx.phase.prev.relaxedTyping) pt.instantiate(argTypes) - else errorType(d"wrong number of type parameters for ${fn.tpe}; expected: ${pt.paramNames.length}", tree.pos) + val paramNames = pt.paramNames + if (hasNamedArg(args)) { + val argMap = new mutable.HashMap[Name, Type] + for (NamedArg(name, arg) <- args) + if (argMap.contains(name)) + ctx.error("duplicate name", arg.pos) + else if (!paramNames.contains(name)) + ctx.error(s"undefined parameter name, required: ${paramNames.mkString(" or ")}", arg.pos) + else + argMap(name) = arg.tpe + val gapBuf = new mutable.ListBuffer[Int] + def nextPoly = { + val idx = gapBuf.length + gapBuf += idx + PolyParam(pt, idx) + } + val normArgs = paramNames.map(pname => argMap.getOrElse(pname, nextPoly)) + val transform = new TypeMap { + def apply(t: Type) = t match { + case PolyParam(`pt`, idx) => normArgs(idx) + case _ => mapOver(t) + } + } + val resultType1 = transform(pt.resultType) + if (gapBuf.isEmpty) resultType1 + else { + val gaps = gapBuf.toList + pt.derivedPolyType( + gaps.map(paramNames.filterNot(argMap.contains)), + gaps.map(idx => transform(pt.paramBounds(idx)).bounds), + resultType1) + } + } + else { + val argTypes = args.tpes + if (sameLength(argTypes, paramNames)|| ctx.phase.prev.relaxedTyping) pt.instantiate(argTypes) + else errorType(d"wrong number of type parameters for ${fn.tpe}; expected: ${pt.paramNames.length}", tree.pos) + } case _ => errorType(i"${err.exprStr(fn)} does not take type parameters", tree.pos) } @@ -375,8 +415,25 @@ trait TypeAssigner { def assignType(tree: untpd.AppliedTypeTree, tycon: Tree, args: List[Tree])(implicit ctx: Context) = { val tparams = tycon.tpe.typeParams + def refineNamed(tycon: Type, arg: Tree) = arg match { + case ast.Trees.NamedArg(name, argtpt) => + // Dotty deviation: importing ast.Trees._ and matching on NamedArg gives a cyclic ref error + val tparam = tparams.find(_.name == name) match { + case Some(tparam) => tparam 
+ case none => + val sym = tycon.member(name).symbol + if (sym.isAbstractType) sym + else if (sym.is(ParamAccessor)) sym.info.dealias.typeSymbol + else NoSymbol + } + if (tparam.exists) RefinedType(tycon, name, argtpt.tpe.toBounds(tparam)) + else errorType(s"$tycon does not have a parameter or abstract type member named $name", arg.pos) + case _ => + errorType(s"named and positional type arguments may not be mixed", arg.pos) + } val ownType = - if (sameLength(tparams, args)) tycon.tpe.appliedTo(args.tpes) + if (hasNamedArg(args)) (tycon.tpe /: args)(refineNamed) + else if (sameLength(tparams, args)) tycon.tpe.appliedTo(args.tpes) else errorType(d"wrong number of type arguments for ${tycon.tpe}, should be ${tparams.length}", tree.pos) tree.withType(ownType) } diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index d39f2dd43..fc2bf2381 100644 --- a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -17,6 +17,7 @@ import SymDenotations._ import Annotations._ import Names._ import NameOps._ +import Applications._ import Flags._ import Decorators._ import ErrorReporting._ @@ -885,26 +886,30 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(implicit ctx: Context): Tree = track("typedAppliedTypeTree") { val tpt1 = typed(tree.tpt)(ctx retractMode Mode.Pattern) val tparams = tpt1.tpe.typeParams - var args = tree.args if (tparams.isEmpty) { ctx.error(d"${tpt1.tpe} does not take type parameters", tree.pos) tpt1 } else { - if (args.length != tparams.length) { - ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos) - args = args.take(tparams.length) - } - def typedArg(arg: untpd.Tree, tparam: Symbol) = { - val (desugaredArg, argPt) = - if (ctx.mode is Mode.Pattern) - (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.info) - else - (arg, WildcardType) - 
val arg1 = typed(desugaredArg, argPt) - adaptTypeArg(arg1, tparam.info) - } - val args1 = args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]] + var args = tree.args + val args1 = + if (hasNamedArg(args)) typedNamedArgs(args) + else { + if (args.length != tparams.length) { + ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos) + args = args.take(tparams.length) + } + def typedArg(arg: untpd.Tree, tparam: Symbol) = { + val (desugaredArg, argPt) = + if (ctx.mode is Mode.Pattern) + (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.info) + else + (arg, WildcardType) + val arg1 = typed(desugaredArg, argPt) + adaptTypeArg(arg1, tparam.info) + } + args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]] + } // check that arguments conform to bounds is done in phase PostTyper assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) } diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala index 421846ca2..6a61ec62a 100644 --- a/test/dotc/tests.scala +++ b/test/dotc/tests.scala @@ -122,6 +122,7 @@ class tests extends CompilerTest { @Test def neg_autoTupling = compileFile(posDir, "autoTuplingTest", args = "-language:noAutoTupling" :: Nil, xerrors = 3) @Test def neg_autoTupling2 = compileFile(negDir, "autoTuplingTest", xerrors = 3) @Test def neg_companions = compileFile(negDir, "companions", xerrors = 1) + @Test def namedParams = compileFile(negDir, "named-params", xerrors = 14) @Test def neg_over = compileFile(negDir, "over", xerrors = 3) @Test def neg_overrides = compileFile(negDir, "overrides", xerrors = 14) @Test def neg_overrideClass = compileFile(negDir, "overrideClass", List("-language:Scala2"), xerrors = 1) diff --git a/tests/neg/cycles.scala b/tests/neg/cycles.scala index 11efea625..f9e546aca 100644 --- a/tests/neg/cycles.scala +++ b/tests/neg/cycles.scala @@ -1,21 +1,22 @@ -class Foo[T <: U, U <: T] // error: cycle +class Foo[T <: U, U <: T] // error: illegal 
cyclic reference: upper bound U of type T refers back to the type itself + +class Bar[T >: T] // error: illegal cyclic reference: lower bound T of type T refers back to the type itself -class Bar[T >: T] // error: cycle class A { val x: T = ??? - type T <: x.type // error: cycle + type T <: x.type // error: cyclic reference involving value x } class B { - type T <: x.type // error: cycle - final val x: T = ??? + type T <: x.type // error: illegal cyclic reference: upper bound B.this.T(B.this.x) of type T refers back to the type itself + val x: T = ??? } class C { final val x: D#T = ??? class D { - type T <: x.type // error: cycle + type T <: x.type // error: cyclic reference involving value x val z: x.type = ??? } } diff --git a/tests/neg/named-params.scala b/tests/neg/named-params.scala new file mode 100644 index 000000000..9ef4ed066 --- /dev/null +++ b/tests/neg/named-params.scala @@ -0,0 +1,34 @@ +package namedparams + +class C[type Elem, type Value](val elem: Elem) { + def toVal: Elem = ??? 
+} + +abstract class D[type Elem, V](elem: Elem) extends C[Elem, V](elem) +abstract class D2[Elem, V](elem: Elem) extends C[Elem, V](elem) // error +abstract class D3[type Elem, V](x: V) extends C[V, V](x) // error +abstract class D4[type Elem](elem: Elem) extends C[Elem, Elem] // error +object Test { + val c = new C[String, String]("A") { + override def toVal = elem + } + val x: c.Elem = c.elem + + val c2: C { type Elem = String } = c + + val c3 = new C[Elem = String, Value = Int]("B") + val c4 = new C[Elem = String]("C") + val x2: c2.Elem = c2.elem + + val c5 = new C[Elem1 = String, Value0 = Int]("B") // error // error + + def d2[E, V](x: E) = new C[Elem = E, Value = V](x) + + val dup = d2[E = Int, V = String, E = Boolean](2) // error + val z1 = d2[Elem = Int, Value = String](1) // error // error + val z2 = d2[Value = String, Elem = Int](1) // error // error + val z3 = d2[Elem = Int](1) // error + val z4 = d2[Value = Int]("AAA") // error + val z5 = d2[Elem = Int][Value = String](1) //error // error + +} diff --git a/tests/pending/pickling/named-params.scala b/tests/pending/pickling/named-params.scala new file mode 100644 index 000000000..2697a7bb7 --- /dev/null +++ b/tests/pending/pickling/named-params.scala @@ -0,0 +1,20 @@ +package namedparams + +class C[type Elem, type Value](val elem: Elem) { + def toVal: Elem = ??? 
+} + +class D[type Elem, V](elem: Elem) extends C[Elem, V](elem) + +object Test { + val c = new C[String, String]("A") { + override def toVal = elem + } + val x: c.Elem = c.elem + + val c2: C { type Elem = String } = c + + val c3 = new C[Elem = String, Value = Int]("B") + val c4 = new C[Elem = String]("C") + val x2: c2.Elem = c2.elem +} diff --git a/tests/pos/CollectionStrawMan3.scala b/tests/pos/CollectionStrawMan3.scala new file mode 100644 index 000000000..47d3b52d6 --- /dev/null +++ b/tests/pos/CollectionStrawMan3.scala @@ -0,0 +1,408 @@ +package strawman.collections + +import Predef.{augmentString => _, wrapString => _, _} +import scala.reflect.ClassTag + +/** A strawman architecture for new collections. It contains some + * example collection classes and methods with the intent to expose + * some key issues. It would be good to compare this to other + * implementations of the same functionality, to get an idea of the + * strengths and weaknesses of different collection architectures. + * + * For a test file, see tests/run/CollectionTests.scala. + * + * This one is like CollectionStrawMan1, but with the named parameter + * scheme for hk types. 
+ */ +object CollectionStrawMan1 { + + /* ------------ Base Traits -------------------------------- */ + + /** Replaces TraversableOnce */ + trait CanIterate[type +Elem] { + def iterator: Iterator[Elem] + } + + /** Base trait for instances that can construct a collection from an iterator */ + trait FromIterator[+C <: Iterable] { + def fromIterator[B](it: Iterator[B]): C[Elem = B] + } + + /** Base trait for companion objects of collections */ + trait IterableFactory[+C <: Iterable] extends FromIterator[C] { + def empty[X]: C[Elem = X] = fromIterator(Iterator.empty) + def apply[A](xs: A*): C[Elem = A] = fromIterator(Iterator(xs: _*)) + } + + /** Base trait for generic collections */ + trait Iterable[type +Elem] extends CanIterate[Elem] with FromIterator[Iterable] + + /** Base trait for sequence collections */ + trait Seq[type +Elem] extends Iterable[Elem] with FromIterator[Seq] { + def apply(i: Int): Elem + def length: Int + } + + /* ------------ Operations ----------------------------------- */ + + /** Operations returning types unrelated to current collection */ + trait Ops[A] extends Any { + def iterator: Iterator[A] + def foreach(f: A => Unit): Unit = iterator.foreach(f) + def foldLeft[B](z: B)(op: (B, A) => B): B = iterator.foldLeft(z)(op) + def foldRight[B](z: B)(op: (A, B) => B): B = iterator.foldRight(z)(op) + def indexWhere(p: A => Boolean): Int = iterator.indexWhere(p) + def isEmpty: Boolean = !iterator.hasNext + def head: A = iterator.next + def view: View[A] = new View(iterator) + def to[C <: Iterable](fi: FromIterator[C]): C[Elem = A] = fi.fromIterator(iterator) + } + + /** Transforms returning same collection type */ + trait MonoTransforms[A, Repr] extends Any { + protected def iter: Iterator[A] + protected def fromIter(it: => Iterator[A]): Repr + def partition(p: A => Boolean): (Repr, Repr) = { + val (xs, ys) = iter.partition(p) + (fromIter(xs), fromIter(ys)) + } + def drop(n: Int): Repr = fromIter(iter.drop(n)) + } + + /** Transforms returning same 
collection type constructor */ + trait PolyTransforms[A, C <: CanIterate] extends Any { + protected def iter: Iterator[A] + protected def fromIter[B](it: => Iterator[B]): C[Elem = B] + def map[B](f: A => B): C[Elem = B] = fromIter(iter.map(f)) + def flatMap[B](f: A => CanIterate[B]): C[Elem = B] = fromIter(iter.flatMap(f(_))) + def ++[B >: A](xs: CanIterate[B]): C[Elem = B] = fromIter(iter ++ xs) + def zip[B](xs: CanIterate[B]): C[Elem = (A, B)] = fromIter(iter.zip(xs.iterator)) + } + + /** Transforms that only apply to Seq */ + trait MonoTransformsOfSeqs[A, Repr] extends Any with MonoTransforms[A, Repr] { + def reverse: Repr = fromIter(iter.reverse) + } + + /** Implementation of Ops for all generic collections */ + implicit class IterableOps[A](val c: Iterable[A]) + extends AnyVal with Ops[A] { + def iterator = c.iterator + } + + /** Implementation of MonoTransforms for all generic collections */ + implicit class IterableMonoTransforms[A, C <: Iterable](val c: Iterable[A] with FromIterator[C]) + extends AnyVal with MonoTransforms[A, C[Elem = A]] { + protected def iter = c.iterator + protected def fromIter(it: => Iterator[A]): C[Elem = A] = c.fromIterator(it) + } + + /** Implementation of PolyTransforms for all generic collections */ + implicit class IterablePolyTransforms[A, C <: Iterable](val c: Iterable[A] with FromIterator[C]) + extends AnyVal with PolyTransforms[A, C] { + protected def iter = c.iterator + protected def fromIter[B](it: => Iterator[B]) = c.fromIterator(it) + } + + /** Implementation of MonoTransformsForSeqs for all generic collections */ + implicit class SeqMonoTransforms[A, C <: Seq](val c: Seq[A] with FromIterator[C]) + extends AnyVal with MonoTransformsOfSeqs[A, C[Elem = A]] { + protected def iter = c.iterator + protected def fromIter(it: => Iterator[A]) = c.fromIterator(it) + } + + /* --------- Concrete collection types ------------------------------- */ + + /** Concrete collection type: List */ + sealed trait List[+A] extends Seq[A] with 
FromIterator[List] { + def isEmpty: Boolean + def head: A + def tail: List[A] + def iterator = new ListIterator[A](this) + def fromIterator[B](it: Iterator[B]): List[B] = List.fromIterator(it) + def apply(i: Int): A = { + require(!isEmpty) + if (i == 0) head else tail.apply(i - 1) + } + def length: Int = + if (isEmpty) 0 else 1 + tail.length + } + + case class Cons[+A](x: A, xs: List[A]) extends List[A] { + def isEmpty = false + def head = x + def tail = xs + } + + case object Nil extends List[Nothing] { + def isEmpty = true + def head = ??? + def tail = ??? + } + + object List extends IterableFactory[List] { + def fromIterator[B](it: Iterator[B]): List[B] = it match { + case it: ListIterator[B] => it.toList + case _ => if (it.hasNext) Cons(it.next, fromIterator(it)) else Nil + } + } + + class ListIterator[+A](xs: List[A]) extends Iterator[A] { + private[this] var current = xs + def hasNext = !current.isEmpty + def next = { val r = current.head; current = current.tail; r } + def toList = current + } + + /** Concrete collection type: ArrayBuffer */ + class ArrayBuffer[A] private (initElems: Array[AnyRef], initLength: Int) extends Seq[A] with FromIterator[ArrayBuffer] { + def this() = this(new Array[AnyRef](16), 0) + private var elems: Array[AnyRef] = initElems + private var start = 0 + private var limit = initLength + def apply(i: Int) = elems(start + i).asInstanceOf[A] + def length = limit - start + def iterator = new ArrayBufferIterator[A](elems, start, length) + def fromIterator[B](it: Iterator[B]): ArrayBuffer[B] = + ArrayBuffer.fromIterator(it) + def +=(elem: A): this.type = { + if (limit == elems.length) { + if (start > 0) { + Array.copy(elems, start, elems, 0, length) + limit -= start + start = 0 + } + else { + val newelems = new Array[AnyRef](limit * 2) + Array.copy(elems, 0, newelems, 0, limit) + elems = newelems + } + } + elems(limit) = elem.asInstanceOf[AnyRef] + limit += 1 + this + } + def trimStart(n: Int): Unit = start += (n max 0) + override def 
toString = s"ArrayBuffer(${elems.slice(start, limit).mkString(", ")})" + } + + object ArrayBuffer extends IterableFactory[ArrayBuffer] { + def fromIterator[B](it: Iterator[B]): ArrayBuffer[B] = it match { + case Iterator.Concat(fst: ArrayBufferIterator[_], snd: ArrayBufferIterator[_]) => + val elems = new Array[AnyRef](fst.remaining + snd.remaining) + Array.copy(fst.elems, fst.start, elems, 0, fst.remaining) + Array.copy(snd.elems, snd.start, elems, fst.remaining, snd.remaining) + new ArrayBuffer(elems, elems.length) + case it @ Iterator.Partition(underlying, _, buf, _) => + while (underlying.hasNext) it.distribute() + buf.asInstanceOf[ArrayBuffer[B]] + case it if it.remaining >= 0 => + val elems = new Array[AnyRef](it.remaining) + for (i <- 0 until elems.length) elems(i) = it.next.asInstanceOf[AnyRef] + new ArrayBuffer[B](elems, elems.length) + case _ => + val buf = new ArrayBuffer[B] + while (it.hasNext) buf += it.next + buf + } + } + + class ArrayBufferIterator[A](val elems: Array[AnyRef], initStart: Int, length: Int) extends RandomAccessIterator[A] { + val limit = length + def apply(n: Int) = elems(initStart + n).asInstanceOf[A] + } + + /** Concrete collection type: View */ + class View[+A](it: => Iterator[A]) extends CanIterate[A] { + def iterator = it + } + + implicit class ViewOps[A](val v: View[A]) extends AnyVal with Ops[A] { + def iterator = v.iterator + def cache = to(ArrayBuffer).view + } + + implicit class ViewMonoTransforms[A](val v: View[A]) + extends AnyVal with MonoTransforms[A, View[A]] { + protected def iter = v.iterator + protected def fromIter(it: => Iterator[A]): View[A] = new View(it) + } + + implicit class ViewPolyTransforms[A](val v: View[A]) + extends AnyVal with PolyTransforms[A, View] { + protected def iter = v.iterator + protected def fromIter[B](it: => Iterator[B]) = new View(it) + } + + /** Concrete collection type: String */ + implicit class StringOps(val s: String) extends AnyVal with Ops[Char] { + def iterator: Iterator[Char] = new 
RandomAccessIterator[Char] { + override val limit = s.length + def apply(n: Int) = s.charAt(n) + } + } + + implicit class StringMonoTransforms(val s: String) + extends AnyVal with MonoTransformsOfSeqs[Char, String] { + protected def iter = StringOps(s).iterator + protected def fromIter(it: => Iterator[Char]) = { + val sb = new StringBuilder + for (ch <- it) sb.append(ch) + sb.toString + } + } + + implicit class StringPolyTransforms(val s: String) + extends AnyVal with PolyTransforms[Char, Seq] { + protected def iter = StringOps(s).iterator + protected def fromIter[B](it: => Iterator[B]) = List.fromIterator(it) + def map(f: Char => Char): String = { + val sb = new StringBuilder + for (ch <- s) sb.append(f(ch)) + sb.toString + } + def flatMap(f: Char => String) = { + val sb = new StringBuilder + for (ch <- s) sb.append(f(ch)) + sb.toString + } + def ++(xs: CanIterate[Char]): String = { + val sb = new StringBuilder(s) + for (ch <- xs.iterator) sb.append(ch) + sb.toString + } + def ++(xs: String): String = s + xs + } + +/* ---------- Iterators --------------------------------------------------- */ + + /** A core Iterator class */ + trait Iterator[+A] extends CanIterate[A] { self => + def hasNext: Boolean + def next: A + def iterator = this + def foldLeft[B](z: B)(op: (B, A) => B): B = + if (hasNext) foldLeft(op(z, next))(op) else z + def foldRight[B](z: B)(op: (A, B) => B): B = + if (hasNext) op(next, foldRight(z)(op)) else z + def foreach(f: A => Unit): Unit = + while (hasNext) f(next) + def indexWhere(p: A => Boolean): Int = { + var i = 0 + while (hasNext) { + if (p(next)) return i + i += 1 + } + -1 + } + def map[B](f: A => B): Iterator[B] = Iterator.Map(this, f) + def flatMap[B](f: A => CanIterate[B]): Iterator[B] = Iterator.FlatMap(this, f) + def ++[B >: A](xs: CanIterate[B]): Iterator[B] = Iterator.Concat(this, xs.iterator) + def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = { + val lookaheadTrue, lookaheadFalse = new ArrayBuffer[A] + 
(Iterator.Partition(this, p, lookaheadTrue, lookaheadFalse), +       Iterator.Partition[A](this, !p(_), lookaheadFalse, lookaheadTrue)) +    } +    def drop(n: Int): Iterator[A] = Iterator.Drop(this, n) +    def zip[B](that: CanIterate[B]): Iterator[(A, B)] = Iterator.Zip(this, that.iterator) +    def reverse: Iterator[A] = { +      var elems: List[A] = Nil +      while (hasNext) elems = Cons(next, elems) +      elems.iterator +    } + +    /** If this iterator results from applying a transformation to another iterator, +     *  that other iterator, otherwise the iterator itself. +     */ +    def underlying: Iterator[_] = this + +    /** If the number of elements still to be returned by this iterator is known, +     *  that number, otherwise -1. +     */ +    def remaining = -1 +  } + +  object Iterator { +    val empty: Iterator[Nothing] = new Iterator[Nothing] { +      def hasNext = false +      def next = ??? +      override def remaining = 0 +    } +    def apply[A](xs: A*): Iterator[A] = new RandomAccessIterator[A] { +      override val limit = xs.length +      def apply(n: Int) = xs(n) +    } +    def nextOnEmpty = throw new NoSuchElementException("next on empty iterator") + +    case class Map[A, B](override val underlying: Iterator[A], f: A => B) extends Iterator[B] { +      def hasNext = underlying.hasNext +      def next = f(underlying.next) +      override def remaining = underlying.remaining +    } +    case class FlatMap[A, B](override val underlying: Iterator[A], f: A => CanIterate[B]) extends Iterator[B] { +      private var myCurrent: Iterator[B] = Iterator.empty +      private def current = { +        while (!myCurrent.hasNext && underlying.hasNext) +          myCurrent = f(underlying.next).iterator +        myCurrent +      } +      def hasNext = current.hasNext +      def next = current.next +    } +    case class Concat[A](override val underlying: Iterator[A], other: Iterator[A]) extends Iterator[A] { +      private var myCurrent = underlying +      private def current = { +        if (!myCurrent.hasNext && myCurrent.eq(underlying)) myCurrent = other +        myCurrent +      } +      def hasNext = current.hasNext +      def next = current.next +      override def 
remaining = + if (underlying.remaining >= 0 && other.remaining >= 0) + underlying.remaining + other.remaining + else -1 + } + case class Partition[A](override val underlying: Iterator[A], p: A => Boolean, lookahead: ArrayBuffer[A], dual: ArrayBuffer[A]) extends Iterator[A] { + def distribute() = { + val elem = underlying.next + (if (p(elem)) lookahead else dual) += elem + } + final def hasNext: Boolean = + !lookahead.isEmpty || underlying.hasNext && { distribute(); hasNext } + final def next = + if (hasNext) { + val r = lookahead.head + lookahead.trimStart(1) + r + } else Iterator.nextOnEmpty + } + case class Drop[A](override val underlying: Iterator[A], n: Int) extends Iterator[A] { + var toSkip = n + def hasNext: Boolean = underlying.hasNext && ( + toSkip == 0 || { underlying.next; toSkip -= 1; hasNext }) + def next = if (hasNext) underlying.next else nextOnEmpty + override def remaining = (underlying.remaining - toSkip) max -1 + } + case class Zip[A, B](override val underlying: Iterator[A], other: Iterator[B]) extends Iterator[(A, B)] { + def hasNext = underlying.hasNext && other.hasNext + def next = (underlying.next, other.next) + override def remaining = underlying.remaining min other.remaining + } + case class Reverse[A](override val underlying: RandomAccessIterator[A]) extends RandomAccessIterator[A] { + def apply(n: Int) = underlying.apply(underlying.limit - 1 - n) + def limit = underlying.remaining + } + } + + trait RandomAccessIterator[+A] extends Iterator[A] { self => + def apply(n: Int): A + def limit: Int + var start = 0 + override def remaining = (limit - start) max 0 + def hasNext = start < limit + def next: A = { val r = this(start); start += 1; r } + override def drop(n: Int): Iterator[A] = { start += (n max 0); this } + override def reverse: Iterator[A] = new Iterator.Reverse(this) + } +} + diff --git a/tests/pos/hk-named.scala b/tests/pos/hk-named.scala new file mode 100644 index 000000000..5f2cb6c74 --- /dev/null +++ b/tests/pos/hk-named.scala 
@@ -0,0 +1,58 @@ +import language.higherKinds + +object hk0 { + +  trait Lambda[type Elem] + +  abstract class Functor[F <: Lambda] { +    def map[A, B](f: A => B): F[Elem = A] => F[Elem = B] +  } + +  object test1 { +    class ListT[T] extends Lambda[T] + +    val ml: Functor[ListT] = ??? +    val mx = ml +    var xs: ListT[Int] = ??? +    var ys: ListT { type Elem = Int } = xs +    xs = ys +    val mm: (Int => Boolean) => ListT[Int] => ListT[Boolean] = mx.map[Int, Boolean] +    val mm2: (Int => Boolean) => ListT[Int] => ListT[Boolean] = mx.map +  } +} + + +object higherKinded { + +  type Untyped = Null + +  class Tree[type -Attr >: Untyped] { +    type ThisType <: Tree +    def withString(s: String): ThisType[Attr = String] = withString(s) +  } +/* +  class Ident[-Attr >: Untyped] extends Tree[Attr] { +    type ThisType = Ident +  } + +  val id = new Ident[Integer] + +  val y = id.withString("abc") + +  val z: Ident[String] = y + +  val zz: tpd.Tree = y + +  abstract class Instance[T >: Untyped] { +    type Tree = higherKinded.Tree[T] +  } + +  object tpd extends Instance[String] + +  def transform(tree: Tree[String]) = { +    val tree1 = tree.withString("") +    tree1: Tree[String] +  } +*/ +} + diff --git a/tests/pos/named-params.scala b/tests/pos/named-params.scala new file mode 100644 index 000000000..bcd64ea4b --- /dev/null +++ b/tests/pos/named-params.scala @@ -0,0 +1,46 @@ +package namedparams + +class C[type Elem, type Value](val elem: Elem) { +  def toVal: Elem = ??? 
+} + +class D[type Elem, V](elem: Elem) extends C[Elem, V](elem) + +object Test { +  val c = new C[String, String]("A") { +    override def toVal = elem +  } +  val x: c.Elem = c.elem + +  val c2: C { type Elem = String } = c + +  val c3 = new C[Elem = String, Value = Int]("B") +  val c4 = new C[Elem = String]("C") +  val x2: c2.Elem = c2.elem + +  def d1[E, V](x: E) = new D[E, V](x) +  def d2[E, V](x: E) = new C[Elem = E, Value = V](x) + +  val y1 = d1[Int, String](1) +  val y2 = d1[E = Int](2) +  val y3 = d1[V = String](3) +  val z1 = d2[E = Int, V = String](1) +  val z2 = d2[V = String, E = Int](1) +  val z3 = d2[E = Int](1) +  val z4 = d2[V = Int]("AAA") +  val z5 = d2[E = Int][V = String](1) +} + +// Adapted from i94-nada +trait Test1 { +  trait Monad[type Elem] { +    def unit: Elem +  } +  sealed abstract class Either[A,B] +  case class Left[A,B](unit: A) extends Either[A,B] with Monad[A] +  case class Right[A,B](unit: B) extends Either[A,B] with Monad[B] +  def flatMap[X,Y,M <: Monad](m: M[Elem = X], f: X => M[Elem = Y]): M[Elem = Y] = f(m.unit) +  println(flatMap(Left(1), {x: Int => Left(x)})) +} + + diff --git a/tests/pos/overloaddefault.scala b/tests/pos/overloaddefault.scala new file mode 100644 index 000000000..ed539719d --- /dev/null +++ b/tests/pos/overloaddefault.scala @@ -0,0 +1,15 @@ +trait Scope +class MScope extends Scope + +case class CI(pre: Int, decls: Scope) { +  def derivedCI(pre: Int) = new CI(pre, decls) +  def derivedCI(pre: Int = this.pre, decls: Scope = this.decls) = new CI(pre, decls) +} + +object Test { +  def ci = new CI(1, new MScope) +  val decls1 = new MScope +  ci.derivedCI(2, decls = decls1) +  ci.derivedCI(pre = 2) +  ci.derivedCI(decls = decls1) +} diff --git a/tests/pos/t2991.scala b/tests/pos/t2991.scala new file mode 100644 index 000000000..f11b082c6 --- /dev/null +++ b/tests/pos/t2991.scala @@ -0,0 +1,17 @@ +class X { +  def f(x: AnyRef) = x.toString +  def f(x: AnyRef, y: AnyRef*) = y.mkString(x.toString) +} + +class Y { +  def f(x: Int) = x.toString +  def 
f(x: Int, y: Int*) = y.mkString(x.toString) +} + +object Test { + val x: AnyRef = "a" + val res0 = new X + res0.f(x) + val res1 = new Y + res1.f(5) +} |