Diffstat (limited to 'src/dotty/tools')
 src/dotty/tools/dotc/ast/Desugar.scala                   |   8
 src/dotty/tools/dotc/ast/Trees.scala                     |   4
 src/dotty/tools/dotc/core/ConstraintHandling.scala       |  24
 src/dotty/tools/dotc/core/TypeComparer.scala             |  43
 src/dotty/tools/dotc/core/TypeErasure.scala              |   2
 src/dotty/tools/dotc/core/Types.scala                    |  23
 src/dotty/tools/dotc/core/tasty/NameBuffer.scala         |  18
 src/dotty/tools/dotc/parsing/Parsers.scala               |   2
 src/dotty/tools/dotc/reporting/diagnostic/messages.scala | 128
 src/dotty/tools/dotc/transform/ExtensionMethods.scala    |  20
 src/dotty/tools/dotc/transform/PatternMatcher.scala      |  22
 src/dotty/tools/dotc/transform/TreeTransform.scala       |   4
 src/dotty/tools/dotc/typer/Implicits.scala               |   4
 src/dotty/tools/dotc/typer/ProtoTypes.scala              |   7
 src/dotty/tools/dotc/typer/RefChecks.scala               |   2
 src/dotty/tools/dotc/typer/TypeAssigner.scala            |   2
 src/dotty/tools/dotc/typer/Typer.scala                   |  10
17 files changed, 248 insertions, 75 deletions
diff --git a/src/dotty/tools/dotc/ast/Desugar.scala b/src/dotty/tools/dotc/ast/Desugar.scala
index af34164dc..8b8e0b318 100644
--- a/src/dotty/tools/dotc/ast/Desugar.scala
+++ b/src/dotty/tools/dotc/ast/Desugar.scala
@@ -433,11 +433,11 @@ object desugar {
if (!mods.is(Implicit))
Nil
else if (ctx.owner is Package) {
- ctx.error("implicit classes may not be toplevel", cdef.pos)
+ ctx.error(TopLevelImplicitClass(cdef), cdef.pos)
Nil
}
else if (isCaseClass) {
- ctx.error("implicit classes may not be case classes", cdef.pos)
+ ctx.error(ImplicitCaseClass(cdef), cdef.pos)
Nil
}
else
@@ -497,7 +497,7 @@ object desugar {
.withPos(mdef.pos)
val ValDef(selfName, selfTpt, _) = tmpl.self
val selfMods = tmpl.self.mods
- if (!selfTpt.isEmpty) ctx.error("object definition may not have a self type", tmpl.self.pos)
+ if (!selfTpt.isEmpty) ctx.error(ObjectMayNotHaveSelfType(mdef), tmpl.self.pos)
val clsSelf = ValDef(selfName, SingletonTypeTree(Ident(name)), tmpl.self.rhs)
.withMods(selfMods)
.withPos(tmpl.self.pos orElse tmpl.pos.startPos)
@@ -931,7 +931,7 @@ object desugar {
val arity = ts.length
def tupleTypeRef = defn.TupleType(arity)
if (arity > Definitions.MaxTupleArity) {
- ctx.error(s"tuple too long (max allowed: ${Definitions.MaxTupleArity})", tree.pos)
+ ctx.error(TupleTooLong(ts), tree.pos)
unitLiteral
} else if (arity == 1) ts.head
else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts)
diff --git a/src/dotty/tools/dotc/ast/Trees.scala b/src/dotty/tools/dotc/ast/Trees.scala
index 1fb3557db..2c02e7d1e 100644
--- a/src/dotty/tools/dotc/ast/Trees.scala
+++ b/src/dotty/tools/dotc/ast/Trees.scala
@@ -48,7 +48,7 @@ object Trees {
* the existing tree transparently, assigning its `tpe` field,
* provided it was `null` before.
* - It is impossible to embed untyped trees in typed ones.
- * - Typed trees can be embedded untyped ones provided they are rooted
+ * - Typed trees can be embedded in untyped ones provided they are rooted
* in a TypedSplice node.
* - Type checking an untyped tree should remove all embedded `TypedSplice`
* nodes.
@@ -853,7 +853,7 @@ object Trees {
val cpy: TreeCopier
- /** A class for copying trees. The copy methods avid creating a new tree
+ /** A class for copying trees. The copy methods avoid creating a new tree
* If all arguments stay the same.
*
* Note: Some of the copy methods take a context.
diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala
index 3835d553c..0e155b9e1 100644
--- a/src/dotty/tools/dotc/core/ConstraintHandling.scala
+++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -162,7 +162,8 @@ trait ConstraintHandling {
/** Solve constraint set for given type parameter `param`.
* If `fromBelow` is true the parameter is approximated by its lower bound,
* otherwise it is approximated by its upper bound. However, any occurrences
- * of the parameter in a refinement somewhere in the bound are removed.
+ * of the parameter in a refinement somewhere in the bound are removed. Also
+ * wildcard types in bounds are approximated by their upper or lower bounds.
* (Such occurrences can arise for F-bounded types).
* The constraint is left unchanged.
* @return the instantiating type
@@ -174,6 +175,27 @@ trait ConstraintHandling {
def apply(tp: Type) = mapOver {
tp match {
case tp: RefinedType if param occursIn tp.refinedInfo => tp.parent
+ case tp: WildcardType =>
+ val bounds = tp.optBounds.orElse(TypeBounds.empty).bounds
+ // Try to instantiate the wildcard to a type that is known to conform to it.
+ // This means:
+ // If fromBelow is true, we minimize the type overall
+ // Hence, if variance < 0, pick the maximal safe type: bounds.lo
+ // (i.e. the whole bounds range is over the type)
+ // if variance > 0, pick the minimal safe type: bounds.hi
+ // (i.e. the whole bounds range is under the type)
+ // if variance == 0, pick bounds.lo anyway (this is arbitrary but in line with
+ // the principle that we pick the smaller type when in doubt).
+ // If fromBelow is false, we maximize the type overall and reverse the bounds
+ // if variance != 0. For variance == 0, we still minimize.
+ // In summary we pick the bound given by this table:
+ //
+ // variance | -1 0 1
+ // ------------------------
+ // from below | lo lo hi
+ // from above | hi lo lo
+ //
+ if (variance == 0 || fromBelow == (variance < 0)) bounds.lo else bounds.hi
case _ => tp
}
}
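
The table in the comment above reduces to a one-line decision. A minimal standalone sketch of it (illustrative only, not compiler code; `lo`/`hi` stand in for the wildcard's bounds):

    object WildcardBoundSketch {
      def pickBound[T](lo: T, hi: T, variance: Int, fromBelow: Boolean): T =
        if (variance == 0 || fromBelow == (variance < 0)) lo else hi

      val a = pickBound("lo", "hi", variance = 1,  fromBelow = true)   // "hi": minimizing overall, covariant position
      val b = pickBound("lo", "hi", variance = -1, fromBelow = false)  // "hi": maximizing overall, contravariant position
      val c = pickBound("lo", "hi", variance = 0,  fromBelow = true)   // "lo": invariant positions always take the lower bound
    }
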
diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala
index b495f00d0..1980fe50d 100644
--- a/src/dotty/tools/dotc/core/TypeComparer.scala
+++ b/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -965,28 +965,29 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
* Test that the resulting bounds are still satisfiable.
*/
private def narrowGADTBounds(tr: NamedType, bound: Type, isUpper: Boolean): Boolean =
- ctx.mode.is(Mode.GADTflexible) && {
- val tparam = tr.symbol
- typr.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.isRef(tparam)}")
- if (bound.isRef(tparam)) false
- else bound match {
- case bound: TypeRef
- if bound.symbol.is(BindDefinedType) && ctx.gadt.bounds.contains(bound.symbol) &&
- !tr.symbol.is(BindDefinedType) =>
- // Avoid having pattern-bound types in gadt bounds,
- // as these might be eliminated once the pattern is typechecked.
- // Pattern-bound type symbols should be narrowed first, only if that fails
- // should symbols in the environment be constrained.
- narrowGADTBounds(bound, tr, !isUpper)
- case _ =>
- val oldBounds = ctx.gadt.bounds(tparam)
- val newBounds =
- if (isUpper) TypeBounds(oldBounds.lo, oldBounds.hi & bound)
- else TypeBounds(oldBounds.lo | bound, oldBounds.hi)
- isSubType(newBounds.lo, newBounds.hi) &&
- { ctx.gadt.setBounds(tparam, newBounds); true }
+ ctx.mode.is(Mode.GADTflexible) && !frozenConstraint && {
+ val tparam = tr.symbol
+ typr.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.isRef(tparam)}")
+ if (bound.isRef(tparam)) false
+ else bound match {
+ case bound: TypeRef
+ if bound.symbol.is(BindDefinedType) &&
+ ctx.gadt.bounds.contains(bound.symbol) &&
+ !tr.symbol.is(BindDefinedType) =>
+ // Avoid having pattern-bound types in gadt bounds,
+ // as these might be eliminated once the pattern is typechecked.
+ // Pattern-bound type symbols should be narrowed first, only if that fails
+ // should symbols in the environment be constrained.
+ narrowGADTBounds(bound, tr, !isUpper)
+ case _ =>
+ val oldBounds = ctx.gadt.bounds(tparam)
+ val newBounds =
+ if (isUpper) TypeBounds(oldBounds.lo, oldBounds.hi & bound)
+ else TypeBounds(oldBounds.lo | bound, oldBounds.hi)
+ isSubType(newBounds.lo, newBounds.hi) &&
+ { ctx.gadt.setBounds(tparam, newBounds); true }
+ }
}
- }
// Tests around `matches`
diff --git a/src/dotty/tools/dotc/core/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala
index fd5fcb921..254ea3277 100644
--- a/src/dotty/tools/dotc/core/TypeErasure.scala
+++ b/src/dotty/tools/dotc/core/TypeErasure.scala
@@ -417,7 +417,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
// See doc comment for ElimByName for speculation how we could improve this.
else MethodType(Nil, Nil, eraseResult(rt))
case tp: PolyType =>
- this(tp.resultType) match {
+ eraseResult(tp.resultType) match {
case rt: MethodType => rt
case rt => MethodType(Nil, Nil, rt)
}
diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala
index d242843e5..38913a7d0 100644
--- a/src/dotty/tools/dotc/core/Types.scala
+++ b/src/dotty/tools/dotc/core/Types.scala
@@ -239,9 +239,14 @@ object Types {
/** The parts of this type which are type or term refs and which
* satisfy predicate `p`.
+ *
+ * @param p The predicate to satisfy
+ * @param excludeLowerBounds If set to true, the lower bounds of abstract
+ * types will be ignored.
*/
- def namedPartsWith(p: NamedType => Boolean)(implicit ctx: Context): collection.Set[NamedType] =
- new NamedPartsAccumulator(p).apply(mutable.LinkedHashSet(), this)
+ def namedPartsWith(p: NamedType => Boolean, excludeLowerBounds: Boolean = false)
+ (implicit ctx: Context): collection.Set[NamedType] =
+ new NamedPartsAccumulator(p, excludeLowerBounds).apply(mutable.LinkedHashSet(), this)
/** Map function `f` over elements of an AndType, rebuilding with function `g` */
def mapReduceAnd[T](f: Type => T)(g: (T, T) => T)(implicit ctx: Context): T = stripTypeVar match {
@@ -615,13 +620,13 @@ object Types {
/** The set of abstract term members of this type. */
final def abstractTermMembers(implicit ctx: Context): Seq[SingleDenotation] = track("abstractTermMembers") {
memberDenots(abstractTermNameFilter,
- (name, buf) => buf ++= member(name).altsWith(_ is Deferred))
+ (name, buf) => buf ++= nonPrivateMember(name).altsWith(_ is Deferred))
}
/** The set of abstract type members of this type. */
final def abstractTypeMembers(implicit ctx: Context): Seq[SingleDenotation] = track("abstractTypeMembers") {
memberDenots(abstractTypeNameFilter,
- (name, buf) => buf += member(name).asSingleDenotation)
+ (name, buf) => buf += nonPrivateMember(name).asSingleDenotation)
}
/** The set of abstract type members of this type. */
@@ -3710,7 +3715,8 @@ object Types {
def apply(x: Boolean, tp: Type) = x || tp.isUnsafeNonvariant || foldOver(x, tp)
}
- class NamedPartsAccumulator(p: NamedType => Boolean)(implicit ctx: Context) extends TypeAccumulator[mutable.Set[NamedType]] {
+ class NamedPartsAccumulator(p: NamedType => Boolean, excludeLowerBounds: Boolean = false)
+ (implicit ctx: Context) extends TypeAccumulator[mutable.Set[NamedType]] {
override def stopAtStatic = false
def maybeAdd(x: mutable.Set[NamedType], tp: NamedType) = if (p(tp)) x += tp else x
val seen: mutable.Set[Type] = mutable.Set()
@@ -3723,7 +3729,8 @@ object Types {
apply(foldOver(maybeAdd(x, tp), tp), tp.underlying)
case tp: TypeRef =>
foldOver(maybeAdd(x, tp), tp)
- case TypeBounds(_, hi) =>
+ case TypeBounds(lo, hi) =>
+ if (!excludeLowerBounds) apply(x, lo)
apply(x, hi)
case tp: ThisType =>
apply(x, tp.tref)
@@ -3756,7 +3763,7 @@ object Types {
object abstractTypeNameFilter extends NameFilter {
def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
name.isTypeName && {
- val mbr = pre.member(name)
+ val mbr = pre.nonPrivateMember(name)
(mbr.symbol is Deferred) && mbr.info.isInstanceOf[RealTypeBounds]
}
}
@@ -3773,7 +3780,7 @@ object Types {
/** A filter for names of deferred term definitions of a given type */
object abstractTermNameFilter extends NameFilter {
def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
- name.isTermName && (pre member name).hasAltWith(_.symbol is Deferred)
+ name.isTermName && pre.nonPrivateMember(name).hasAltWith(_.symbol is Deferred)
}
object typeNameFilter extends NameFilter {
diff --git a/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
index 69fd63805..3ff7298ce 100644
--- a/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
+++ b/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
@@ -12,6 +12,7 @@ import TastyName._
import TastyFormat._
class NameBuffer extends TastyBuffer(10000) {
+ import NameBuffer._
private val nameRefs = new mutable.LinkedHashMap[TastyName, NameRef]
@@ -40,13 +41,12 @@ class NameBuffer extends TastyBuffer(10000) {
nameIndex(name)
}
- private def withLength(op: => Unit): Unit = {
+ private def withLength(op: => Unit, lengthWidth: Int = 1): Unit = {
val lengthAddr = currentAddr
- writeByte(0)
+ for (i <- 0 until lengthWidth) writeByte(0)
op
val length = currentAddr.index - lengthAddr.index - 1
- assert(length < 128)
- putNat(lengthAddr, length, 1)
+ putNat(lengthAddr, length, lengthWidth)
}
def writeNameRef(ref: NameRef) = writeNat(ref.index)
@@ -64,7 +64,9 @@ class NameBuffer extends TastyBuffer(10000) {
withLength { writeNameRef(qualified); writeNameRef(selector) }
case Signed(original, params, result) =>
writeByte(SIGNED)
- withLength { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) }
+ withLength(
+ { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) },
+ if ((params.length + 2) * maxIndexWidth <= maxNumInByte) 1 else 2)
case Expanded(prefix, original) =>
writeByte(EXPANDED)
withLength { writeNameRef(prefix); writeNameRef(original) }
@@ -91,3 +93,9 @@ class NameBuffer extends TastyBuffer(10000) {
}
}
}
+
+object NameBuffer {
+ private val maxIndexWidth = 3 // allows name indices up to 2^21.
+ private val payloadBitsPerByte = 7 // determined by nat encoding in TastyBuffer
+ private val maxNumInByte = (1 << payloadBitsPerByte) - 1
+}
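
The new `lengthWidth` choice for Signed names follows from a worst-case size estimate. A minimal sketch of that arithmetic, using the constants added above; `signedLengthWidth` is a hypothetical helper, not part of the patch:

    object SignedLengthSketch {
      // Constants mirror the NameBuffer companion object added above.
      val maxIndexWidth      = 3                              // a nat-encoded name index takes at most 3 bytes (indices up to 2^21)
      val payloadBitsPerByte = 7                              // the nat encoding stores 7 payload bits per byte
      val maxNumInByte       = (1 << payloadBitsPerByte) - 1  // 127, the largest length a single byte can express

      // Worst case, a Signed name's payload is original + result + params,
      // each a name ref of at most maxIndexWidth bytes; if even that fits
      // in a single byte's range, one length byte is enough, otherwise two
      // bytes are reserved.
      def signedLengthWidth(numParams: Int): Int =
        if ((numParams + 2) * maxIndexWidth <= maxNumInByte) 1 else 2

      val small = signedLengthWidth(10)  // 1: (10 + 2) * 3 = 36  <= 127
      val large = signedLengthWidth(50)  // 2: (50 + 2) * 3 = 156 >  127
    }
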
diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala
index 507a2e80c..46e7512f3 100644
--- a/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -2061,7 +2061,7 @@ object Parsers {
def templateBody(): (ValDef, List[Tree]) = {
val r = inDefScopeBraces { templateStatSeq() }
if (in.token == WITH) {
- syntaxError("early definitions are not supported; use trait parameters instead")
+ syntaxError(EarlyDefinitionsNotSupported())
in.nextToken()
template(emptyConstructor)
}
diff --git a/src/dotty/tools/dotc/reporting/diagnostic/messages.scala b/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
index 9cfac4801..633501295 100644
--- a/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
+++ b/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
@@ -5,11 +5,12 @@ package diagnostic
import dotc.core._
import Contexts.Context, Decorators._, Symbols._, Names._, Types._
+import ast.untpd.{Modifiers, ModuleDef}
import util.{SourceFile, NoSource}
import util.{SourcePosition, NoSourcePosition}
import config.Settings.Setting
import interfaces.Diagnostic.{ERROR, WARNING, INFO}
-import printing.SyntaxHighlighting._
+import printing.Highlighting._
import printing.Formatting
object messages {
@@ -274,4 +275,129 @@ object messages {
val explanation = ""
}
+
+ case class EarlyDefinitionsNotSupported()(implicit ctx:Context) extends Message(9) {
+ val kind = "Syntax"
+
+ val msg = "early definitions are not supported; use trait parameters instead"
+
+ val code1 =
+ """|trait Logging {
+ | val f: File
+ | f.open()
+ | onExit(f.close())
+ | def log(msg: String) = f.write(msg)
+ |}
+ |
+ |class B extends Logging {
+ | val f = new File("log.data") // triggers a null pointer exception
+ |}
+ |
+ |class C extends {
+ | val f = new File("log.data") // early definition gets around the null pointer exception
+ |} with Logging""".stripMargin
+
+ val code2 =
+ """|trait Logging(f: File) {
+ | f.open()
+ | onExit(f.close())
+ | def log(msg: String) = f.write(msg)
+ |}
+ |
+ |class C extends Logging(new File("log.data"))""".stripMargin
+
+ val explanation =
+ hl"""Earlier versions of Scala did not support trait parameters and "early definitions" (also known as "early initializers")
+ |were used as an alternative.
+ |
+ |Example of old syntax:
+ |
+ |$code1
+ |
+ |The above code can now be written as:
+ |
+ |$code2
+ |""".stripMargin
+ }
+
+ def implicitClassRestrictionsText(implicit ctx: Context) =
+ hl"""${NoColor("For a full list of restrictions on implicit classes visit")}
+ | ${Blue("http://docs.scala-lang.org/overviews/core/implicit-classes.html")}""".stripMargin
+
+ case class TopLevelImplicitClass(cdef: untpd.TypeDef)(implicit ctx: Context)
+ extends Message(10) {
+ val kind = "Syntax"
+
+ val msg = hl"""|An ${"implicit class"} may not be top-level"""
+
+ val explanation = {
+ val TypeDef(name, impl @ Template(constr0, parents, self, _)) = cdef
+ val exampleArgs = constr0.vparamss(0).map(_.withMods(Modifiers()).show).mkString(", ")
+ def defHasBody[T] = impl.body.exists(!_.isEmpty)
+ val exampleBody = if (defHasBody) "{\n ...\n }" else ""
+ hl"""|There may not be any method, member or object in scope with the same name as the
+ |implicit class, and a case class automatically gets a companion object with the same name
+ |created by the compiler, which would cause a naming conflict if it were allowed.
+ |
+ |""".stripMargin + implicitClassRestrictionsText + hl"""|
+ |
+ |To resolve the conflict declare ${cdef.name} inside of an ${"object"} then import the class
+ |from the object at the use site if needed, for example:
+ |
+ |object Implicits {
+ | implicit class ${cdef.name}($exampleArgs)$exampleBody
+ |}
+ |
+ |// At the use site:
+ |import Implicits.${cdef.name}""".stripMargin
+ }
+ }
+
+ case class ImplicitCaseClass(cdef: untpd.TypeDef)(implicit ctx: Context)
+ extends Message(11) {
+ val kind = "Syntax"
+
+ val msg = hl"""|A ${"case class"} may not be defined as ${"implicit"}"""
+
+ val explanation =
+ hl"""|implicit classes may not be case classes. Instead use a plain class:
+ | example: implicit class ${cdef.name}...
+ |
+ |""".stripMargin + implicitClassRestrictionsText
+ }
+
+ case class ObjectMayNotHaveSelfType(mdef: untpd.ModuleDef)(implicit ctx: Context)
+ extends Message(12) {
+ val kind = "Syntax"
+
+ val msg = hl"""|${"objects"} must not have a ${"self type"}"""
+
+ val explanation = {
+ val ModuleDef(name, tmpl) = mdef
+ val ValDef(_, selfTpt, _) = tmpl.self
+ hl"""|objects must not have a ${"self type"}:
+ |
+ |Consider these alternative solutions:
+ | - Create a trait or a class instead of an object
+ | - Let the object extend a trait containing the self type:
+ | example: object $name extends ${selfTpt.show}""".stripMargin
+ }
+ }
+
+ case class TupleTooLong(ts: List[untpd.Tree])(implicit ctx: Context)
+ extends Message(13) {
+ import Definitions.MaxTupleArity
+ val kind = "Syntax"
+
+ val msg = hl"""|A ${"tuple"} cannot have more than ${MaxTupleArity} members"""
+
+ val explanation = {
+ val members = ts.map(_.showSummary).grouped(MaxTupleArity)
+ val nestedRepresentation = members.map(_.mkString(", ")).mkString(")(")
+ hl"""|This restriction will be removed in the future.
+ |Currently it is possible to use nested tuples when more than ${MaxTupleArity} elements are needed, for example:
+ |
+ | ((${nestedRepresentation}))""".stripMargin
+ }
+ }
}
diff --git a/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/src/dotty/tools/dotc/transform/ExtensionMethods.scala
index 62a21198d..5ae4e8a54 100644
--- a/src/dotty/tools/dotc/transform/ExtensionMethods.scala
+++ b/src/dotty/tools/dotc/transform/ExtensionMethods.scala
@@ -32,6 +32,9 @@ import SymUtils._
* in [[ElimErasedValueType]].
* This is different from the implementation of value classes in Scala 2
* (see SIP-15) which uses `asInstanceOf` which does not typecheck.
+ *
+ * Finally, if the constructor of a value class is private or protected
+ * it is widened to public.
*/
class ExtensionMethods extends MiniPhaseTransform with DenotTransformer with FullParameterization { thisTransformer =>
@@ -96,11 +99,18 @@ class ExtensionMethods extends MiniPhaseTransform with DenotTransformer with Ful
case _ =>
moduleClassSym
}
- case ref: SymDenotation
- if isMethodWithExtension(ref) && ref.hasAnnotation(defn.TailrecAnnot) =>
- val ref1 = ref.copySymDenotation()
- ref1.removeAnnotation(defn.TailrecAnnot)
- ref1
+ case ref: SymDenotation =>
+ if (isMethodWithExtension(ref) && ref.hasAnnotation(defn.TailrecAnnot)) {
+ val ref1 = ref.copySymDenotation()
+ ref1.removeAnnotation(defn.TailrecAnnot)
+ ref1
+ }
+ else if (ref.isConstructor && isDerivedValueClass(ref.owner) && ref.is(AccessFlags)) {
+ val ref1 = ref.copySymDenotation()
+ ref1.resetFlag(AccessFlags)
+ ref1
+ }
+ else ref
case _ =>
ref
}
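
The constructor widening described in the doc comment is easiest to see on a made-up example; the `Meters` class and the `+$extension` name below are illustrative, not part of the patch:

    // Hypothetical value class with a private constructor.
    class Meters private (val underlying: Double) extends AnyVal {
      def +(that: Meters): Meters = new Meters(underlying + that.underlying)
    }

    object Meters {
      def fromDouble(d: Double): Meters = new Meters(d)
    }

One reading of the change: ExtensionMethods rewrites `+` into an extension method on the companion (roughly `+$extension`), and the transformed program, including boxing introduced by later phases, may need to construct `Meters` outside the class itself, where a private or protected constructor would not be accessible; resetting the access flags avoids that.
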
diff --git a/src/dotty/tools/dotc/transform/PatternMatcher.scala b/src/dotty/tools/dotc/transform/PatternMatcher.scala
index 49c0eabec..8636d5084 100644
--- a/src/dotty/tools/dotc/transform/PatternMatcher.scala
+++ b/src/dotty/tools/dotc/transform/PatternMatcher.scala
@@ -240,17 +240,21 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {
val isDefined = extractorMemberType(prev.tpe, nme.isDefined)
if ((isDefined isRef defn.BooleanClass) && getTp.exists) {
- val tmpSym = freshSym(prev.pos, prev.tpe, "o")
- val prevValue = ref(tmpSym).select("get".toTermName).ensureApplied
+ // isDefined and get may be overloaded
+ val getDenot = prev.tpe.member(nme.get).suchThat(_.info.isParameterless)
+ val isDefinedDenot = prev.tpe.member(nme.isDefined).suchThat(_.info.isParameterless)
- Block(
- List(ValDef(tmpSym, prev)),
- // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
- ifThenElseZero(
- ref(tmpSym).select(nme.isDefined),
- Block(List(ValDef(b.asTerm, prevValue)), next)
- )
+ val tmpSym = freshSym(prev.pos, prev.tpe, "o")
+ val prevValue = ref(tmpSym).select(getDenot.symbol).ensureApplied
+
+ Block(
+ List(ValDef(tmpSym, prev)),
+ // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
+ ifThenElseZero(
+ ref(tmpSym).select(isDefinedDenot.symbol),
+ Block(List(ValDef(b.asTerm, prevValue)), next)
)
+ )
} else {
assert(defn.isProductSubType(prev.tpe))
val nullCheck: Tree = prev.select(defn.Object_ne).appliedTo(Literal(Constant(null)))
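
A made-up extractor shows why the denotations are now filtered with `suchThat(_.info.isParameterless)`; nothing below is from the patch:

    // Hypothetical extractor whose result type overloads `get`.
    final class MaybeName(private val s: String) {
      def isEmpty: Boolean = s == null
      def isDefined: Boolean = !isEmpty
      def get: String = s                                                 // the parameterless accessor the matcher needs
      def get(default: String): String = if (isDefined) s else default    // unrelated overload
    }

    object Name {
      def unapply(s: String): MaybeName = new MaybeName(s)
    }

When lowering `case Name(n) => ...`, the matcher selects `isDefined` and `get` on the temporary holding the `unapply` result; without the `isParameterless` filter, `prev.tpe.member(nme.get)` would resolve to a multi-denotation instead of the single parameterless alternative.
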
diff --git a/src/dotty/tools/dotc/transform/TreeTransform.scala b/src/dotty/tools/dotc/transform/TreeTransform.scala
index 52a3ad94e..45fa3d607 100644
--- a/src/dotty/tools/dotc/transform/TreeTransform.scala
+++ b/src/dotty/tools/dotc/transform/TreeTransform.scala
@@ -42,7 +42,7 @@ object TreeTransforms {
* the general dispatch overhead as opposed to the concrete work done in transformations. So that leaves us with
* 0.2sec, or roughly 600M processor cycles.
*
- * Now, to the amount of work that needs to be done. The codebase produces of about 250'000 trees after typechecking.
+ * Now, to the amount of work that needs to be done. The codebase produces an average of about 250'000 trees after typechecking.
* Transformations are likely to make this bigger so let's assume 300K trees on average. We estimate to have about 100
* micro-transformations. Let's say 5 transformation groups of 20 micro-transformations each. (by comparison,
* scalac has in excess of 20 phases, and most phases do multiple transformations). There are then 30M visits
@@ -208,7 +208,7 @@ object TreeTransforms {
if (cls.getDeclaredMethods.exists(_.getName == name)) cls != classOf[TreeTransform]
else hasRedefinedMethod(cls.getSuperclass, name)
- /** Create an index array `next` of size one larger than teh size of `transforms` such that
+ /** Create an index array `next` of size one larger than the size of `transforms` such that
* for each index i, `next(i)` is the smallest index j such that
*
* i <= j
diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala
index d6cf7fb2b..f3dceea71 100644
--- a/src/dotty/tools/dotc/typer/Implicits.scala
+++ b/src/dotty/tools/dotc/typer/Implicits.scala
@@ -343,7 +343,9 @@ trait ImplicitRunInfo { self: RunInfo =>
}
tp.classSymbols(liftingCtx) foreach addClassScope
case _ =>
- for (part <- tp.namedPartsWith(_.isType))
+ // We exclude lower bounds to conform to SLS 7.2:
+ // "The parts of a type T are: [...] if T is an abstract type, the parts of its upper bound"
+ for (part <- tp.namedPartsWith(_.isType, excludeLowerBounds = true))
comps ++= iscopeRefs(part)
}
comps
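
A minimal sketch of what the SLS 7.2 reading excludes; the types are invented for illustration:

    // Invented example: an abstract type with both a lower and an upper bound.
    class Upper
    class Lower extends Upper

    trait Ctx {
      type T >: Lower <: Upper
    }

For an implicit search involving `T`, the parts of `T` are taken from its upper bound only, so `Upper` contributes to the implicit scope while `Lower` does not; passing `excludeLowerBounds = true` keeps the accumulator from walking into `Lower`.
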
diff --git a/src/dotty/tools/dotc/typer/ProtoTypes.scala b/src/dotty/tools/dotc/typer/ProtoTypes.scala
index d451d6abd..08f566d49 100644
--- a/src/dotty/tools/dotc/typer/ProtoTypes.scala
+++ b/src/dotty/tools/dotc/typer/ProtoTypes.scala
@@ -400,9 +400,10 @@ object ProtoTypes {
if (mt.isDependent) tp
else {
val rt = normalize(mt.resultType, pt)
- if (pt.isInstanceOf[ApplyingProto])
- mt.derivedMethodType(mt.paramNames, mt.paramTypes, rt)
- else {
+ pt match {
+ case pt: IgnoredProto => mt
+ case pt: ApplyingProto => mt.derivedMethodType(mt.paramNames, mt.paramTypes, rt)
+ case _ =>
val ft = defn.FunctionOf(mt.paramTypes, rt)
if (mt.paramTypes.nonEmpty || ft <:< pt) ft else rt
}
diff --git a/src/dotty/tools/dotc/typer/RefChecks.scala b/src/dotty/tools/dotc/typer/RefChecks.scala
index 4d82a2d12..834bb37a8 100644
--- a/src/dotty/tools/dotc/typer/RefChecks.scala
+++ b/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -487,7 +487,7 @@ object RefChecks {
// abstract method, and a cursory examination of the difference reveals
// something obvious to us, let's make it more obvious to them.
val abstractParams = underlying.info.firstParamTypes
- val matchingName = clazz.info.member(underlying.name).alternatives
+ val matchingName = clazz.info.nonPrivateMember(underlying.name).alternatives
val matchingArity = matchingName filter { m =>
!m.symbol.is(Deferred) &&
m.info.firstParamTypes.length == abstractParams.length
diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala
index 696ecfedb..861847b11 100644
--- a/src/dotty/tools/dotc/typer/TypeAssigner.scala
+++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala
@@ -416,7 +416,7 @@ trait TypeAssigner {
def assignType(tree: untpd.SeqLiteral, elems: List[Tree], elemtpt: Tree)(implicit ctx: Context) = {
val ownType = tree match {
- case tree: JavaSeqLiteral => defn.ArrayOf(elemtpt.tpe)
+ case tree: untpd.JavaSeqLiteral => defn.ArrayOf(elemtpt.tpe)
case _ => if (ctx.erasedTypes) defn.SeqType else defn.SeqType.appliedTo(elemtpt.tpe)
}
tree.withType(ownType)
diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala
index 6be119319..6fb0dd7c7 100644
--- a/src/dotty/tools/dotc/typer/Typer.scala
+++ b/src/dotty/tools/dotc/typer/Typer.scala
@@ -577,16 +577,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
def escapingRefs(block: Tree, localSyms: => List[Symbol])(implicit ctx: Context): collection.Set[NamedType] = {
- var hoisted: Set[Symbol] = Set()
lazy val locals = localSyms.toSet
- def leakingTypes(tp: Type): collection.Set[NamedType] =
- tp namedPartsWith (tp => locals.contains(tp.symbol))
- def typeLeaks(tp: Type): Boolean = leakingTypes(tp).nonEmpty
- def classLeaks(sym: ClassSymbol): Boolean =
- (ctx.owner is Method) || // can't hoist classes out of method bodies
- (sym.info.parents exists typeLeaks) ||
- (sym.info.decls.toList exists (t => typeLeaks(t.info)))
- leakingTypes(block.tpe)
+ block.tpe namedPartsWith (tp => locals.contains(tp.symbol))
}
/** Check that expression's type can be expressed without references to locally defined