Diffstat (limited to 'src')
-rw-r--r--  src/dotty/Pair.scala | 5
-rw-r--r--  src/dotty/Singleton.scala | 5
-rw-r--r--  src/dotty/tools/dotc/ast/Desugar.scala | 8
-rw-r--r--  src/dotty/tools/dotc/ast/untpd.scala | 50
-rw-r--r--  src/dotty/tools/dotc/core/ConstraintHandling.scala | 24
-rw-r--r--  src/dotty/tools/dotc/core/Definitions.scala | 39
-rw-r--r--  src/dotty/tools/dotc/core/Flags.scala | 3
-rw-r--r--  src/dotty/tools/dotc/core/TypeComparer.scala | 43
-rw-r--r--  src/dotty/tools/dotc/core/Types.scala | 23
-rw-r--r--  src/dotty/tools/dotc/core/tasty/NameBuffer.scala | 18
-rw-r--r--  src/dotty/tools/dotc/parsing/JavaParsers.scala | 2
-rw-r--r--  src/dotty/tools/dotc/parsing/Parsers.scala | 101
-rw-r--r--  src/dotty/tools/dotc/reporting/diagnostic/messages.scala | 272
-rw-r--r--  src/dotty/tools/dotc/transform/ExtensionMethods.scala | 20
-rw-r--r--  src/dotty/tools/dotc/transform/PatternMatcher.scala | 22
-rw-r--r--  src/dotty/tools/dotc/transform/PostTyper.scala | 13
-rw-r--r--  src/dotty/tools/dotc/transform/TailRec.scala | 6
-rw-r--r--  src/dotty/tools/dotc/typer/Checking.scala | 11
-rw-r--r--  src/dotty/tools/dotc/typer/Implicits.scala | 4
-rw-r--r--  src/dotty/tools/dotc/typer/RefChecks.scala | 2
-rw-r--r--  src/dotty/tools/dotc/typer/Typer.scala | 10
-rw-r--r--  src/scala/annotation/internal/Alias.scala (renamed from src/dotty/annotation/internal/Alias.scala) | 2
-rw-r--r--  src/scala/annotation/internal/AnnotationDefault.scala (renamed from src/dotty/annotation/internal/AnnotationDefault.scala) | 2
-rw-r--r--  src/scala/annotation/internal/Body.scala (renamed from src/dotty/annotation/internal/Body.scala) | 2
-rw-r--r--  src/scala/annotation/internal/Child.scala (renamed from src/dotty/annotation/internal/Child.scala) | 4
-rw-r--r--  src/scala/annotation/internal/InlineParam.scala (renamed from src/dotty/annotation/internal/InlineParam.scala) | 2
-rw-r--r--  src/scala/annotation/internal/Repeated.scala (renamed from src/dotty/annotation/internal/Repeated.scala) | 2
-rw-r--r--  src/scala/annotation/internal/SourceFile.scala (renamed from src/dotty/annotation/internal/SourceFile.scala) | 2
-rw-r--r--  src/scala/annotation/internal/UnsafeNonvariant.scala (renamed from src/dotty/annotation/internal/UnsafeNonvariant.scala) | 2
-rw-r--r--  src/scalaShadowing/language.scala | 198
30 files changed, 747 insertions, 150 deletions
diff --git a/src/dotty/Pair.scala b/src/dotty/Pair.scala
deleted file mode 100644
index 2322fe169..000000000
--- a/src/dotty/Pair.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package dotty
-
-class Pair[T, U](x: T, y: U) {
-
-}
diff --git a/src/dotty/Singleton.scala b/src/dotty/Singleton.scala
deleted file mode 100644
index 4ba57a12d..000000000
--- a/src/dotty/Singleton.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package dotty
-
-class Singleton {
-
-}
diff --git a/src/dotty/tools/dotc/ast/Desugar.scala b/src/dotty/tools/dotc/ast/Desugar.scala
index af34164dc..8b8e0b318 100644
--- a/src/dotty/tools/dotc/ast/Desugar.scala
+++ b/src/dotty/tools/dotc/ast/Desugar.scala
@@ -433,11 +433,11 @@ object desugar {
if (!mods.is(Implicit))
Nil
else if (ctx.owner is Package) {
- ctx.error("implicit classes may not be toplevel", cdef.pos)
+ ctx.error(TopLevelImplicitClass(cdef), cdef.pos)
Nil
}
else if (isCaseClass) {
- ctx.error("implicit classes may not be case classes", cdef.pos)
+ ctx.error(ImplicitCaseClass(cdef), cdef.pos)
Nil
}
else
@@ -497,7 +497,7 @@ object desugar {
.withPos(mdef.pos)
val ValDef(selfName, selfTpt, _) = tmpl.self
val selfMods = tmpl.self.mods
- if (!selfTpt.isEmpty) ctx.error("object definition may not have a self type", tmpl.self.pos)
+ if (!selfTpt.isEmpty) ctx.error(ObjectMayNotHaveSelfType(mdef), tmpl.self.pos)
val clsSelf = ValDef(selfName, SingletonTypeTree(Ident(name)), tmpl.self.rhs)
.withMods(selfMods)
.withPos(tmpl.self.pos orElse tmpl.pos.startPos)
@@ -931,7 +931,7 @@ object desugar {
val arity = ts.length
def tupleTypeRef = defn.TupleType(arity)
if (arity > Definitions.MaxTupleArity) {
- ctx.error(s"tuple too long (max allowed: ${Definitions.MaxTupleArity})", tree.pos)
+ ctx.error(TupleTooLong(ts), tree.pos)
unitLiteral
} else if (arity == 1) ts.head
else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts)
diff --git a/src/dotty/tools/dotc/ast/untpd.scala b/src/dotty/tools/dotc/ast/untpd.scala
index 852c3a346..6513dfdc3 100644
--- a/src/dotty/tools/dotc/ast/untpd.scala
+++ b/src/dotty/tools/dotc/ast/untpd.scala
@@ -94,9 +94,42 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
class InfixOpBlock(leftOperand: Tree, rightOp: Tree) extends Block(leftOperand :: Nil, rightOp)
// ----- Modifiers -----------------------------------------------------
+ /** Mod is intended to record syntactic information about modifiers; it is
+ * NOT a replacement for FlagSet.
+ *
+ * For any query about semantic information, check `flags` instead.
+ */
+ sealed abstract class Mod(val flags: FlagSet) extends Positioned
- /** Modifiers and annotations for definitions
- * @param flags The set flags
+ object Mod {
+ case class Private() extends Mod(Flags.Private)
+
+ case class Protected() extends Mod(Flags.Protected)
+
+ case class Val() extends Mod(Flags.EmptyFlags)
+
+ case class Var() extends Mod(Flags.Mutable)
+
+ case class Implicit(flag: FlagSet = Flags.ImplicitCommon) extends Mod(flag)
+
+ case class Final() extends Mod(Flags.Final)
+
+ case class Sealed() extends Mod(Flags.Sealed)
+
+ case class Override() extends Mod(Flags.Override)
+
+ case class Abstract() extends Mod(Flags.Abstract)
+
+ case class Lazy() extends Mod(Flags.Lazy)
+
+ case class Inline() extends Mod(Flags.Inline)
+
+ case class Type() extends Mod(Flags.EmptyFlags)
+ }
+
+ /** Modifiers and annotations for definitions
+ *
+ * @param flags The set flags
* @param privateWithin If a private or protected modifier is followed by a
* qualifier [q], the name q, otherwise "" as a typename.
* @param annotations The annotations preceding the modifiers
@@ -104,7 +137,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
case class Modifiers (
flags: FlagSet = EmptyFlags,
privateWithin: TypeName = tpnme.EMPTY,
- annotations: List[Tree] = Nil) extends Positioned with Cloneable {
+ annotations: List[Tree] = Nil,
+ mods: List[Mod] = Nil) extends Positioned with Cloneable {
def is(fs: FlagSet): Boolean = flags is fs
def is(fc: FlagConjunction): Boolean = flags is fc
@@ -120,7 +154,15 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
if (this.flags == flags) this
else copy(flags = flags)
- def withAddedAnnotation(annot: Tree): Modifiers =
+ def withAddedMod(mod: Mod): Modifiers =
+ if (mods.exists(_ eq mod)) this
+ else withMods(mods :+ mod)
+
+ def withMods(ms: List[Mod]): Modifiers =
+ if (mods eq ms) this
+ else copy(mods = ms)
+
+ def withAddedAnnotation(annot: Tree): Modifiers =
if (annotations.exists(_ eq annot)) this
else withAnnotations(annotations :+ annot)
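
The split between semantic flags and syntactic `Mod` tokens can be illustrated outside the compiler with a minimal, self-contained sketch (simplified stand-in types, not dotty's real API):

object ModSketch {
  type FlagSet = Long
  val EmptyFlags: FlagSet  = 0L
  val MutableFlag: FlagSet = 1L << 0

  sealed abstract class Mod(val flags: FlagSet)
  case class Val() extends Mod(EmptyFlags)   // purely syntactic: contributes no flag
  case class Var() extends Mod(MutableFlag)

  case class Modifiers(flags: FlagSet = EmptyFlags, mods: List[Mod] = Nil) {
    def is(fs: FlagSet): Boolean = (flags & fs) != 0
    def withAddedMod(mod: Mod): Modifiers =
      if (mods.exists(_ eq mod)) this else copy(mods = mods :+ mod)
    def addMod(mod: Mod): Modifiers =          // record the flag and the token together
      copy(flags = flags | mod.flags).withAddedMod(mod)
  }

  def main(args: Array[String]): Unit = {
    val withVal = Modifiers().addMod(Val())
    val plain   = Modifiers()
    println(withVal.flags == plain.flags)  // true: `val` adds no semantic flag here
    println(withVal.mods)                  // List(Val()): the syntax is still recorded
  }
}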
diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala
index 3835d553c..0e155b9e1 100644
--- a/src/dotty/tools/dotc/core/ConstraintHandling.scala
+++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -162,7 +162,8 @@ trait ConstraintHandling {
/** Solve constraint set for given type parameter `param`.
* If `fromBelow` is true the parameter is approximated by its lower bound,
* otherwise it is approximated by its upper bound. However, any occurrences
- * of the parameter in a refinement somewhere in the bound are removed.
+ * of the parameter in a refinement somewhere in the bound are removed. Also,
+ * wildcard types in bounds are approximated by their upper or lower bounds.
* (Such occurrences can arise for F-bounded types).
* The constraint is left unchanged.
* @return the instantiating type
@@ -174,6 +175,27 @@ trait ConstraintHandling {
def apply(tp: Type) = mapOver {
tp match {
case tp: RefinedType if param occursIn tp.refinedInfo => tp.parent
+ case tp: WildcardType =>
+ val bounds = tp.optBounds.orElse(TypeBounds.empty).bounds
+ // Try to instantiate the wildcard to a type that is known to conform to it.
+ // This means:
+ // If fromBelow is true, we minimize the type overall
+ // Hence, if variance < 0, pick the maximal safe type: bounds.lo
+ // (i.e. the whole bounds range is over the type)
+ // if variance > 0, pick the minimal safe type: bounds.hi
+ // (i.e. the whole bounds range is under the type)
+ // if variance == 0, pick bounds.lo anyway (this is arbitrary but in line with
+ // the principle that we pick the smaller type when in doubt).
+ // If fromBelow is false, we maximize the type overall and reverse the bounds
+ // if variance != 0. For variance == 0, we still minimize.
+ // In summary we pick the bound given by this table:
+ //
+ // variance | -1 0 1
+ // ------------------------
+ // from below | lo lo hi
+ // from above | hi lo lo
+ //
+ if (variance == 0 || fromBelow == (variance < 0)) bounds.lo else bounds.hi
case _ => tp
}
}
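
The decision table in the comment above can be checked with a small standalone sketch (illustrative code, not the compiler's implementation):

object WildcardApprox {
  sealed trait Pick
  case object Lo extends Pick
  case object Hi extends Pick

  // Same condition as in the approximation above: pick the bound that is known
  // to conform to the wildcard at the given variance.
  def pick(fromBelow: Boolean, variance: Int): Pick =
    if (variance == 0 || fromBelow == (variance < 0)) Lo else Hi

  def main(args: Array[String]): Unit = {
    // Rows: from below / from above; columns: variance -1, 0, 1.
    for (fromBelow <- List(true, false))
      println(List(-1, 0, 1).map(v => pick(fromBelow, v)).mkString(" "))
    // prints: Lo Lo Hi
    //         Hi Lo Lo
  }
}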
diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala
index 50746c61d..541d66306 100644
--- a/src/dotty/tools/dotc/core/Definitions.scala
+++ b/src/dotty/tools/dotc/core/Definitions.scala
@@ -140,6 +140,16 @@ class Definitions {
lazy val Sys_errorR = SysPackage.moduleClass.requiredMethodRef(nme.error)
def Sys_error(implicit ctx: Context) = Sys_errorR.symbol
+ /** The `scalaShadowing` package is used to safely modify classes and
+ * objects in scala so that they can be used from dotty. They will
+ * be visible as members of the `scala` package, replacing any objects
+ * or classes with the same name. But their binary artifacts are
+ * in `scalaShadowing` so they don't clash with the same-named `scala`
+ * members at runtime.
+ */
+ lazy val ScalaShadowingPackageVal = ctx.requiredPackage("scalaShadowing")
+ lazy val ScalaShadowingPackageClass = ScalaShadowingPackageVal.moduleClass.asClass
+
/** Note: We cannot have same named methods defined in Object and Any (and AnyVal, for that matter)
* because after erasure the Any and AnyVal references get remapped to the Object methods
* which would result in a double binding assertion failure.
@@ -410,8 +420,6 @@ class Definitions {
lazy val StringAdd_plusR = StringAddClass.requiredMethodRef(nme.raw.PLUS)
def StringAdd_+(implicit ctx: Context) = StringAdd_plusR.symbol
- lazy val PairType: TypeRef = ctx.requiredClassRef("dotty.Pair")
- def PairClass(implicit ctx: Context) = PairType.symbol.asClass
lazy val PartialFunctionType: TypeRef = ctx.requiredClassRef("scala.PartialFunction")
def PartialFunctionClass(implicit ctx: Context) = PartialFunctionType.symbol.asClass
lazy val AbstractPartialFunctionType: TypeRef = ctx.requiredClassRef("scala.runtime.AbstractPartialFunction")
@@ -450,17 +458,17 @@ class Definitions {
def StaticAnnotationClass(implicit ctx: Context) = StaticAnnotationType.symbol.asClass
// Annotation classes
- lazy val AliasAnnotType = ctx.requiredClassRef("dotty.annotation.internal.Alias")
+ lazy val AliasAnnotType = ctx.requiredClassRef("scala.annotation.internal.Alias")
def AliasAnnot(implicit ctx: Context) = AliasAnnotType.symbol.asClass
- lazy val AnnotationDefaultAnnotType = ctx.requiredClassRef("dotty.annotation.internal.AnnotationDefault")
+ lazy val AnnotationDefaultAnnotType = ctx.requiredClassRef("scala.annotation.internal.AnnotationDefault")
def AnnotationDefaultAnnot(implicit ctx: Context) = AnnotationDefaultAnnotType.symbol.asClass
- lazy val BodyAnnotType = ctx.requiredClassRef("dotty.annotation.internal.Body")
+ lazy val BodyAnnotType = ctx.requiredClassRef("scala.annotation.internal.Body")
def BodyAnnot(implicit ctx: Context) = BodyAnnotType.symbol.asClass
- lazy val ChildAnnotType = ctx.requiredClassRef("dotty.annotation.internal.Child")
+ lazy val ChildAnnotType = ctx.requiredClassRef("scala.annotation.internal.Child")
def ChildAnnot(implicit ctx: Context) = ChildAnnotType.symbol.asClass
- lazy val CovariantBetweenAnnotType = ctx.requiredClassRef("dotty.annotation.internal.CovariantBetween")
+ lazy val CovariantBetweenAnnotType = ctx.requiredClassRef("scala.annotation.internal.CovariantBetween")
def CovariantBetweenAnnot(implicit ctx: Context) = CovariantBetweenAnnotType.symbol.asClass
- lazy val ContravariantBetweenAnnotType = ctx.requiredClassRef("dotty.annotation.internal.ContravariantBetween")
+ lazy val ContravariantBetweenAnnotType = ctx.requiredClassRef("scala.annotation.internal.ContravariantBetween")
def ContravariantBetweenAnnot(implicit ctx: Context) = ContravariantBetweenAnnotType.symbol.asClass
lazy val DeprecatedAnnotType = ctx.requiredClassRef("scala.deprecated")
def DeprecatedAnnot(implicit ctx: Context) = DeprecatedAnnotType.symbol.asClass
@@ -468,9 +476,9 @@ class Definitions {
def ImplicitNotFoundAnnot(implicit ctx: Context) = ImplicitNotFoundAnnotType.symbol.asClass
lazy val InlineAnnotType = ctx.requiredClassRef("scala.inline")
def InlineAnnot(implicit ctx: Context) = InlineAnnotType.symbol.asClass
- lazy val InlineParamAnnotType = ctx.requiredClassRef("dotty.annotation.internal.InlineParam")
+ lazy val InlineParamAnnotType = ctx.requiredClassRef("scala.annotation.internal.InlineParam")
def InlineParamAnnot(implicit ctx: Context) = InlineParamAnnotType.symbol.asClass
- lazy val InvariantBetweenAnnotType = ctx.requiredClassRef("dotty.annotation.internal.InvariantBetween")
+ lazy val InvariantBetweenAnnotType = ctx.requiredClassRef("scala.annotation.internal.InvariantBetween")
def InvariantBetweenAnnot(implicit ctx: Context) = InvariantBetweenAnnotType.symbol.asClass
lazy val MigrationAnnotType = ctx.requiredClassRef("scala.annotation.migration")
def MigrationAnnot(implicit ctx: Context) = MigrationAnnotType.symbol.asClass
@@ -478,9 +486,9 @@ class Definitions {
def NativeAnnot(implicit ctx: Context) = NativeAnnotType.symbol.asClass
lazy val RemoteAnnotType = ctx.requiredClassRef("scala.remote")
def RemoteAnnot(implicit ctx: Context) = RemoteAnnotType.symbol.asClass
- lazy val RepeatedAnnotType = ctx.requiredClassRef("dotty.annotation.internal.Repeated")
+ lazy val RepeatedAnnotType = ctx.requiredClassRef("scala.annotation.internal.Repeated")
def RepeatedAnnot(implicit ctx: Context) = RepeatedAnnotType.symbol.asClass
- lazy val SourceFileAnnotType = ctx.requiredClassRef("dotty.annotation.internal.SourceFile")
+ lazy val SourceFileAnnotType = ctx.requiredClassRef("scala.annotation.internal.SourceFile")
def SourceFileAnnot(implicit ctx: Context) = SourceFileAnnotType.symbol.asClass
lazy val ScalaSignatureAnnotType = ctx.requiredClassRef("scala.reflect.ScalaSignature")
def ScalaSignatureAnnot(implicit ctx: Context) = ScalaSignatureAnnotType.symbol.asClass
@@ -510,7 +518,7 @@ class Definitions {
def UncheckedStableAnnot(implicit ctx: Context) = UncheckedStableAnnotType.symbol.asClass
lazy val UncheckedVarianceAnnotType = ctx.requiredClassRef("scala.annotation.unchecked.uncheckedVariance")
def UncheckedVarianceAnnot(implicit ctx: Context) = UncheckedVarianceAnnotType.symbol.asClass
- lazy val UnsafeNonvariantAnnotType = ctx.requiredClassRef("dotty.annotation.internal.UnsafeNonvariant")
+ lazy val UnsafeNonvariantAnnotType = ctx.requiredClassRef("scala.annotation.internal.UnsafeNonvariant")
def UnsafeNonvariantAnnot(implicit ctx: Context) = UnsafeNonvariantAnnotType.symbol.asClass
lazy val VolatileAnnotType = ctx.requiredClassRef("scala.volatile")
def VolatileAnnot(implicit ctx: Context) = VolatileAnnotType.symbol.asClass
@@ -781,6 +789,11 @@ class Definitions {
if (!_isInitialized) {
// force initialization of every symbol that is synthesized or hijacked by the compiler
val forced = syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses()
+
+ // Enter all symbols from the scalaShadowing package in the scala package
+ for (m <- ScalaShadowingPackageClass.info.decls)
+ ScalaPackageClass.enter(m)
+
_isInitialized = true
}
}
diff --git a/src/dotty/tools/dotc/core/Flags.scala b/src/dotty/tools/dotc/core/Flags.scala
index 3f4433708..63fbc98dc 100644
--- a/src/dotty/tools/dotc/core/Flags.scala
+++ b/src/dotty/tools/dotc/core/Flags.scala
@@ -544,6 +544,9 @@ object Flags {
/** An inline method */
final val InlineMethod = allOf(Inline, Method)
+ /** An inline parameter */
+ final val InlineParam = allOf(Inline, Param)
+
/** A parameter or parameter accessor */
final val ParamOrAccessor = Param | ParamAccessor
diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala
index b495f00d0..1980fe50d 100644
--- a/src/dotty/tools/dotc/core/TypeComparer.scala
+++ b/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -965,28 +965,29 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
* Test that the resulting bounds are still satisfiable.
*/
private def narrowGADTBounds(tr: NamedType, bound: Type, isUpper: Boolean): Boolean =
- ctx.mode.is(Mode.GADTflexible) && {
- val tparam = tr.symbol
- typr.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.isRef(tparam)}")
- if (bound.isRef(tparam)) false
- else bound match {
- case bound: TypeRef
- if bound.symbol.is(BindDefinedType) && ctx.gadt.bounds.contains(bound.symbol) &&
- !tr.symbol.is(BindDefinedType) =>
- // Avoid having pattern-bound types in gadt bounds,
- // as these might be eliminated once the pattern is typechecked.
- // Pattern-bound type symbols should be narrowed first, only if that fails
- // should symbols in the environment be constrained.
- narrowGADTBounds(bound, tr, !isUpper)
- case _ =>
- val oldBounds = ctx.gadt.bounds(tparam)
- val newBounds =
- if (isUpper) TypeBounds(oldBounds.lo, oldBounds.hi & bound)
- else TypeBounds(oldBounds.lo | bound, oldBounds.hi)
- isSubType(newBounds.lo, newBounds.hi) &&
- { ctx.gadt.setBounds(tparam, newBounds); true }
+ ctx.mode.is(Mode.GADTflexible) && !frozenConstraint && {
+ val tparam = tr.symbol
+ typr.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.isRef(tparam)}")
+ if (bound.isRef(tparam)) false
+ else bound match {
+ case bound: TypeRef
+ if bound.symbol.is(BindDefinedType) &&
+ ctx.gadt.bounds.contains(bound.symbol) &&
+ !tr.symbol.is(BindDefinedType) =>
+ // Avoid having pattern-bound types in gadt bounds,
+ // as these might be eliminated once the pattern is typechecked.
+ // Pattern-bound type symbols should be narrowed first, only if that fails
+ // should symbols in the environment be constrained.
+ narrowGADTBounds(bound, tr, !isUpper)
+ case _ =>
+ val oldBounds = ctx.gadt.bounds(tparam)
+ val newBounds =
+ if (isUpper) TypeBounds(oldBounds.lo, oldBounds.hi & bound)
+ else TypeBounds(oldBounds.lo | bound, oldBounds.hi)
+ isSubType(newBounds.lo, newBounds.hi) &&
+ { ctx.gadt.setBounds(tparam, newBounds); true }
+ }
}
- }
// Tests around `matches`
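
A user-level example of the kind of program that exercises this code path (illustrative only): matching on a GADT constructor narrows the bounds recorded for the enclosing type parameter.

sealed trait Expr[T]
case class IntLit(i: Int)      extends Expr[Int]
case class BoolLit(b: Boolean) extends Expr[Boolean]

object Eval {
  def eval[T](e: Expr[T]): T = e match {
    case IntLit(i)  => i   // matching IntLit narrows T's GADT bounds so that Int conforms to T
    case BoolLit(b) => b   // likewise for Boolean here
  }
}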
diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala
index d242843e5..38913a7d0 100644
--- a/src/dotty/tools/dotc/core/Types.scala
+++ b/src/dotty/tools/dotc/core/Types.scala
@@ -239,9 +239,14 @@ object Types {
/** The parts of this type which are type or term refs and which
* satisfy predicate `p`.
+ *
+ * @param p The predicate to satisfy
+ * @param excludeLowerBounds If set to true, the lower bounds of abstract
+ * types will be ignored.
*/
- def namedPartsWith(p: NamedType => Boolean)(implicit ctx: Context): collection.Set[NamedType] =
- new NamedPartsAccumulator(p).apply(mutable.LinkedHashSet(), this)
+ def namedPartsWith(p: NamedType => Boolean, excludeLowerBounds: Boolean = false)
+ (implicit ctx: Context): collection.Set[NamedType] =
+ new NamedPartsAccumulator(p, excludeLowerBounds).apply(mutable.LinkedHashSet(), this)
/** Map function `f` over elements of an AndType, rebuilding with function `g` */
def mapReduceAnd[T](f: Type => T)(g: (T, T) => T)(implicit ctx: Context): T = stripTypeVar match {
@@ -615,13 +620,13 @@ object Types {
/** The set of abstract term members of this type. */
final def abstractTermMembers(implicit ctx: Context): Seq[SingleDenotation] = track("abstractTermMembers") {
memberDenots(abstractTermNameFilter,
- (name, buf) => buf ++= member(name).altsWith(_ is Deferred))
+ (name, buf) => buf ++= nonPrivateMember(name).altsWith(_ is Deferred))
}
/** The set of abstract type members of this type. */
final def abstractTypeMembers(implicit ctx: Context): Seq[SingleDenotation] = track("abstractTypeMembers") {
memberDenots(abstractTypeNameFilter,
- (name, buf) => buf += member(name).asSingleDenotation)
+ (name, buf) => buf += nonPrivateMember(name).asSingleDenotation)
}
/** The set of abstract type members of this type. */
@@ -3710,7 +3715,8 @@ object Types {
def apply(x: Boolean, tp: Type) = x || tp.isUnsafeNonvariant || foldOver(x, tp)
}
- class NamedPartsAccumulator(p: NamedType => Boolean)(implicit ctx: Context) extends TypeAccumulator[mutable.Set[NamedType]] {
+ class NamedPartsAccumulator(p: NamedType => Boolean, excludeLowerBounds: Boolean = false)
+ (implicit ctx: Context) extends TypeAccumulator[mutable.Set[NamedType]] {
override def stopAtStatic = false
def maybeAdd(x: mutable.Set[NamedType], tp: NamedType) = if (p(tp)) x += tp else x
val seen: mutable.Set[Type] = mutable.Set()
@@ -3723,7 +3729,8 @@ object Types {
apply(foldOver(maybeAdd(x, tp), tp), tp.underlying)
case tp: TypeRef =>
foldOver(maybeAdd(x, tp), tp)
- case TypeBounds(_, hi) =>
+ case TypeBounds(lo, hi) =>
+ if (!excludeLowerBounds) apply(x, lo)
apply(x, hi)
case tp: ThisType =>
apply(x, tp.tref)
@@ -3756,7 +3763,7 @@ object Types {
object abstractTypeNameFilter extends NameFilter {
def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
name.isTypeName && {
- val mbr = pre.member(name)
+ val mbr = pre.nonPrivateMember(name)
(mbr.symbol is Deferred) && mbr.info.isInstanceOf[RealTypeBounds]
}
}
@@ -3773,7 +3780,7 @@ object Types {
/** A filter for names of deferred term definitions of a given type */
object abstractTermNameFilter extends NameFilter {
def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
- name.isTermName && (pre member name).hasAltWith(_.symbol is Deferred)
+ name.isTermName && pre.nonPrivateMember(name).hasAltWith(_.symbol is Deferred)
}
object typeNameFilter extends NameFilter {
diff --git a/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
index 69fd63805..3ff7298ce 100644
--- a/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
+++ b/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
@@ -12,6 +12,7 @@ import TastyName._
import TastyFormat._
class NameBuffer extends TastyBuffer(10000) {
+ import NameBuffer._
private val nameRefs = new mutable.LinkedHashMap[TastyName, NameRef]
@@ -40,13 +41,12 @@ class NameBuffer extends TastyBuffer(10000) {
nameIndex(name)
}
- private def withLength(op: => Unit): Unit = {
+ private def withLength(op: => Unit, lengthWidth: Int = 1): Unit = {
val lengthAddr = currentAddr
- writeByte(0)
+ for (i <- 0 until lengthWidth) writeByte(0)
op
- val length = currentAddr.index - lengthAddr.index - 1
+ val length = currentAddr.index - lengthAddr.index - lengthWidth
- assert(length < 128)
- putNat(lengthAddr, length, 1)
+ putNat(lengthAddr, length, lengthWidth)
}
def writeNameRef(ref: NameRef) = writeNat(ref.index)
@@ -64,7 +64,9 @@ class NameBuffer extends TastyBuffer(10000) {
withLength { writeNameRef(qualified); writeNameRef(selector) }
case Signed(original, params, result) =>
writeByte(SIGNED)
- withLength { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) }
+ withLength(
+ { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) },
+ if ((params.length + 2) * maxIndexWidth <= maxNumInByte) 1 else 2)
case Expanded(prefix, original) =>
writeByte(EXPANDED)
withLength { writeNameRef(prefix); writeNameRef(original) }
@@ -91,3 +93,9 @@ class NameBuffer extends TastyBuffer(10000) {
}
}
}
+
+object NameBuffer {
+ private val maxIndexWidth = 3 // allows name indices up to 2^21.
+ private val payloadBitsPerByte = 7 // determined by nat encoding in TastyBuffer
+ private val maxNumInByte = (1 << payloadBitsPerByte) - 1
+}
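
The constants above feed a worst-case size estimate: each name reference is a TASTy natural number of at most maxIndexWidth bytes, so a Signed entry with n parameter refs plus the original and result refs needs at most (n + 2) * 3 payload bytes, and one length byte suffices only while that stays below 128. A standalone sketch of the arithmetic (illustrative, not the TASTy implementation):

object NatSizeSketch {
  // Bytes needed to store x in the base-128 encoding used for naturals.
  def natWidth(x: Int): Int = {
    var n = x; var w = 1
    while (n >= 128) { n >>= 7; w += 1 }
    w
  }

  // Mirrors the decision made when pickling a Signed name above.
  def lengthWidthForSigned(paramCount: Int, maxIndexWidth: Int = 3): Int =
    if ((paramCount + 2) * maxIndexWidth <= 127) 1 else 2

  def main(args: Array[String]): Unit = {
    println(natWidth(127))            // 1
    println(natWidth(128))            // 2
    println(lengthWidthForSigned(40)) // 1  (42 * 3 = 126 fits in one length byte)
    println(lengthWidthForSigned(41)) // 2  (43 * 3 = 129 does not)
  }
}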
diff --git a/src/dotty/tools/dotc/parsing/JavaParsers.scala b/src/dotty/tools/dotc/parsing/JavaParsers.scala
index ed7cf9e3f..0be4226ed 100644
--- a/src/dotty/tools/dotc/parsing/JavaParsers.scala
+++ b/src/dotty/tools/dotc/parsing/JavaParsers.scala
@@ -131,7 +131,7 @@ object JavaParsers {
def makeSyntheticParam(count: Int, tpt: Tree): ValDef =
makeParam(nme.syntheticParamName(count), tpt)
def makeParam(name: TermName, tpt: Tree): ValDef =
- ValDef(name, tpt, EmptyTree).withMods(Modifiers(Flags.JavaDefined | Flags.PrivateLocalParamAccessor))
+ ValDef(name, tpt, EmptyTree).withMods(Modifiers(Flags.JavaDefined | Flags.ParamAccessor))
def makeConstructor(formals: List[Tree], tparams: List[TypeDef], flags: FlagSet = Flags.JavaDefined) = {
val vparams = mapWithIndex(formals)((p, i) => makeSyntheticParam(i + 1, p))
diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala
index 507a2e80c..8fc99f072 100644
--- a/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -334,7 +334,7 @@ object Parsers {
try op
finally {
placeholderParams match {
- case vd :: _ => syntaxError("unbound placeholder parameter", vd.pos)
+ case vd :: _ => syntaxError(UnboundPlaceholderParameter(), vd.pos)
case _ =>
}
placeholderParams = savedPlaceholderParams
@@ -623,7 +623,7 @@ object Parsers {
if (inPattern) Block(Nil, inBraces(pattern()))
else expr()
else {
- syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected")
+ ctx.error(InterpolatedStringError())
EmptyTree
}
})
@@ -1097,8 +1097,9 @@ object Parsers {
/** Expr ::= implicit Id `=>' Expr
* BlockResult ::= implicit Id [`:' InfixType] `=>' Block
*/
- def implicitClosure(start: Int, location: Location.Value): Tree = {
- val mods = atPos(start) { Modifiers(Implicit) }
+ def implicitClosure(start: Int, location: Location.Value, implicitMod: Option[Mod] = None): Tree = {
+ var mods = atPos(start) { Modifiers(Implicit) }
+ if (implicitMod.nonEmpty) mods = mods.withAddedMod(implicitMod.get)
val id = termIdent()
val paramExpr =
if (location == Location.InBlock && in.token == COLON)
@@ -1177,7 +1178,7 @@ object Parsers {
case _ =>
if (isLiteral) literal()
else {
- syntaxErrorOrIncomplete("illegal start of simple expression")
+ syntaxErrorOrIncomplete(IllegalStartSimpleExpr(tokenString(in.token)))
errorTermTree
}
}
@@ -1326,7 +1327,7 @@ object Parsers {
if (in.token == YIELD) { in.nextToken(); ForYield(enums, expr()) }
else if (in.token == DO) { in.nextToken(); ForDo(enums, expr()) }
else {
- if (!wrappedEnums) syntaxErrorOrIncomplete("`yield' or `do' expected")
+ if (!wrappedEnums) syntaxErrorOrIncomplete(YieldOrDoExpectedInForComprehension())
ForDo(enums, expr())
}
}
@@ -1464,19 +1465,19 @@ object Parsers {
/* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */
- private def flagOfToken(tok: Int): FlagSet = tok match {
- case ABSTRACT => Abstract
- case FINAL => Final
- case IMPLICIT => ImplicitCommon
- case INLINE => Inline
- case LAZY => Lazy
- case OVERRIDE => Override
- case PRIVATE => Private
- case PROTECTED => Protected
- case SEALED => Sealed
+ private def modOfToken(tok: Int): Mod = tok match {
+ case ABSTRACT => Mod.Abstract()
+ case FINAL => Mod.Final()
+ case IMPLICIT => Mod.Implicit(ImplicitCommon)
+ case INLINE => Mod.Inline()
+ case LAZY => Mod.Lazy()
+ case OVERRIDE => Mod.Override()
+ case PRIVATE => Mod.Private()
+ case PROTECTED => Mod.Protected()
+ case SEALED => Mod.Sealed()
}
- /** Drop `private' modifier when followed by a qualifier.
+ /** Drop `private' modifier when followed by a qualifier.
* Contract `abstract' and `override' to ABSOVERRIDE
*/
private def normalize(mods: Modifiers): Modifiers =
@@ -1488,11 +1489,11 @@ object Parsers {
mods
private def addModifier(mods: Modifiers): Modifiers = {
- val flag = flagOfToken(in.token)
- if (mods is flag) syntaxError("repeated modifier")
- val res = addFlag(mods, flag)
- in.nextToken()
- res
+ val tok = in.token
+ val mod = atPos(in.skipToken()) { modOfToken(tok) }
+
+ if (mods is mod.flags) syntaxError(RepeatedModifier(mod.flags.toString))
+ addMod(mods, mod)
}
private def compatible(flags1: FlagSet, flags2: FlagSet): Boolean = (
@@ -1518,6 +1519,11 @@ object Parsers {
}
}
+ /** Always add the syntactic `mod`, but check and conditionally add semantic `mod.flags`
+ */
+ def addMod(mods: Modifiers, mod: Mod): Modifiers =
+ addFlag(mods, mod.flags).withAddedMod(mod)
+
/** AccessQualifier ::= "[" (Id | this) "]"
*/
def accessQualifierOpt(mods: Modifiers): Modifiers =
@@ -1614,8 +1620,8 @@ object Parsers {
mods =
atPos(start, in.offset) {
if (in.token == TYPE) {
- in.nextToken()
- mods | Param | ParamAccessor
+ val mod = atPos(in.skipToken()) { Mod.Type() }
+ (mods | Param | ParamAccessor).withAddedMod(mod)
} else {
if (mods.hasFlags) syntaxError("`type' expected")
mods | Param | PrivateLocal
@@ -1659,7 +1665,7 @@ object Parsers {
* Param ::= id `:' ParamType [`=' Expr]
*/
def paramClauses(owner: Name, ofCaseClass: Boolean = false): List[List[ValDef]] = {
- var implicitFlag = EmptyFlags
+ var implicitMod: Mod = null
var firstClauseOfCaseClass = ofCaseClass
var implicitOffset = -1 // use once
def param(): ValDef = {
@@ -1670,11 +1676,11 @@ object Parsers {
mods =
atPos(start, in.offset) {
if (in.token == VAL) {
- in.nextToken()
- mods
+ val mod = atPos(in.skipToken()) { Mod.Val() }
+ mods.withAddedMod(mod)
} else if (in.token == VAR) {
- in.nextToken()
- addFlag(mods, Mutable)
+ val mod = atPos(in.skipToken()) { Mod.Var() }
+ addMod(mods, mod)
} else {
if (!(mods.flags &~ (ParamAccessor | Inline)).isEmpty)
syntaxError("`val' or `var' expected")
@@ -1696,7 +1702,7 @@ object Parsers {
if (in.token == ARROW) {
if (owner.isTypeName && !(mods is Local))
syntaxError(s"${if (mods is Mutable) "`var'" else "`val'"} parameters may not be call-by-name")
- else if (!implicitFlag.isEmpty)
+ else if (implicitMod != null)
syntaxError("implicit parameters may not be call-by-name")
}
paramType()
@@ -1708,15 +1714,16 @@ object Parsers {
mods = mods.withPos(mods.pos.union(Position(implicitOffset, implicitOffset)))
implicitOffset = -1
}
- ValDef(name, tpt, default).withMods(addFlag(mods, implicitFlag))
+ if (implicitMod != null) mods = addMod(mods, implicitMod)
+ ValDef(name, tpt, default).withMods(mods)
}
}
def paramClause(): List[ValDef] = inParens {
if (in.token == RPAREN) Nil
else {
if (in.token == IMPLICIT) {
- implicitOffset = in.skipToken()
- implicitFlag = Implicit
+ implicitOffset = in.offset
+ implicitMod = atPos(in.skipToken()) { Mod.Implicit(Implicit) }
}
commaSeparated(param)
}
@@ -1726,7 +1733,7 @@ object Parsers {
if (in.token == LPAREN)
paramClause() :: {
firstClauseOfCaseClass = false
- if (implicitFlag.isEmpty) clauses() else Nil
+ if (implicitMod == null) clauses() else Nil
}
else Nil
}
@@ -1819,9 +1826,13 @@ object Parsers {
*/
def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match {
case VAL =>
- patDefOrDcl(start, posMods(start, mods), in.getDocComment(start))
+ val mod = atPos(in.skipToken()) { Mod.Val() }
+ val mods1 = mods.withAddedMod(mod)
+ patDefOrDcl(start, mods1, in.getDocComment(start))
case VAR =>
- patDefOrDcl(start, posMods(start, addFlag(mods, Mutable)), in.getDocComment(start))
+ val mod = atPos(in.skipToken()) { Mod.Var() }
+ val mod1 = addMod(mods, mod)
+ patDefOrDcl(start, mod1, in.getDocComment(start))
case DEF =>
defDefOrDcl(start, posMods(start, mods), in.getDocComment(start))
case TYPE =>
@@ -1898,7 +1909,7 @@ object Parsers {
else EmptyTree
}
else {
- if (!isExprIntro) syntaxError("missing return type", in.lastOffset)
+ if (!isExprIntro) syntaxError(MissingReturnType(), in.lastOffset)
accept(EQUALS)
expr()
}
@@ -2061,7 +2072,7 @@ object Parsers {
def templateBody(): (ValDef, List[Tree]) = {
val r = inDefScopeBraces { templateStatSeq() }
if (in.token == WITH) {
- syntaxError("early definitions are not supported; use trait parameters instead")
+ syntaxError(EarlyDefinitionsNotSupported())
in.nextToken()
template(emptyConstructor)
}
@@ -2184,8 +2195,11 @@ object Parsers {
stats.toList
}
- def localDef(start: Int, implicitFlag: FlagSet): Tree =
- defOrDcl(start, addFlag(defAnnotsMods(localModifierTokens), implicitFlag))
+ def localDef(start: Int, implicitFlag: FlagSet, implicitMod: Option[Mod] = None): Tree = {
+ var mods = addFlag(defAnnotsMods(localModifierTokens), implicitFlag)
+ if (implicitMod.nonEmpty) mods = mods.withAddedMod(implicitMod.get)
+ defOrDcl(start, mods)
+ }
/** BlockStatSeq ::= { BlockStat semi } [ResultExpr]
* BlockStat ::= Import
@@ -2205,9 +2219,10 @@ object Parsers {
stats += expr(Location.InBlock)
else if (isDefIntro(localModifierTokens))
if (in.token == IMPLICIT) {
- val start = in.skipToken()
- if (isIdent) stats += implicitClosure(start, Location.InBlock)
- else stats += localDef(start, ImplicitCommon)
+ val start = in.offset
+ val mod = atPos(in.skipToken()) { Mod.Implicit(ImplicitCommon) }
+ if (isIdent) stats += implicitClosure(start, Location.InBlock, Some(mod))
+ else stats += localDef(start, ImplicitCommon, Some(mod))
} else {
stats += localDef(in.offset, EmptyFlags)
}
diff --git a/src/dotty/tools/dotc/reporting/diagnostic/messages.scala b/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
index 9cfac4801..303ab0437 100644
--- a/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
+++ b/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
@@ -5,11 +5,12 @@ package diagnostic
import dotc.core._
import Contexts.Context, Decorators._, Symbols._, Names._, Types._
+import ast.untpd.{Modifiers, ModuleDef}
import util.{SourceFile, NoSource}
import util.{SourcePosition, NoSourcePosition}
import config.Settings.Setting
import interfaces.Diagnostic.{ERROR, WARNING, INFO}
-import printing.SyntaxHighlighting._
+import printing.Highlighting._
import printing.Formatting
object messages {
@@ -274,4 +275,273 @@ object messages {
val explanation = ""
}
+
+ case class EarlyDefinitionsNotSupported()(implicit ctx:Context) extends Message(9) {
+ val kind = "Syntax"
+
+ val msg = "early definitions are not supported; use trait parameters instead"
+
+ val code1 =
+ """|trait Logging {
+ | val f: File
+ | f.open()
+ | onExit(f.close())
+ | def log(msg: String) = f.write(msg)
+ |}
+ |
+ |class B extends Logging {
+ | val f = new File("log.data") // triggers a null pointer exception
+ |}
+ |
+ |class C extends {
+ | val f = new File("log.data") // early definition gets around the null pointer exception
+ |} with Logging""".stripMargin
+
+ val code2 =
+ """|trait Logging(f: File) {
+ | f.open()
+ | onExit(f.close())
+ | def log(msg: String) = f.write(msg)
+ |}
+ |
+ |class C extends Logging(new File("log.data"))""".stripMargin
+
+ val explanation =
+ hl"""Earlier versions of Scala did not support trait parameters and "early definitions" (also known as "early initializers")
+ |were used as an alternative.
+ |
+ |Example of old syntax:
+ |
+ |$code1
+ |
+ |The above code can now be written as:
+ |
+ |$code2
+ |""".stripMargin
+ }
+
+ def implicitClassRestrictionsText(implicit ctx: Context) =
+ hl"""${NoColor("For a full list of restrictions on implicit classes visit")}
+ | ${Blue("http://docs.scala-lang.org/overviews/core/implicit-classes.html")}""".stripMargin
+
+ case class TopLevelImplicitClass(cdef: untpd.TypeDef)(implicit ctx: Context)
+ extends Message(10) {
+ val kind = "Syntax"
+
+ val msg = hl"""|An ${"implicit class"} may not be top-level"""
+
+ val explanation = {
+ val TypeDef(name, impl @ Template(constr0, parents, self, _)) = cdef
+ val exampleArgs = constr0.vparamss(0).map(_.withMods(Modifiers()).show).mkString(", ")
+ def defHasBody[T] = impl.body.exists(!_.isEmpty)
+ val exampleBody = if (defHasBody) "{\n ...\n }" else ""
+ hl"""|There may not be any method, member or object in scope with the same name as the
+ |implicit class and a case class automatically gets a companion object with the same name
+ |created by the compiler which would cause a naming conflict if it were allowed.
+ |
+ |""".stripMargin + implicitClassRestrictionsText + hl"""|
+ |
+ |To resolve the conflict declare ${cdef.name} inside of an ${"object"} then import the class
+ |from the object at the use site if needed, for example:
+ |
+ |object Implicits {
+ | implicit class ${cdef.name}($exampleArgs)$exampleBody
+ |}
+ |
+ |// At the use site:
+ |import Implicits.${cdef.name}""".stripMargin
+ }
+ }
+
+ case class ImplicitCaseClass(cdef: untpd.TypeDef)(implicit ctx: Context)
+ extends Message(11) {
+ val kind = "Syntax"
+
+ val msg = hl"""|A ${"case class"} may not be defined as ${"implicit"}"""
+
+ val explanation =
+ hl"""|implicit classes may not be case classes. Instead use a plain class:
+ | example: implicit class ${cdef.name}...
+ |
+ |""".stripMargin + implicitClassRestrictionsText
+ }
+
+ case class ObjectMayNotHaveSelfType(mdef: untpd.ModuleDef)(implicit ctx: Context)
+ extends Message(12) {
+ val kind = "Syntax"
+
+ val msg = hl"""|${"objects"} must not have a ${"self type"}"""
+
+ val explanation = {
+ val ModuleDef(name, tmpl) = mdef
+ val ValDef(_, selfTpt, _) = tmpl.self
+ hl"""|objects must not have a ${"self type"}:
+ |
+ |Consider these alternative solutions:
+ | - Create a trait or a class instead of an object
+ | - Let the object extend a trait containing the self type:
+ | example: object $name extends ${selfTpt.show}""".stripMargin
+ }
+ }
+
+ case class TupleTooLong(ts: List[untpd.Tree])(implicit ctx: Context)
+ extends Message(13) {
+ import Definitions.MaxTupleArity
+ val kind = "Syntax"
+
+ val msg = hl"""|A ${"tuple"} cannot have more than ${MaxTupleArity} members"""
+
+ val explanation = {
+ val members = ts.map(_.showSummary).grouped(MaxTupleArity)
+ val nestedRepresentation = members.map(_.mkString(", ")).mkString(")(")
+ hl"""|This restriction will be removed in the future.
+ |Currently it is possible to use nested tuples when more than ${MaxTupleArity} members are needed, for example:
+ |
+ | ((${nestedRepresentation}))""".stripMargin
+ }
+ }
+
+ case class RepeatedModifier(modifier: String)(implicit ctx:Context) extends Message(14) {
+ val kind = "Syntax"
+
+ val msg = hl"""repeated modifier $modifier"""
+
+ val code1 = hl"""private private val Origin = Point(0, 0)"""
+
+ val code2 = hl"""private final val Origin = Point(0, 0)"""
+
+ val explanation =
+ hl"""This happens when you accidentally specify the same modifier twice.
+ |
+ |Example:
+ |
+ |$code1
+ |
+ |instead of
+ |
+ |$code2
+ |
+ |""".stripMargin
+ }
+
+ case class InterpolatedStringError()(implicit ctx:Context) extends Message(15) {
+ val kind = "Syntax"
+
+ val msg = "error in interpolated string: identifier or block expected"
+
+ val code1 = "s\"$new Point(0, 0)\""
+
+ val code2 = "s\"${new Point(0, 0)}\""
+
+ val explanation =
+ hl"""
+ |This usually happens when you forget to place your expressions inside curly braces.
+ |
+ |$code1
+ |
+ |should be written as
+ |
+ |$code2
+ |
+ |""".stripMargin
+
+ }
+
+ case class UnboundPlaceholderParameter()(implicit ctx:Context)
+ extends Message(16) {
+ val kind = "Syntax"
+
+ val msg = hl"unbound placeholder parameter; incorrect use of `_`"
+
+ val explanation =
+ hl"""The `_` placeholder syntax was used where it could not be bound.
+ |Consider explicitly writing the variable binding.
+ |
+ |This can be done by replacing `_` with a variable (e.g. `x`)
+ |and adding ${"x =>"} where applicable.
+ |
+ |Example before:
+ |
+ |${"{ _ }"}
+ |
+ |Example after:
+ |
+ |${"x => { x }"}
+ |
+ |Another common occurrence for this error is defining a val with `_`:
+ |
+ |${"val a = _"}
+ |
+ |But this val definition isn't very useful: it can never be assigned
+ |another value and will therefore always remain uninitialized.
+ |Consider replacing the ${"val"} with ${"var"}:
+ |
+ |${"var a = _"}
+ |
+ |Note that this use of `_` is not placeholder syntax,
+ |but an uninitialized var definition.
+ """.stripMargin
+ }
+
+ case class IllegalStartSimpleExpr(illegalToken: String)(implicit ctx: Context) extends Message(17) {
+ val kind = "Syntax"
+ val msg = "illegal start of simple expression"
+ val explanation = {
+ hl"""|An expression yields a value. In the case of the simple expression, this error
+ |commonly occurs when there's a missing parenthesis or brace. The reason being
+ |that a simple expression is one of the following:
+ |
+ |- Block
+ |- Expression in parentheses
+ |- Identifier
+ |- Object creation
+ |- Literal
+ |
+ |which cannot start with ${Red(illegalToken)}.""".stripMargin
+ }
+ }
+
+ case class MissingReturnType()(implicit ctx:Context) extends Message(18) {
+ val kind = "Syntax"
+ val msg = "missing return type"
+ val explanation =
+ hl"""An abstract declaration must have a return type. For example:
+ |
+ |trait Shape {
+ | def area: Double // abstract declaration returning a ${"Double"}
+ |}""".stripMargin
+ }
+
+ case class YieldOrDoExpectedInForComprehension()(implicit ctx: Context) extends Message(19) {
+ val kind = "Syntax"
+ val msg = hl"${"yield"} or ${"do"} expected"
+
+ val explanation =
+ hl"""When the enumerators in a for comprehension are not placed in parentheses or
+ |braces, a ${"do"} or ${"yield"} statement is required after the enumerators section
+ |of the comprehension.
+ |
+ |You can save some keystrokes by omitting the parentheses and writing
+ |
+ |${"val numbers = for i <- 1 to 3 yield i"}
+ |
+ | instead of
+ |
+ |${"val numbers = for (i <- 1 to 3) yield i"}
+ |
+ |but the ${"yield"} keyword is still required.
+ |
+ |A for comprehension that simply performs a side effect without yielding anything
+ |can also be written without parentheses, but then a ${"do"} keyword has to be included.
+ |For example,
+ |
+ |${"for (i <- 1 to 3) println(i)"}
+ |
+ | can be written as
+ |
+ |${"for i <- 1 to 3 do println(i) // notice the 'do' keyword"}
+ |
+ |""".stripMargin
+ }
+
}
diff --git a/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/src/dotty/tools/dotc/transform/ExtensionMethods.scala
index 62a21198d..5ae4e8a54 100644
--- a/src/dotty/tools/dotc/transform/ExtensionMethods.scala
+++ b/src/dotty/tools/dotc/transform/ExtensionMethods.scala
@@ -32,6 +32,9 @@ import SymUtils._
* in [[ElimErasedValueType]].
* This is different from the implementation of value classes in Scala 2
* (see SIP-15) which uses `asInstanceOf` which does not typecheck.
+ *
+ * Finally, if the constructor of a value class is private or protected
+ * it is widened to public.
*/
class ExtensionMethods extends MiniPhaseTransform with DenotTransformer with FullParameterization { thisTransformer =>
@@ -96,11 +99,18 @@ class ExtensionMethods extends MiniPhaseTransform with DenotTransformer with Ful
case _ =>
moduleClassSym
}
- case ref: SymDenotation
- if isMethodWithExtension(ref) && ref.hasAnnotation(defn.TailrecAnnot) =>
- val ref1 = ref.copySymDenotation()
- ref1.removeAnnotation(defn.TailrecAnnot)
- ref1
+ case ref: SymDenotation =>
+ if (isMethodWithExtension(ref) && ref.hasAnnotation(defn.TailrecAnnot)) {
+ val ref1 = ref.copySymDenotation()
+ ref1.removeAnnotation(defn.TailrecAnnot)
+ ref1
+ }
+ else if (ref.isConstructor && isDerivedValueClass(ref.owner) && ref.is(AccessFlags)) {
+ val ref1 = ref.copySymDenotation()
+ ref1.resetFlag(AccessFlags)
+ ref1
+ }
+ else ref
case _ =>
ref
}
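
An illustrative piece of user code (not compiler internals) that the new case is for: the extension methods generated in the companion module must still be able to call `new Meter(...)` after erasure, so a private or protected value-class constructor is widened at this phase (source-level access checks have already happened by then).

class Meter private (val underlying: Double) extends AnyVal {
  def +(other: Meter): Meter = new Meter(underlying + other.underlying)
}

object Meter {
  def apply(d: Double): Meter = new Meter(d)
}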
diff --git a/src/dotty/tools/dotc/transform/PatternMatcher.scala b/src/dotty/tools/dotc/transform/PatternMatcher.scala
index 49c0eabec..8636d5084 100644
--- a/src/dotty/tools/dotc/transform/PatternMatcher.scala
+++ b/src/dotty/tools/dotc/transform/PatternMatcher.scala
@@ -240,17 +240,21 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {
val isDefined = extractorMemberType(prev.tpe, nme.isDefined)
if ((isDefined isRef defn.BooleanClass) && getTp.exists) {
- val tmpSym = freshSym(prev.pos, prev.tpe, "o")
- val prevValue = ref(tmpSym).select("get".toTermName).ensureApplied
+ // isDefined and get may be overloaded
+ val getDenot = prev.tpe.member(nme.get).suchThat(_.info.isParameterless)
+ val isDefinedDenot = prev.tpe.member(nme.isDefined).suchThat(_.info.isParameterless)
- Block(
- List(ValDef(tmpSym, prev)),
- // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
- ifThenElseZero(
- ref(tmpSym).select(nme.isDefined),
- Block(List(ValDef(b.asTerm, prevValue)), next)
- )
+ val tmpSym = freshSym(prev.pos, prev.tpe, "o")
+ val prevValue = ref(tmpSym).select(getDenot.symbol).ensureApplied
+
+ Block(
+ List(ValDef(tmpSym, prev)),
+ // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
+ ifThenElseZero(
+ ref(tmpSym).select(isDefinedDenot.symbol),
+ Block(List(ValDef(b.asTerm, prevValue)), next)
)
+ )
} else {
assert(defn.isProductSubType(prev.tpe))
val nullCheck: Tree = prev.select(defn.Object_ne).appliedTo(Literal(Constant(null)))
diff --git a/src/dotty/tools/dotc/transform/PostTyper.scala b/src/dotty/tools/dotc/transform/PostTyper.scala
index 51851a589..12d48d98e 100644
--- a/src/dotty/tools/dotc/transform/PostTyper.scala
+++ b/src/dotty/tools/dotc/transform/PostTyper.scala
@@ -275,6 +275,19 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
case tpe => tpe
}
)
+ case Import(expr, selectors) =>
+ val exprTpe = expr.tpe
+ def checkIdent(ident: Ident): Unit = {
+ val name = ident.name.asTermName.encode
+ if (name != nme.WILDCARD && !exprTpe.member(name).exists && !exprTpe.member(name.toTypeName).exists)
+ ctx.error(s"${ident.name} is not a member of ${expr.show}", ident.pos)
+ }
+ selectors.foreach {
+ case ident: Ident => checkIdent(ident)
+ case Thicket((ident: Ident) :: _) => checkIdent(ident)
+ case _ =>
+ }
+ super.transform(tree)
case tree =>
super.transform(tree)
}
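
A small user-level example of what the new check reports (illustrative names):

object A {
  def x: Int = 1
}

object B {
  import A.x   // ok: `x` is a member of A
  import A.y   // now reported: "y is not a member of A"
}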
diff --git a/src/dotty/tools/dotc/transform/TailRec.scala b/src/dotty/tools/dotc/transform/TailRec.scala
index d99a48af3..fde4db811 100644
--- a/src/dotty/tools/dotc/transform/TailRec.scala
+++ b/src/dotty/tools/dotc/transform/TailRec.scala
@@ -143,7 +143,11 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
newOwners = label :: Nil
).transform(rhsSemiTransformed)
})
- Block(List(labelDef), ref(label).appliedToArgss(vparamss0.map(_.map(x=> ref(x.symbol)))))
+ val callIntoLabel = (
+ if (dd.tparams.isEmpty) ref(label)
+ else ref(label).appliedToTypes(dd.tparams.map(_.tpe))
+ ).appliedToArgss(vparamss0.map(_.map(x=> ref(x.symbol))))
+ Block(List(labelDef), callIntoLabel)
}} else {
if (mandatory) ctx.error(
"TailRec optimisation not applicable, method not tail recursive",
diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala
index 7899174f5..3ebae733f 100644
--- a/src/dotty/tools/dotc/typer/Checking.scala
+++ b/src/dotty/tools/dotc/typer/Checking.scala
@@ -482,10 +482,13 @@ trait Checking {
/** Check that `tree` is a pure expression of constant type */
def checkInlineConformant(tree: Tree, what: => String)(implicit ctx: Context): Unit =
- tree.tpe.widenTermRefExpr match {
- case tp: ConstantType if isPureExpr(tree) => // ok
- case tp if defn.isFunctionType(tp) && isPureExpr(tree) => // ok
- case _ => ctx.error(em"$what must be a constant expression or a function", tree.pos)
+ tree.tpe match {
+ case tp: TermRef if tp.symbol.is(InlineParam) => // ok
+ case tp => tp.widenTermRefExpr match {
+ case tp: ConstantType if isPureExpr(tree) => // ok
+ case tp if defn.isFunctionType(tp) && isPureExpr(tree) => // ok
+ case _ => ctx.error(em"$what must be a constant expression or a function", tree.pos)
+ }
}
/** Check that class does not define same symbol twice */
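
A hedged sketch of the case the relaxed check admits, written in current inline syntax (the surface syntax at the time of this change may have differed): forwarding an inline parameter to a position that requires a constant is fine, because after inlining the parameter is replaced by the actual constant argument.

object InlineForward {
  inline def twice(inline n: Int): Int = n + n
  inline def fourTimes(inline n: Int): Int = twice(n) + twice(n) // `n` is an inline parameter

  def main(args: Array[String]): Unit =
    println(fourTimes(3))   // 12
}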
diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala
index d6cf7fb2b..f3dceea71 100644
--- a/src/dotty/tools/dotc/typer/Implicits.scala
+++ b/src/dotty/tools/dotc/typer/Implicits.scala
@@ -343,7 +343,9 @@ trait ImplicitRunInfo { self: RunInfo =>
}
tp.classSymbols(liftingCtx) foreach addClassScope
case _ =>
- for (part <- tp.namedPartsWith(_.isType))
+ // We exclude lower bounds to conform to SLS 7.2:
+ // "The parts of a type T are: [...] if T is an abstract type, the parts of its upper bound"
+ for (part <- tp.namedPartsWith(_.isType, excludeLowerBounds = true))
comps ++= iscopeRefs(part)
}
comps
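
A user-level illustration (hypothetical names) of why lower bounds are excluded: per SLS 7.2 the parts of an abstract type are the parts of its upper bound, so a class that occurs only in a lower bound should not pull its companion's implicits into scope.

class Low
object Low {
  implicit val marker: Low = new Low
}

trait Api {
  type T >: Low   // `Low` appears only in the lower bound of the abstract type T,
                  // so object Low should not contribute to the implicit scope of T
}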
diff --git a/src/dotty/tools/dotc/typer/RefChecks.scala b/src/dotty/tools/dotc/typer/RefChecks.scala
index 4d82a2d12..834bb37a8 100644
--- a/src/dotty/tools/dotc/typer/RefChecks.scala
+++ b/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -487,7 +487,7 @@ object RefChecks {
// abstract method, and a cursory examination of the difference reveals
// something obvious to us, let's make it more obvious to them.
val abstractParams = underlying.info.firstParamTypes
- val matchingName = clazz.info.member(underlying.name).alternatives
+ val matchingName = clazz.info.nonPrivateMember(underlying.name).alternatives
val matchingArity = matchingName filter { m =>
!m.symbol.is(Deferred) &&
m.info.firstParamTypes.length == abstractParams.length
diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala
index 6be119319..6fb0dd7c7 100644
--- a/src/dotty/tools/dotc/typer/Typer.scala
+++ b/src/dotty/tools/dotc/typer/Typer.scala
@@ -577,16 +577,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
def escapingRefs(block: Tree, localSyms: => List[Symbol])(implicit ctx: Context): collection.Set[NamedType] = {
- var hoisted: Set[Symbol] = Set()
lazy val locals = localSyms.toSet
- def leakingTypes(tp: Type): collection.Set[NamedType] =
- tp namedPartsWith (tp => locals.contains(tp.symbol))
- def typeLeaks(tp: Type): Boolean = leakingTypes(tp).nonEmpty
- def classLeaks(sym: ClassSymbol): Boolean =
- (ctx.owner is Method) || // can't hoist classes out of method bodies
- (sym.info.parents exists typeLeaks) ||
- (sym.info.decls.toList exists (t => typeLeaks(t.info)))
- leakingTypes(block.tpe)
+ block.tpe namedPartsWith (tp => locals.contains(tp.symbol))
}
/** Check that expression's type can be expressed without references to locally defined
diff --git a/src/dotty/annotation/internal/Alias.scala b/src/scala/annotation/internal/Alias.scala
index 8be83960f..e3f56e70c 100644
--- a/src/dotty/annotation/internal/Alias.scala
+++ b/src/scala/annotation/internal/Alias.scala
@@ -1,4 +1,4 @@
-package dotty.annotation.internal
+package scala.annotation.internal
import scala.annotation.Annotation
diff --git a/src/dotty/annotation/internal/AnnotationDefault.scala b/src/scala/annotation/internal/AnnotationDefault.scala
index 7409b2f96..5280d091c 100644
--- a/src/dotty/annotation/internal/AnnotationDefault.scala
+++ b/src/scala/annotation/internal/AnnotationDefault.scala
@@ -1,4 +1,4 @@
-package dotty.annotation.internal
+package scala.annotation.internal
import scala.annotation.Annotation
diff --git a/src/dotty/annotation/internal/Body.scala b/src/scala/annotation/internal/Body.scala
index 7e26b02f2..b6aa0c0fb 100644
--- a/src/dotty/annotation/internal/Body.scala
+++ b/src/scala/annotation/internal/Body.scala
@@ -1,4 +1,4 @@
-package dotty.annotation.internal
+package scala.annotation.internal
import scala.annotation.Annotation
diff --git a/src/dotty/annotation/internal/Child.scala b/src/scala/annotation/internal/Child.scala
index 9295de73e..c90871945 100644
--- a/src/dotty/annotation/internal/Child.scala
+++ b/src/scala/annotation/internal/Child.scala
@@ -1,4 +1,4 @@
-package dotty.annotation.internal
+package scala.annotation.internal
import scala.annotation.Annotation
@@ -9,7 +9,7 @@ import scala.annotation.Annotation
* case class B() extends A
* case class C() extends A
*
- * Then the class symbol `A` would carry the annotations
+ * Then the class symbol `A` would carry the annotations
* `@Child[Bref] @Child[Cref]` where `Bref`, `Cref` are TypeRefs
* referring to the class symbols of `B` and `C`
*/
diff --git a/src/dotty/annotation/internal/InlineParam.scala b/src/scala/annotation/internal/InlineParam.scala
index a144f9edb..0b3649e89 100644
--- a/src/dotty/annotation/internal/InlineParam.scala
+++ b/src/scala/annotation/internal/InlineParam.scala
@@ -1,4 +1,4 @@
-package dotty.annotation.internal
+package scala.annotation.internal
import scala.annotation.Annotation
diff --git a/src/dotty/annotation/internal/Repeated.scala b/src/scala/annotation/internal/Repeated.scala
index 24adc051f..75eb3bc25 100644
--- a/src/dotty/annotation/internal/Repeated.scala
+++ b/src/scala/annotation/internal/Repeated.scala
@@ -1,4 +1,4 @@
-package dotty.annotation.internal
+package scala.annotation.internal
import scala.annotation.Annotation
diff --git a/src/dotty/annotation/internal/SourceFile.scala b/src/scala/annotation/internal/SourceFile.scala
index c49fc2c8d..b203869cf 100644
--- a/src/dotty/annotation/internal/SourceFile.scala
+++ b/src/scala/annotation/internal/SourceFile.scala
@@ -1,4 +1,4 @@
-package dotty.annotation.internal
+package scala.annotation.internal
import scala.annotation.Annotation
diff --git a/src/dotty/annotation/internal/UnsafeNonvariant.scala b/src/scala/annotation/internal/UnsafeNonvariant.scala
index 43a0a114b..b33df65d6 100644
--- a/src/dotty/annotation/internal/UnsafeNonvariant.scala
+++ b/src/scala/annotation/internal/UnsafeNonvariant.scala
@@ -1,4 +1,4 @@
-package dotty.annotation.internal
+package scala.annotation.internal
import scala.annotation.Annotation
diff --git a/src/scalaShadowing/language.scala b/src/scalaShadowing/language.scala
new file mode 100644
index 000000000..a74c9c671
--- /dev/null
+++ b/src/scalaShadowing/language.scala
@@ -0,0 +1,198 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scalaShadowing
+
+/**
+ * The `scala.language` object controls the language features available to the programmer, as proposed in the
+ * [[https://docs.google.com/document/d/1nlkvpoIRkx7at1qJEZafJwthZ3GeIklTFhqmXMvTX9Q/edit '''SIP-18 document''']].
+ *
+ * Each of these features has to be explicitly imported into the current scope to become available:
+ * {{{
+ * import language.postfixOps // or language._
+ * List(1, 2, 3) reverse
+ * }}}
+ *
+ * The language features are:
+ * - [[dynamics `dynamics`]] enables defining calls rewriting using the [[scala.Dynamic `Dynamic`]] trait
+ * - [[postfixOps `postfixOps`]] enables postfix operators
+ * - [[reflectiveCalls `reflectiveCalls`]] enables using structural types
+ * - [[implicitConversions `implicitConversions`]] enables defining implicit methods and members
+ * - [[higherKinds `higherKinds`]] enables writing higher-kinded types
+ * - [[existentials `existentials`]] enables writing existential types
+ * - [[experimental `experimental`]] contains newer features that have not yet been tested in production
+ *
+ * and, for dotty:
+ *
+ * - [[Scala2 `Scala2`]] enables a backwards compatibility mode for Scala 2
+ * - [[noAutoTupling `noAutoTupling`]] disables auto-tupling
+ *
+ * @groupname production Language Features
+ * @groupname experimental Experimental Language Features
+ * @groupprio experimental 10
+ *
+ * Dotty-specific features come at the end.
+ *
+ * Note: Due to the more restricted language import mechanism in dotty (only
+ * imports count, implicits are disregarded) we don't need the constructions
+ * of the inherited language features. A simple object for each feature is
+ * sufficient.
+ */
+object language {
+
+ import languageFeature._
+
+ /** Where enabled, direct or indirect subclasses of trait scala.Dynamic can
+ * be defined. Unless dynamics is enabled, a definition of a class, trait,
+ * or object that has Dynamic as a base trait is rejected. Dynamic member
+ * selection of existing subclasses of trait Dynamic are unaffected;
+ * they can be used anywhere.
+ *
+ * '''Why introduce the feature?''' To enable flexible DSLs and convenient interfacing
+ * with dynamic languages.
+ *
+ * '''Why control it?''' Dynamic member selection can undermine static checkability
+ * of programs. Furthermore, dynamic member selection often relies on reflection,
+ * which is not available on all platforms.
+ *
+ * @group production
+ */
+ @volatile implicit lazy val dynamics: dynamics = languageFeature.dynamics
+
+ /** Only where enabled, postfix operator notation `(expr op)` will be allowed.
+ *
+ * '''Why keep the feature?''' Several DSLs written in Scala need the notation.
+ *
+ * '''Why control it?''' Postfix operators interact poorly with semicolon inference.
+ * Most programmers avoid them for this reason.
+ *
+ * @group production
+ */
+ @volatile implicit lazy val postfixOps: postfixOps = languageFeature.postfixOps
+
+ /** Only where enabled, accesses to members of structural types that need
+ * reflection are supported. Reminder: A structural type is a type of the form
+ * `Parents { Decls }` where `Decls` contains declarations of new members that do
+ * not override any member in `Parents`. To access one of these members, a
+ * reflective call is needed.
+ *
+ * '''Why keep the feature?''' Structural types provide great flexibility because
+ * they avoid the need to define inheritance hierarchies a priori. Besides,
+ * their definition falls out quite naturally from Scala’s concept of type refinement.
+ *
+ * '''Why control it?''' Reflection is not available on all platforms. Popular tools
+ * such as ProGuard have problems dealing with it. Even where reflection is available,
+ * reflective dispatch can lead to surprising performance degradations.
+ *
+ * @group production
+ */
+ @volatile implicit lazy val reflectiveCalls: reflectiveCalls = languageFeature.reflectiveCalls
+
+ /** Only where enabled, definitions of implicit conversions are allowed. An
+ * implicit conversion is an implicit value of unary function type `A => B`,
+ * or an implicit method that has in its first parameter section a single,
+ * non-implicit parameter. Examples:
+ *
+ * {{{
+ * implicit def stringToInt(s: String): Int = s.length
+ * implicit val conv = (s: String) => s.length
+ * implicit def listToX(xs: List[T])(implicit f: T => X): X = ...
+ * }}}
+ *
+ * implicit values of other types are not affected, and neither are implicit
+ * classes.
+ *
+ * '''Why keep the feature?''' Implicit conversions are central to many aspects
+ * of Scala’s core libraries.
+ *
+ * '''Why control it?''' Implicit conversions are known to cause many pitfalls
+ * if over-used. And there is a tendency to over-use them because they look
+ * very powerful and their effects seem to be easy to understand. Also, in
+ * most situations using implicit parameters leads to a better design than
+ * implicit conversions.
+ *
+ * @group production
+ */
+ @volatile implicit lazy val implicitConversions: implicitConversions = languageFeature.implicitConversions
+
+ /** Only where this flag is enabled, higher-kinded types can be written.
+ *
+ * '''Why keep the feature?''' Higher-kinded types enable the definition of very general
+ * abstractions such as functor, monad, or arrow. A significant set of advanced
+ * libraries relies on them. Higher-kinded types are also at the core of the
+ * scala-virtualized effort to produce high-performance parallel DSLs through staging.
+ *
+ * '''Why control it?''' Higher kinded types in Scala lead to a Turing-complete
+ * type system, where compiler termination is no longer guaranteed. They tend
+ * to be useful mostly for type-level computation and for highly generic design
+ * patterns. The level of abstraction implied by these design patterns is often
+ * a barrier to understanding for newcomers to a Scala codebase. Some syntactic
+ * aspects of higher-kinded types are hard to understand for the uninitiated and
+ * type inference is less effective for them than for normal types. Because we are
+ * not completely happy with them yet, it is possible that some aspects of
+ * higher-kinded types will change in future versions of Scala. So an explicit
+ * enabling also serves as a warning that code involving higher-kinded types
+ * might have to be slightly revised in the future.
+ *
+ * @group production
+ */
+ @volatile implicit lazy val higherKinds: higherKinds = languageFeature.higherKinds
+
+ /** Only where enabled, existential types that cannot be expressed as wildcard
+ * types can be written and are allowed in inferred types of values or return
+ * types of methods. Existential types with wildcard type syntax such as `List[_]`,
+ * or `Map[String, _]` are not affected.
+ *
+ * '''Why keep the feature?''' Existential types are needed to make sense of Java’s wildcard
+ * types and raw types and the erased types of run-time values.
+ *
+ * '''Why control it?''' Having complex existential types in a code base usually makes
+ * application code very brittle, with a tendency to produce type errors with
+ * obscure error messages. Therefore, going overboard with existential types
+ * is generally perceived not to be a good idea. Also, complicated existential types
+ * might be no longer supported in a future simplification of the language.
+ *
+ * @group production
+ */
+ @volatile implicit lazy val existentials: existentials = languageFeature.existentials
+
+ /** The experimental object contains features that have been recently added but have not
+ * been thoroughly tested in production yet.
+ *
+ * Experimental features '''may undergo API changes''' in future releases, so production
+ * code should not rely on them.
+ *
+ * Programmers are encouraged to try out experimental features and
+ * [[http://issues.scala-lang.org report any bugs or API inconsistencies]]
+ * they encounter so they can be improved in future releases.
+ *
+ * @group experimental
+ */
+ object experimental {
+
+ import languageFeature.experimental._
+
+ /** Where enabled, macro definitions are allowed. Macro implementations and
+ * macro applications are unaffected; they can be used anywhere.
+ *
+ * '''Why introduce the feature?''' Macros promise to make the language more regular,
+ * replacing ad-hoc language constructs with a general powerful abstraction
+ * capability that can express them. Macros are also a more disciplined and
+ * powerful replacement for compiler plugins.
+ *
+ * '''Why control it?''' For their very power, macros can lead to code that is hard
+ * to debug and understand.
+ */
+ @volatile implicit lazy val macros: macros = languageFeature.experimental.macros
+ }
+
+ /** Where imported, a backwards compatibility mode for Scala2 is enabled */
+ object Scala2
+
+ /** Where imported, auto-tupling is disabled */
+ object noAutoTupling
+}
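
A short usage sketch: because these definitions are entered into the `scala` package (see the Definitions change earlier in this diff), dotty users reach the dotty-specific feature objects through the familiar `scala.language` path (illustrative user code):

import scala.language.Scala2          // enable the Scala 2 compatibility mode
import scala.language.noAutoTupling   // turn off automatic tupling of arguments

object FeatureImportDemo {
  def pair(p: (Int, Int)): Int = p._1 + p._2
  def use: Int = pair((1, 2))   // arguments must be tupled explicitly once
                                // noAutoTupling is imported
}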