Diffstat (limited to 'compiler')
-rw-r--r--  compiler/sjs/backend/sjs/JSCodeGen.scala | 64
-rw-r--r--  compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala | 38
-rw-r--r--  compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala | 6
-rw-r--r--  compiler/src/dotty/tools/dotc/Compiler.scala | 1
-rw-r--r--  compiler/src/dotty/tools/dotc/ast/Desugar.scala | 80
-rw-r--r--  compiler/src/dotty/tools/dotc/ast/TreeInfo.scala | 12
-rw-r--r--  compiler/src/dotty/tools/dotc/ast/Trees.scala | 17
-rw-r--r--  compiler/src/dotty/tools/dotc/ast/tpd.scala | 3
-rw-r--r--  compiler/src/dotty/tools/dotc/ast/untpd.scala | 10
-rw-r--r--  compiler/src/dotty/tools/dotc/config/Config.scala | 4
-rw-r--r--  compiler/src/dotty/tools/dotc/config/JavaPlatform.scala | 1
-rw-r--r--  compiler/src/dotty/tools/dotc/config/PathResolver.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/config/ScalaSettings.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Comments.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala | 1
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Contexts.scala | 23
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Definitions.scala | 274
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Denotations.scala | 22
-rw-r--r--  compiler/src/dotty/tools/dotc/core/NameOps.scala | 11
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Periods.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Phases.scala | 5
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Scopes.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/core/StdNames.scala | 4
-rw-r--r--  compiler/src/dotty/tools/dotc/core/SymDenotations.scala | 13
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Symbols.scala | 35
-rw-r--r--  compiler/src/dotty/tools/dotc/core/TypeComparer.scala | 52
-rw-r--r--  compiler/src/dotty/tools/dotc/core/TypeErasure.scala | 36
-rw-r--r--  compiler/src/dotty/tools/dotc/core/TypeOps.scala | 27
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Types.scala | 64
-rw-r--r--  compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala | 3
-rw-r--r--  compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 320
-rw-r--r--  compiler/src/dotty/tools/dotc/parsing/Scanners.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/parsing/Tokens.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/printing/Formatting.scala | 5
-rw-r--r--  compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala | 15
-rw-r--r--  compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala | 76
-rw-r--r--  compiler/src/dotty/tools/dotc/repl/InterpreterLoop.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/reporting/Reporter.scala | 9
-rw-r--r--  compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala | 8
-rw-r--r--  compiler/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala | 111
-rw-r--r--  compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala | 30
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala | 1
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/ElimByName.scala | 3
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/Erasure.scala | 90
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala | 51
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala | 8
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala | 18
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/LazyVals.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/LiftTry.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/Memoize.scala | 36
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala | 4
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala | 61
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/ShortcutImplicits.scala | 165
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala | 33
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/TreeChecker.scala | 7
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/TreeTransform.scala | 10
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/ValueClasses.scala | 10
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/Applications.scala | 163
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/Checking.scala | 56
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/Dynamic.scala | 11
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala | 14
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/FrontEnd.scala | 24
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/Implicits.scala | 191
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/ImportInfo.scala | 16
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/Inferencing.scala | 16
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/Inliner.scala | 2
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/Namer.scala | 132
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala | 31
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/ReTyper.scala | 5
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/RefChecks.scala | 42
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala | 22
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/Typer.scala | 263
-rw-r--r--  compiler/src/dotty/tools/dotc/util/Chars.scala | 9
-rw-r--r--  compiler/test/dotc/scala-collections.blacklist | 83
-rw-r--r--  compiler/test/dotc/scala-collections.whitelist | 350
-rw-r--r--  compiler/test/dotc/tests.scala | 71
-rw-r--r--  compiler/test/dotty/Jars.scala | 6
-rw-r--r--  compiler/test/dotty/tools/dotc/CompilerTest.scala | 5
-rw-r--r--  compiler/test/dotty/tools/dotc/parsing/ModifiersParsingTest.scala | 6
-rw-r--r--  compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala | 4
82 files changed, 2572 insertions, 851 deletions
diff --git a/compiler/sjs/backend/sjs/JSCodeGen.scala b/compiler/sjs/backend/sjs/JSCodeGen.scala
index 401e01784..69a5651fc 100644
--- a/compiler/sjs/backend/sjs/JSCodeGen.scala
+++ b/compiler/sjs/backend/sjs/JSCodeGen.scala
@@ -127,7 +127,7 @@ class JSCodeGen()(implicit ctx: Context) {
/* Finally, we emit true code for the remaining class defs. */
for (td <- allTypeDefs) {
val sym = td.symbol
- implicit val pos: Position = sym.pos
+ implicit val pos = sym.pos
/* Do not actually emit code for primitive types nor scala.Array. */
val isPrimitive =
@@ -203,7 +203,7 @@ class JSCodeGen()(implicit ctx: Context) {
*/
private def genScalaClass(td: TypeDef): js.ClassDef = {
val sym = td.symbol.asClass
- implicit val pos: Position = sym.pos
+ implicit val pos = sym.pos
assert(!sym.is(Trait),
"genScalaClass() must be called only for normal classes: "+sym)
@@ -336,7 +336,7 @@ class JSCodeGen()(implicit ctx: Context) {
*/
private def genRawJSClassData(td: TypeDef): js.ClassDef = {
val sym = td.symbol.asClass
- implicit val pos: Position = sym.pos
+ implicit val pos = sym.pos
val classIdent = encodeClassFullNameIdent(sym)
val superClass =
@@ -358,7 +358,7 @@ class JSCodeGen()(implicit ctx: Context) {
*/
private def genInterface(td: TypeDef): js.ClassDef = {
val sym = td.symbol.asClass
- implicit val pos: Position = sym.pos
+ implicit val pos = sym.pos
val classIdent = encodeClassFullNameIdent(sym)
@@ -408,7 +408,7 @@ class JSCodeGen()(implicit ctx: Context) {
f <- classSym.info.decls
if !f.is(Method) && f.isTerm
} yield {
- implicit val pos: Position = f.pos
+ implicit val pos = f.pos
val name =
/*if (isExposed(f)) js.StringLiteral(jsNameOf(f))
@@ -479,7 +479,7 @@ class JSCodeGen()(implicit ctx: Context) {
* Other (normal) methods are emitted with `genMethodBody()`.
*/
private def genMethodWithCurrentLocalNameScope(dd: DefDef): Option[js.MethodDef] = {
- implicit val pos: Position = dd.pos
+ implicit val pos = dd.pos
val sym = dd.symbol
val vparamss = dd.vparamss
val rhs = dd.rhs
@@ -501,7 +501,7 @@ class JSCodeGen()(implicit ctx: Context) {
val methodName: js.PropertyName = encodeMethodSym(sym)
def jsParams = for (param <- params) yield {
- implicit val pos: Position = param.pos
+ implicit val pos = param.pos
js.ParamDef(encodeLocalSym(param), toIRType(param.info),
mutable = false, rest = false)
}
@@ -574,13 +574,13 @@ class JSCodeGen()(implicit ctx: Context) {
private def genMethodDef(static: Boolean, methodName: js.PropertyName,
paramsSyms: List[Symbol], resultIRType: jstpe.Type,
tree: Tree, optimizerHints: OptimizerHints): js.MethodDef = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
ctx.debuglog("genMethod " + methodName.name)
ctx.debuglog("")
val jsParams = for (param <- paramsSyms) yield {
- implicit val pos: Position = param.pos
+ implicit val pos = param.pos
js.ParamDef(encodeLocalSym(param), toIRType(param.info),
mutable = false, rest = false)
}
@@ -621,7 +621,7 @@ class JSCodeGen()(implicit ctx: Context) {
/* Any JavaScript expression is also a statement, but at least we get rid
* of some pure expressions that come from our own codegen.
*/
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
tree match {
case js.Block(stats :+ expr) => js.Block(stats :+ exprToStat(expr))
case _:js.Literal | js.This() => js.Skip()
@@ -644,7 +644,7 @@ class JSCodeGen()(implicit ctx: Context) {
* is transformed into an equivalent portion of the JS AST.
*/
private def genStatOrExpr(tree: Tree, isStat: Boolean): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
ctx.debuglog(" " + tree)
ctx.debuglog("")
@@ -902,7 +902,7 @@ class JSCodeGen()(implicit ctx: Context) {
* primitives, JS calls, etc. They are further dispatched in here.
*/
private def genApply(tree: Apply, isStat: Boolean): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val args = tree.args
val sym = tree.fun.symbol
@@ -951,7 +951,7 @@ class JSCodeGen()(implicit ctx: Context) {
* irrelevant.
*/
private def genSuperCall(tree: Apply, isStat: Boolean): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val Apply(fun @ Select(sup @ Super(_, mix), _), args) = tree
val sym = fun.symbol
@@ -987,7 +987,7 @@ class JSCodeGen()(implicit ctx: Context) {
* * regular new
*/
private def genApplyNew(tree: Apply): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val Apply(fun @ Select(New(tpt), nme.CONSTRUCTOR), args) = tree
val ctor = fun.symbol
@@ -1023,7 +1023,7 @@ class JSCodeGen()(implicit ctx: Context) {
private def genPrimitiveOp(tree: Apply, isStat: Boolean): js.Tree = {
import scala.tools.nsc.backend.ScalaPrimitives._
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val Apply(fun, args) = tree
val receiver = qualifierOf(fun)
@@ -1063,7 +1063,7 @@ class JSCodeGen()(implicit ctx: Context) {
private def genSimpleUnaryOp(tree: Apply, arg: Tree, code: Int): js.Tree = {
import scala.tools.nsc.backend.ScalaPrimitives._
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val genArg = genExpr(arg)
val resultIRType = toIRType(tree.tpe)
@@ -1118,7 +1118,7 @@ class JSCodeGen()(implicit ctx: Context) {
}
import OpTypes._
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val lhsIRType = toIRType(lhs.tpe)
val rhsIRType = toIRType(rhs.tpe)
@@ -1374,7 +1374,7 @@ class JSCodeGen()(implicit ctx: Context) {
*/
private def genStringConcat(tree: Apply, receiver: Tree,
args: List[Tree]): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val arg = args.head
@@ -1401,7 +1401,7 @@ class JSCodeGen()(implicit ctx: Context) {
/** Gen JS code for a call to Any.## */
private def genScalaHash(tree: Apply, receiver: Tree): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
genModuleApplyMethod(defn.ScalaRuntimeModule.requiredMethod(nme.hash_),
List(genExpr(receiver)))
@@ -1411,7 +1411,7 @@ class JSCodeGen()(implicit ctx: Context) {
private def genArrayOp(tree: Tree, code: Int): js.Tree = {
import scala.tools.nsc.backend.ScalaPrimitives._
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val Apply(fun, args) = tree
val arrayObj = qualifierOf(fun)
@@ -1462,7 +1462,7 @@ class JSCodeGen()(implicit ctx: Context) {
// common case for which there is no side-effect nor NPE
genArg
case _ =>
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
/* TODO Check for a null receiver?
* In theory, it's UB, but that decision should be left for link time.
*/
@@ -1474,7 +1474,7 @@ class JSCodeGen()(implicit ctx: Context) {
private def genCoercion(tree: Apply, receiver: Tree, code: Int): js.Tree = {
import scala.tools.nsc.backend.ScalaPrimitives._
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val source = genExpr(receiver)
@@ -1544,7 +1544,7 @@ class JSCodeGen()(implicit ctx: Context) {
/** Gen a call to the special `throw` method. */
private def genThrow(tree: Apply, args: List[Tree]): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val exception = args.head
val genException = genExpr(exception)
js.Throw {
@@ -1568,7 +1568,7 @@ class JSCodeGen()(implicit ctx: Context) {
* * Regular method call
*/
private def genNormalApply(tree: Apply, isStat: Boolean): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val fun = tree.fun match {
case fun: Ident => desugarIdent(fun).get
@@ -1616,7 +1616,7 @@ class JSCodeGen()(implicit ctx: Context) {
superIn: Option[Symbol] = None)(
implicit pos: Position): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
def noSpread = !args.exists(_.isInstanceOf[js.JSSpread])
val argc = args.size // meaningful only for methods that don't have varargs
@@ -1775,7 +1775,7 @@ class JSCodeGen()(implicit ctx: Context) {
* primitive instead.)
*/
private def genTypeApply(tree: TypeApply): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val TypeApply(fun, targs) = tree
@@ -1803,7 +1803,7 @@ class JSCodeGen()(implicit ctx: Context) {
/** Gen JS code for a Java Seq literal. */
private def genJavaSeqLiteral(tree: JavaSeqLiteral): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val genElems = tree.elems.map(genExpr)
val arrayType = toReferenceType(tree.tpe).asInstanceOf[jstpe.ArrayType]
@@ -1852,7 +1852,7 @@ class JSCodeGen()(implicit ctx: Context) {
* available in the `body`.
*/
private def genClosure(tree: Closure): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val Closure(env, call, functionalInterface) = tree
val envSize = env.size
@@ -1868,7 +1868,7 @@ class JSCodeGen()(implicit ctx: Context) {
val allCaptureValues = qualifier :: env
val (formalCaptures, actualCaptures) = allCaptureValues.map { value =>
- implicit val pos: Position = value.pos
+ implicit val pos = value.pos
val formalIdent = value match {
case Ident(name) => freshLocalIdent(name.toString)
case This(_) => freshLocalIdent("this")
@@ -1988,7 +1988,7 @@ class JSCodeGen()(implicit ctx: Context) {
/** Gen JS code for an isInstanceOf test (for reference types only) */
private def genIsInstanceOf(tree: Tree, value: js.Tree, to: Type): js.Tree = {
- implicit val pos: Position = tree.pos
+ implicit val pos = tree.pos
val sym = to.widenDealias.typeSymbol
if (sym == defn.ObjectClass) {
@@ -2242,7 +2242,7 @@ class JSCodeGen()(implicit ctx: Context) {
* to perform the conversion to js.Array, then wrap in a Spread
* operator.
*/
- implicit val pos: Position = arg.pos
+ implicit val pos = arg.pos
val jsArrayArg = genModuleApplyMethod(
jsdefn.RuntimePackage_genTraversableOnce2jsArray,
List(genExpr(arg)))
@@ -2259,7 +2259,7 @@ class JSCodeGen()(implicit ctx: Context) {
*/
private def tryGenRepeatedParamAsJSArray(arg: Tree,
handleNil: Boolean): Option[List[js.Tree]] = {
- implicit val pos: Position = arg.pos
+ implicit val pos = arg.pos
// Given a method `def foo(args: T*)`
arg match {
diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
index a7c449947..51fa15706 100644
--- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
+++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
@@ -39,6 +39,10 @@ import dotty.tools.dotc.core.Names.TypeName
import scala.annotation.tailrec
class DottyBackendInterface(outputDirectory: AbstractFile, val superCallsMap: Map[Symbol, Set[ClassSymbol]])(implicit ctx: Context) extends BackendInterface{
+ import Symbols.{toDenot, toClassDenot}
+ // Dotty deviation: Need to (re-)import implicit decorators here because otherwise
+ // they would be shadowed by the more deeply nested `symHelper` decorator.
+
type Symbol = Symbols.Symbol
type Type = Types.Type
type Tree = tpd.Tree
@@ -140,7 +144,7 @@ class DottyBackendInterface(outputDirectory: AbstractFile, val superCallsMap: Ma
val externalEqualsNumChar: Symbol = NoSymbol // ctx.requiredMethod(BoxesRunTimeTypeRef, nme.equalsNumChar) // this method is private
val externalEqualsNumObject: Symbol = defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject)
val externalEquals: Symbol = defn.BoxesRunTimeClass.info.decl(nme.equals_).suchThat(toDenot(_).info.firstParamTypes.size == 2).symbol
- val MaxFunctionArity: Int = Definitions.MaxFunctionArity
+ val MaxFunctionArity: Int = Definitions.MaxImplementedFunctionArity
val FunctionClass: Array[Symbol] = defn.FunctionClassPerRun()
val AbstractFunctionClass: Array[Symbol] = defn.AbstractFunctionClassPerRun()
val PartialFunctionClass: Symbol = defn.PartialFunctionClass
@@ -206,18 +210,15 @@ class DottyBackendInterface(outputDirectory: AbstractFile, val superCallsMap: Ma
implicit val ConstantClassTag: ClassTag[Constant] = ClassTag[Constant](classOf[Constant])
implicit val ClosureTag: ClassTag[Closure] = ClassTag[Closure](classOf[Closure])
- /* dont emit any annotations for now*/
- def isRuntimeVisible(annot: Annotation): Boolean = {
- annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr) match {
- case Some(retentionAnnot) =>
- retentionAnnot.tree.find(_.symbol == AnnotationRetentionRuntimeAttr).isDefined
- case _ =>
- // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the
- // annotation is emitted with visibility `RUNTIME`
- // dotty bug: #389
- true
+ def isRuntimeVisible(annot: Annotation): Boolean =
+ if (toDenot(annot.atp.typeSymbol).hasAnnotation(AnnotationRetentionAttr))
+ retentionPolicyOf(annot) == AnnotationRetentionRuntimeAttr
+ else {
+ // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the
+ // annotation is emitted with visibility `RUNTIME`
+ // dotty bug: #389
+ true
}
- }
def shouldEmitAnnotation(annot: Annotation): Boolean = {
annot.symbol.isJavaDefined &&
@@ -227,7 +228,7 @@ class DottyBackendInterface(outputDirectory: AbstractFile, val superCallsMap: Ma
private def retentionPolicyOf(annot: Annotation): Symbol =
annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr).
- flatMap(_.argument(0).map(_.symbol)).getOrElse(AnnotationRetentionClassAttr)
+ flatMap(_.argumentConstant(0).map(_.symbolValue)).getOrElse(AnnotationRetentionClassAttr)
private def emitArgument(av: AnnotationVisitor,
name: String,
@@ -559,7 +560,10 @@ class DottyBackendInterface(outputDirectory: AbstractFile, val superCallsMap: Ma
def javaBinaryName: Name = toDenot(sym).fullNameSeparated("/") // addModuleSuffix(fullNameInternal('/'))
def javaClassName: String = toDenot(sym).fullName.toString// addModuleSuffix(fullNameInternal('.')).toString
def name: Name = sym.name
- def rawname: Name = sym.name // todo ????
+ def rawname: Name = {
+ val original = toDenot(sym).initial
+ sym.name(ctx.withPhase(original.validFor.phaseId))
+ }
// types
def info: Type = toDenot(sym).info
@@ -686,8 +690,6 @@ class DottyBackendInterface(outputDirectory: AbstractFile, val superCallsMap: Ma
else sym.enclosingClass(ctx.withPhase(ctx.flattenPhase.prev))
} //todo is handled specially for JavaDefined symbols in scalac
-
-
// members
def primaryConstructor: Symbol = toDenot(sym).primaryConstructor
@@ -708,7 +710,7 @@ class DottyBackendInterface(outputDirectory: AbstractFile, val superCallsMap: Ma
}
else Nil
- def annotations: List[Annotation] = Nil
+ def annotations: List[Annotation] = toDenot(sym).annotations
def companionModuleMembers: List[Symbol] = {
// phase travel to exitingPickler: this makes sure that memberClassesOf only sees member classes,
// not local classes of the companion module (E in the example) that were lifted by lambdalift.
@@ -1027,7 +1029,7 @@ class DottyBackendInterface(outputDirectory: AbstractFile, val superCallsMap: Ma
case JavaArrayType(elem) => elem
case _ =>
ctx.error(s"JavaSeqArray with type ${field.tpe} reached backend: $field", field.pos)
- ErrorType
+ UnspecifiedErrorType
}
def _2: List[Tree] = field.elems
}
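
Note on the isRuntimeVisible rewrite above: it now checks for an explicit @Retention meta-annotation and, per the SI-8926 comment, falls back to runtime visibility when none is present. A minimal Scala sketch of the two cases it distinguishes (the annotation class names below are illustrative, not part of the patch):

  import java.lang.annotation.{Retention, RetentionPolicy}

  // Carries an explicit retention policy; isRuntimeVisible compares it
  // against AnnotationRetentionRuntimeAttr, so RUNTIME means visible.
  @Retention(RetentionPolicy.RUNTIME)
  class runtimeTraced extends scala.annotation.StaticAnnotation

  // No @Retention meta-annotation at all; per the SI-8926 note above the
  // backend treats such an annotation as runtime-visible by default.
  class untagged extends scala.annotation.StaticAnnotation
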
diff --git a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
index 0027defa7..89831e56b 100644
--- a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
+++ b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
@@ -12,7 +12,7 @@ import dotty.tools.dotc.ast.tpd._
import dotty.tools.dotc.core.Names.TermName
import dotty.tools.dotc.core.StdNames
import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Types.{JavaArrayType, ErrorType, Type}
+import dotty.tools.dotc.core.Types.{JavaArrayType, UnspecifiedErrorType, Type}
import scala.collection.{ mutable, immutable }
@@ -73,7 +73,7 @@ class DottyPrimitives(ctx: Context) {
case JavaArrayType(el) => el
case _ =>
ctx.error(s"expected Array $tpe")
- ErrorType
+ UnspecifiedErrorType
}
code match {
@@ -125,7 +125,7 @@ class DottyPrimitives(ctx: Context) {
/** Initialize the primitive map */
private def init: immutable.Map[Symbol, Int] = {
- implicit val ctx: Context = this.ctx
+ implicit val ctx = this.ctx
import core.Symbols.defn
val primitives = new mutable.HashMap[Symbol, Int]()
diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala
index ad3249be2..900d2b0e3 100644
--- a/compiler/src/dotty/tools/dotc/Compiler.scala
+++ b/compiler/src/dotty/tools/dotc/Compiler.scala
@@ -61,6 +61,7 @@ class Compiler {
new PatternMatcher, // Compile pattern matches
new ExplicitOuter, // Add accessors to outer classes from nested ones.
new ExplicitSelf, // Make references to non-trivial self types explicit as casts
+ new ShortcutImplicits, // Allow implicit functions without creating closures
new CrossCastAnd, // Normalize selections involving intersection types.
new Splitter), // Expand selections involving union types into conditionals
List(new VCInlineMethods, // Inlines calls to value class methods
diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
index 4b2ff1bc3..211683c0a 100644
--- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala
+++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
@@ -7,6 +7,7 @@ import util.Positions._, Types._, Contexts._, Constants._, Names._, NameOps._, F
import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._
import Decorators._
import language.higherKinds
+import typer.FrontEnd
import collection.mutable.ListBuffer
import util.Property
import reporting.diagnostic.messages._
@@ -24,7 +25,6 @@ object desugar {
/** Names of methods that are added unconditionally to case classes */
def isDesugaredCaseClassMethodName(name: Name)(implicit ctx: Context): Boolean =
- name == nme.isDefined ||
name == nme.copy ||
name == nme.productArity ||
name.isSelectorName
@@ -124,6 +124,13 @@ object desugar {
else vdef
}
+ def makeImplicitParameters(tpts: List[Tree], forPrimaryConstructor: Boolean)(implicit ctx: Context) =
+ for (tpt <- tpts) yield {
+ val paramFlags: FlagSet = if (forPrimaryConstructor) PrivateLocalParamAccessor else Param
+ val epname = ctx.freshName(nme.EVIDENCE_PARAM_PREFIX).toTermName
+ ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | Implicit)
+ }
+
/** Expand context bounds to evidence params. E.g.,
*
* def f[T >: L <: H : B](params)
@@ -142,16 +149,17 @@ object desugar {
val DefDef(name, tparams, vparamss, tpt, rhs) = meth
val mods = meth.mods
val epbuf = new ListBuffer[ValDef]
- val tparams1 = tparams mapConserve {
- case tparam @ TypeDef(_, ContextBounds(tbounds, cxbounds)) =>
- for (cxbound <- cxbounds) {
- val paramFlags: FlagSet = if (isPrimaryConstructor) PrivateLocalParamAccessor else Param
- val epname = ctx.freshName(nme.EVIDENCE_PARAM_PREFIX).toTermName
- epbuf += ValDef(epname, cxbound, EmptyTree).withFlags(paramFlags | Implicit)
- }
- cpy.TypeDef(tparam)(rhs = tbounds)
- case tparam =>
- tparam
+ def desugarContextBounds(rhs: Tree): Tree = rhs match {
+ case ContextBounds(tbounds, cxbounds) =>
+ epbuf ++= makeImplicitParameters(cxbounds, isPrimaryConstructor)
+ tbounds
+ case PolyTypeTree(tparams, body) =>
+ cpy.PolyTypeTree(rhs)(tparams, desugarContextBounds(body))
+ case _ =>
+ rhs
+ }
+ val tparams1 = tparams mapConserve { tparam =>
+ cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs))
}
val meth1 = addEvidenceParams(cpy.DefDef(meth)(tparams = tparams1), epbuf.toList)
@@ -338,10 +346,10 @@ object desugar {
if (isCaseClass) {
def syntheticProperty(name: TermName, rhs: Tree) =
DefDef(name, Nil, Nil, TypeTree(), rhs).withMods(synthetic)
- val isDefinedMeth = syntheticProperty(nme.isDefined, Literal(Constant(true)))
val caseParams = constrVparamss.head.toArray
- val productElemMeths = for (i <- 0 until arity) yield
- syntheticProperty(nme.selectorName(i), Select(This(EmptyTypeIdent), caseParams(i).name))
+ val productElemMeths =
+ for (i <- 0 until arity if nme.selectorName(i) `ne` caseParams(i).name)
+ yield syntheticProperty(nme.selectorName(i), Select(This(EmptyTypeIdent), caseParams(i).name))
def isRepeated(tree: Tree): Boolean = tree match {
case PostfixOp(_, nme.raw.STAR) => true
case ByNameTypeTree(tree1) => isRepeated(tree1)
@@ -356,7 +364,7 @@ object desugar {
if (mods.is(Abstract) || hasRepeatedParam) Nil // cannot have default arguments for repeated parameters, hence copy method is not issued
else {
def copyDefault(vparam: ValDef) =
- makeAnnotated(defn.UncheckedVarianceAnnot, refOfDef(vparam))
+ makeAnnotated("scala.annotation.unchecked.uncheckedVariance", refOfDef(vparam))
val copyFirstParams = derivedVparamss.head.map(vparam =>
cpy.ValDef(vparam)(rhs = copyDefault(vparam)))
val copyRestParamss = derivedVparamss.tail.nestedMap(vparam =>
@@ -364,7 +372,7 @@ object desugar {
DefDef(nme.copy, derivedTparams, copyFirstParams :: copyRestParamss, TypeTree(), creatorExpr)
.withMods(synthetic) :: Nil
}
- copyMeths ::: isDefinedMeth :: productElemMeths.toList
+ copyMeths ::: productElemMeths.toList
}
else Nil
@@ -552,7 +560,7 @@ object desugar {
case VarPattern(named, tpt) =>
derivedValDef(original, named, tpt, rhs, mods)
case _ =>
- val rhsUnchecked = makeAnnotated(defn.UncheckedAnnot, rhs)
+ val rhsUnchecked = makeAnnotated("scala.unchecked", rhs)
val vars = getVariables(pat)
val isMatchingTuple: Tree => Boolean = {
case Tuple(es) => es.length == vars.length
@@ -676,11 +684,33 @@ object desugar {
Function(param :: Nil, Block(vdefs, body))
}
- /** Add annotation with class `cls` to tree:
- * tree @cls
+ def makeImplicitFunction(formals: List[Type], body: Tree)(implicit ctx: Context): Tree = {
+ val params = makeImplicitParameters(formals.map(TypeTree), forPrimaryConstructor = false)
+ new ImplicitFunction(params, body)
+ }
+
+ /** Add annotation to tree:
+ * tree @fullName
+ *
+ * The annotation is usually represented as a TypeTree referring to the class
+ * with the given name `fullName`. However, if the annotation matches a file name
+ * that is still to be entered, the annotation is represented as a cascade of `Selects`
+ * following `fullName`. This is necessary so that we avoid reading an annotation from
+ * the classpath that is also compiled from source.
*/
- def makeAnnotated(cls: Symbol, tree: Tree)(implicit ctx: Context) =
- Annotated(tree, untpd.New(untpd.TypeTree(cls.typeRef), Nil))
+ def makeAnnotated(fullName: String, tree: Tree)(implicit ctx: Context) = {
+ val parts = fullName.split('.')
+ val ttree = ctx.typerPhase match {
+ case phase: FrontEnd if phase.stillToBeEntered(parts.last) =>
+ val prefix =
+ ((Ident(nme.ROOTPKG): Tree) /: parts.init)((qual, name) =>
+ Select(qual, name.toTermName))
+ Select(prefix, parts.last.toTypeName)
+ case _ =>
+ TypeTree(ctx.requiredClass(fullName).typeRef)
+ }
+ Annotated(tree, untpd.New(ttree, Nil))
+ }
private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(implicit ctx: Context) = {
val vdef = ValDef(named.name.asTermName, tpt, rhs)
@@ -916,7 +946,11 @@ object desugar {
val elems = segments flatMap {
case ts: Thicket => ts.trees.tail
case t => Nil
+ } map {
+ case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala
+ case t => t
}
+
Apply(Select(Apply(Ident(nme.StringContext), strs), id), elems)
case InfixOp(l, op, r) =>
if (ctx.mode is Mode.Type)
@@ -1079,6 +1113,10 @@ object desugar {
collect(tree)
case Tuple(trees) =>
trees foreach collect
+ case Thicket(trees) =>
+ trees foreach collect
+ case Block(Nil, expr) =>
+ collect(expr)
case _ =>
}
collect(tree)
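
For orientation, the context-bound change above (makeImplicitParameters plus desugarContextBounds) still turns each bound into a fresh implicit evidence parameter; a small source-level sketch of that desugaring, where evidence$1 stands in for the freshly generated evidence$-prefixed name:

  // Written with a context bound:
  def max[T: Ordering](a: T, b: T): T =
    if (implicitly[Ordering[T]].gteq(a, b)) a else b

  // After desugaring, the bound becomes an implicit evidence parameter:
  def maxDesugared[T](a: T, b: T)(implicit evidence$1: Ordering[T]): T =
    if (evidence$1.gteq(a, b)) a else b
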
diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
index d1e6bd38a..da83d0644 100644
--- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
+++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
@@ -290,6 +290,16 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped]
case _ => false
}
+ /** Is `tree` an implicit function or closure, possibly nested in a block? */
+ def isImplicitClosure(tree: Tree)(implicit ctx: Context): Boolean = unsplice(tree) match {
+ case Function((param: untpd.ValDef) :: _, _) => param.mods.is(Implicit)
+ case Closure(_, meth, _) => true
+ case Block(Nil, expr) => isImplicitClosure(expr)
+ case Block(DefDef(nme.ANON_FUN, _, (param :: _) :: _, _, _) :: Nil, _: Closure) =>
+ param.mods.is(Implicit)
+ case _ => false
+ }
+
// todo: fill with other methods from TreeInfo that only apply to untpd.Tree's
}
@@ -501,7 +511,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>
*/
object closure {
def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match {
- case Block(_, Closure(env, meth, tpt)) => Some(env, meth, tpt)
+ case Block(_, expr) => unapply(expr)
case Closure(env, meth, tpt) => Some(env, meth, tpt)
case _ => None
}
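
The new isImplicitClosure predicate above recognizes, among other shapes, a function literal whose first parameter carries the implicit modifier, possibly wrapped in an empty block. A hedged source-level illustration of the shapes it matches (not taken from the patch):

  // Matches the Function((param: ValDef) :: _, _) case: the first
  // parameter of the literal is marked implicit.
  val f = { implicit s: String => s.length }

  // Still matches after wrapping in an otherwise empty block, because
  // isImplicitClosure unwraps Block(Nil, expr).
  val g = { { implicit s: String => s.length } }
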
diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala
index 2801bcae2..798f0f567 100644
--- a/compiler/src/dotty/tools/dotc/ast/Trees.scala
+++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala
@@ -32,7 +32,7 @@ object Trees {
/** Property key for trees with documentation strings attached */
val DocComment = new Property.Key[Comment]
- @sharable private var nextId = 0 // for debugging
+ @sharable private var nextId = 0 // for debugging
type LazyTree = AnyRef /* really: Tree | Lazy[Tree] */
type LazyTreeList = AnyRef /* really: List[Tree] | Lazy[List[Tree]] */
@@ -113,7 +113,7 @@ object Trees {
* type. (Overridden by empty trees)
*/
def withType(tpe: Type)(implicit ctx: Context): ThisTree[Type] = {
- if (tpe == ErrorType) assert(ctx.reporter.errorsReported)
+ if (tpe.isInstanceOf[ErrorType]) assert(ctx.reporter.errorsReported)
withTypeUnchecked(tpe)
}
@@ -890,6 +890,11 @@ object Trees {
case tree: Select if (qualifier eq tree.qualifier) && (name == tree.name) => tree
case _ => finalize(tree, untpd.Select(qualifier, name))
}
+ /** Copy Ident or Select trees */
+ def Ref(tree: RefTree)(name: Name)(implicit ctx: Context) = tree match {
+ case Ident(_) => Ident(tree)(name)
+ case Select(qual, _) => Select(tree)(qual, name)
+ }
def This(tree: Tree)(qual: untpd.Ident): This = tree match {
case tree: This if qual eq tree.qual => tree
case _ => finalize(tree, untpd.This(qual))
@@ -1224,7 +1229,7 @@ object Trees {
case AppliedTypeTree(tpt, args) =>
this(this(x, tpt), args)
case PolyTypeTree(tparams, body) =>
- implicit val ctx: Context = localCtx
+ implicit val ctx = localCtx
this(this(x, tparams), body)
case ByNameTypeTree(result) =>
this(x, result)
@@ -1237,13 +1242,13 @@ object Trees {
case UnApply(fun, implicits, patterns) =>
this(this(this(x, fun), implicits), patterns)
case tree @ ValDef(name, tpt, _) =>
- implicit val ctx: Context = localCtx
+ implicit val ctx = localCtx
this(this(x, tpt), tree.rhs)
case tree @ DefDef(name, tparams, vparamss, tpt, _) =>
- implicit val ctx: Context = localCtx
+ implicit val ctx = localCtx
this(this((this(x, tparams) /: vparamss)(apply), tpt), tree.rhs)
case TypeDef(name, rhs) =>
- implicit val ctx: Context = localCtx
+ implicit val ctx = localCtx
this(x, rhs)
case tree @ Template(constr, parents, self, _) =>
this(this(this(this(x, constr), parents), self), tree.body)
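
The new Ref copier above simply dispatches on the two RefTree shapes, so callers can rename an Ident or a Select without pattern matching themselves. A minimal sketch of how it might be used on untyped trees (the renamed helper is hypothetical):

  import dotty.tools.dotc.ast.untpd
  import dotty.tools.dotc.core.Contexts.Context
  import dotty.tools.dotc.core.Names.Name

  // Renames both `foo` and `qual.foo`, reusing the original tree when
  // nothing changes, as the TreeCopiers usually do.
  def renamed(tree: untpd.RefTree, name: Name)(implicit ctx: Context) =
    untpd.cpy.Ref(tree)(name)
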
diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala
index cd6b3fcf2..433808e8e 100644
--- a/compiler/src/dotty/tools/dotc/ast/tpd.scala
+++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala
@@ -450,7 +450,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
} else foldOver(sym, tree)
}
- override val cpy = new TypedTreeCopier
+ override val cpy: TypedTreeCopier = // Type ascription needed to pick up any new members in TreeCopier (currently there are none)
+ new TypedTreeCopier
class TypedTreeCopier extends TreeCopier {
def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[Type] =
diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala
index 6c5210287..f3ffce8f8 100644
--- a/compiler/src/dotty/tools/dotc/ast/untpd.scala
+++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala
@@ -53,6 +53,12 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
override def isTerm = body.isTerm
override def isType = body.isType
}
+
+ /** An implicit function type */
+ class ImplicitFunction(args: List[Tree], body: Tree) extends Function(args, body) {
+ override def toString = s"ImplicitFunction($args, $body)"
+ }
+
/** A function created from a wildcard expression
* @param placeHolderParams a list of definitions of synthetic parameters
* @param body the function body where wildcards are replaced by
@@ -111,7 +117,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
case class Var() extends Mod(Flags.Mutable)
- case class Implicit(flag: FlagSet = Flags.ImplicitCommon) extends Mod(flag)
+ case class Implicit() extends Mod(Flags.ImplicitCommon)
case class Final() extends Mod(Flags.Final)
@@ -270,8 +276,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
// ------ Additional creation methods for untyped only -----------------
- // def TypeTree(tpe: Type): TypeTree = TypeTree().withType(tpe) todo: move to untpd/tpd
-
/** new pre.C[Ts](args1)...(args_n)
* ==>
* (new pre.C).<init>[Ts](args1)...(args_n)
diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala
index 7744a5479..119af9483 100644
--- a/compiler/src/dotty/tools/dotc/config/Config.scala
+++ b/compiler/src/dotty/tools/dotc/config/Config.scala
@@ -133,6 +133,8 @@ object Config {
*/
final val LogPendingFindMemberThreshold = 10
- /** Maximal number of outstanding recursive calls to findMember */
+ /** Maximal number of outstanding recursive calls to findMember before backing out
+ * when findMemberLimit is set.
+ */
final val PendingFindMemberLimit = LogPendingFindMemberThreshold * 4
}
diff --git a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
index a695202d3..b5bfbb39f 100644
--- a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
+++ b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
@@ -18,6 +18,7 @@ class JavaPlatform extends Platform {
currentClassPath = Some(new PathResolver().result)
val cp = currentClassPath.get
//println(cp)
+ //println("------------------")
cp
}
diff --git a/compiler/src/dotty/tools/dotc/config/PathResolver.scala b/compiler/src/dotty/tools/dotc/config/PathResolver.scala
index aa4d8aeb0..8df9a8c0e 100644
--- a/compiler/src/dotty/tools/dotc/config/PathResolver.scala
+++ b/compiler/src/dotty/tools/dotc/config/PathResolver.scala
@@ -143,7 +143,7 @@ object PathResolver {
println(Defaults)
}
else {
- implicit val ctx: Context = (new ContextBase).initialCtx // Dotty deviation: implicits need explicit type
+ implicit val ctx = (new ContextBase).initialCtx
val ArgsSummary(sstate, rest, errors) =
ctx.settings.processArguments(args.toList, true)
errors.foreach(println)
diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
index fd2ded0b5..21a6c1165 100644
--- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
+++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -69,7 +69,7 @@ class ScalaSettings extends Settings.SettingGroup {
val genPhaseGraph = StringSetting("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
val XlogImplicits = BooleanSetting("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
val XminImplicitSearchDepth = IntSetting("-Xmin-implicit-search-depth", "Set number of levels of implicit searches undertaken before checking for divergence.", 5)
- val xmaxInlines = IntSetting("-Xmax-inlines", "Maximal number of successive inlines", 70)
+ val xmaxInlines = IntSetting("-Xmax-inlines", "Maximal number of successive inlines", 32)
val logImplicitConv = BooleanSetting("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
val logReflectiveCalls = BooleanSetting("-Xlog-reflective-calls", "Print a message when a reflective method call is generated")
val logFreeTerms = BooleanSetting("-Xlog-free-terms", "Print a message when reification creates a free term.")
diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala
index 1e623db4d..2559209c3 100644
--- a/compiler/src/dotty/tools/dotc/core/Comments.scala
+++ b/compiler/src/dotty/tools/dotc/core/Comments.scala
@@ -119,7 +119,7 @@ object Comments {
def apply(comment: Comment, code: String, codePos: Position)(implicit ctx: Context) =
new UseCase(comment, code, codePos) {
val untpdCode = {
- val tree = new Parser(new SourceFile("<usecase>", code)).localDef(codePos.start, EmptyFlags)
+ val tree = new Parser(new SourceFile("<usecase>", code)).localDef(codePos.start)
tree match {
case tree: untpd.DefDef =>
diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
index 0e155b9e1..42df53fed 100644
--- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
+++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -57,6 +57,7 @@ trait ConstraintHandling {
b match {
case b: AndOrType => occursIn(b.tp1) || occursIn(b.tp2)
case b: TypeVar => occursIn(b.origin)
+ case b: TermRef => occursIn(b.underlying)
case _ => false
}
}
diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala
index 639c4d111..29629e505 100644
--- a/compiler/src/dotty/tools/dotc/core/Contexts.scala
+++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala
@@ -216,8 +216,8 @@ object Contexts {
else if (isNonEmptyScopeContext) scope.implicitDecls
else Nil
val outerImplicits =
- if (isImportContext && importInfo.hiddenRoot.exists)
- outer.implicits exclude importInfo.hiddenRoot
+ if (isImportContext && importInfo.unimported.exists)
+ outer.implicits exclude importInfo.unimported
else
outer.implicits
if (implicitRefs.isEmpty) outerImplicits
@@ -262,6 +262,9 @@ object Contexts {
final def withPhaseNoLater(phase: Phase) =
if (phase.exists && ctx.phase.id > phase.id) withPhase(phase) else ctx
+ final def withPhaseNoEarlier(phase: Phase) =
+ if (phase.exists && ctx.phase.id < phase.id) withPhase(phase) else ctx
+
/** If -Ydebug is on, the top of the stack trace where this context
* was created, otherwise `null`.
*/
@@ -422,9 +425,18 @@ object Contexts {
final def withOwner(owner: Symbol): Context =
if (owner ne this.owner) fresh.setOwner(owner) else this
- override def toString =
+ final def withProperty[T](key: Key[T], value: Option[T]): Context =
+ if (property(key) == value) this
+ else value match {
+ case Some(v) => fresh.setProperty(key, v)
+ case None => fresh.dropProperty(key)
+ }
+
+ override def toString = {
+ def iinfo(implicit ctx: Context) = if (ctx.importInfo == null) "" else i"${ctx.importInfo.selectors}%, %"
"Context(\n" +
- (outersIterator map ( ctx => s" owner = ${ctx.owner}, scope = ${ctx.scope}") mkString "\n")
+ (outersIterator map ( ctx => s" owner = ${ctx.owner}, scope = ${ctx.scope}, import = ${iinfo(ctx)}") mkString "\n")
+ }
}
/** A condensed context provides only a small memory footprint over
@@ -468,6 +480,9 @@ object Contexts {
def setProperty[T](key: Key[T], value: T): this.type =
setMoreProperties(moreProperties.updated(key, value))
+ def dropProperty(key: Key[_]): this.type =
+ setMoreProperties(moreProperties - key)
+
def setPhase(pid: PhaseId): this.type = setPeriod(Period(runId, pid))
def setPhase(phase: Phase): this.type = setPeriod(Period(runId, phase.start, phase.end))
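
The withProperty/dropProperty pair above gives contexts an Option-driven way to install or clear a property while reusing the current context when nothing changes. A small usage sketch with an illustrative key (the Label key and withLabel helper are not part of the patch):

  import dotty.tools.dotc.core.Contexts.Context
  import dotty.tools.dotc.util.Property

  // An illustrative key; real keys are created the same way, e.g.
  // `val DocComment = new Property.Key[Comment]` in Trees.scala.
  val Label = new Property.Key[String]

  // Some(value) installs or replaces the property, None drops it, and an
  // unchanged value returns this context instead of a fresh one.
  def withLabel(label: Option[String])(implicit ctx: Context): Context =
    ctx.withProperty(Label, label)
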
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index 4b090d9b1..45e37eb8b 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -12,13 +12,20 @@ import collection.mutable
import scala.reflect.api.{ Universe => ApiUniverse }
object Definitions {
- val MaxTupleArity, MaxAbstractFunctionArity = 22
- val MaxFunctionArity = 30
- // Awaiting a definite solution that drops the limit altogether, 30 gives a safety
- // margin over the previous 22, so that treecopiers in miniphases are allowed to
- // temporarily create larger closures. This is needed in lambda lift where large closures
- // are first formed by treecopiers before they are split apart into parameters and
- // environment in the lambdalift transform itself.
+
+ /** The maximum number of elements in a tuple or product.
+ * This should be removed once we go to hlists.
+ */
+ val MaxTupleArity = 22
+
+ /** The maximum arity N of a function type that's implemented
+ * as a trait `scala.FunctionN`. Functions of higher arity are possible,
+ * but are mapped in erasure to functions taking a single parameter of type
+ * Object[].
+ * The limit 22 is chosen for Scala2x interop. It could be something
+ * else without affecting the set of programs that can be compiled.
+ */
+ val MaxImplementedFunctionArity = 22
}
/** A class defining symbols and types of standard definitions
@@ -45,32 +52,29 @@ class Definitions {
ctx.newSymbol(owner, name, flags | Permanent, info)
private def newClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, infoFn: ClassSymbol => Type) =
- ctx.newClassSymbol(owner, name, flags | Permanent, infoFn).entered
+ ctx.newClassSymbol(owner, name, flags | Permanent, infoFn)
- private def newCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope = newScope) =
+ private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope = newScope) =
ctx.newCompleteClassSymbol(owner, name, flags | Permanent, parents, decls).entered
- private def newTopClassSymbol(name: TypeName, flags: FlagSet, parents: List[TypeRef]) =
- completeClass(newCompleteClassSymbol(ScalaPackageClass, name, flags, parents))
-
- private def newTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) =
+ private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) =
scope.enter(newSymbol(cls, name, flags, TypeBounds.empty))
- private def newTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) =
- newTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope)
+ private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) =
+ enterTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope)
- private def newSyntheticTypeParam(cls: ClassSymbol, scope: MutableScope, paramFlags: FlagSet, suffix: String = "T0") =
- newTypeParam(cls, suffix.toTypeName.expandedName(cls), ExpandedName | paramFlags, scope)
+ private def enterSyntheticTypeParam(cls: ClassSymbol, paramFlags: FlagSet, scope: MutableScope, suffix: String = "T0") =
+ enterTypeParam(cls, suffix.toTypeName.expandedName(cls), ExpandedName | paramFlags, scope)
// NOTE: Ideally we would write `parentConstrs: => Type*` but SIP-24 is only
// implemented in Dotty and not in Scala 2.
// See <http://docs.scala-lang.org/sips/pending/repeated-byname.html>.
- private def specialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: => Seq[Type]): ClassSymbol = {
+ private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: => Seq[Type]): ClassSymbol = {
val completer = new LazyType {
def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
val cls = denot.asClass.classSymbol
val paramDecls = newScope
- val typeParam = newSyntheticTypeParam(cls, paramDecls, paramFlags)
+ val typeParam = enterSyntheticTypeParam(cls, paramFlags, paramDecls)
def instantiate(tpe: Type) =
if (tpe.typeParams.nonEmpty) tpe.appliedTo(typeParam.typeRef)
else tpe
@@ -79,31 +83,81 @@ class Definitions {
denot.info = ClassInfo(ScalaPackageClass.thisType, cls, parentRefs, paramDecls)
}
}
- newClassSymbol(ScalaPackageClass, name, EmptyFlags, completer)
+ newClassSymbol(ScalaPackageClass, name, EmptyFlags, completer).entered
+ }
+
+ /** The trait FunctionN or ImplicitFunctionN, for some N
+ * @param name The name of the trait to be created
+ *
+ * FunctionN traits follow this template:
+ *
+ * trait FunctionN[T0,...T{N-1}, R] extends Object {
+ * def apply($x0: T0, ..., $x{N_1}: T{N-1}): R
+ * }
+ *
+ * That is, they follow the template given for Function2..Function22 in the
+ * standard library, but without `tupled` and `curried` methods and without
+ * a `toString`.
+ *
+ * ImplicitFunctionN traits follow this template:
+ *
+ * trait ImplicitFunctionN[T0,...,T{N-1}, R] extends Object with FunctionN[T0,...,T{N-1}, R] {
+ * def apply(implicit $x0: T0, ..., $x{N_1}: T{N-1}): R
+ * }
+ */
+ private def newFunctionNTrait(name: TypeName) = {
+ val completer = new LazyType {
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val cls = denot.asClass.classSymbol
+ val decls = newScope
+ val arity = name.functionArity
+ val argParams =
+ for (i <- List.range(0, arity)) yield
+ enterTypeParam(cls, name ++ "$T" ++ i.toString, Contravariant, decls)
+ val resParam = enterTypeParam(cls, name ++ "$R", Covariant, decls)
+ val (methodType, parentTraits) =
+ if (name.startsWith(tpnme.ImplicitFunction)) {
+ val superTrait =
+ FunctionType(arity).appliedTo(argParams.map(_.typeRef) ::: resParam.typeRef :: Nil)
+ (ImplicitMethodType, ctx.normalizeToClassRefs(superTrait :: Nil, cls, decls))
+ }
+ else (MethodType, Nil)
+ val applyMeth =
+ decls.enter(
+ newMethod(cls, nme.apply,
+ methodType(argParams.map(_.typeRef), resParam.typeRef), Deferred))
+ denot.info =
+ ClassInfo(ScalaPackageClass.thisType, cls, ObjectType :: parentTraits, decls)
+ }
+ }
+ newClassSymbol(ScalaPackageClass, name, Trait, completer)
}
private def newMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol =
- newSymbol(cls, name.encode, flags | Method, info).entered.asTerm
+ newSymbol(cls, name.encode, flags | Method, info).asTerm
+
+ private def enterMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol =
+ newMethod(cls, name, info, flags).entered
- private def newAliasType(name: TypeName, tpe: Type, flags: FlagSet = EmptyFlags): TypeSymbol = {
+ private def enterAliasType(name: TypeName, tpe: Type, flags: FlagSet = EmptyFlags): TypeSymbol = {
val sym = newSymbol(ScalaPackageClass, name, flags, TypeAlias(tpe))
ScalaPackageClass.currentPackageDecls.enter(sym)
sym
}
- private def newPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int,
+ private def enterPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int,
resultTypeFn: PolyType => Type, flags: FlagSet = EmptyFlags) = {
val tparamNames = tpnme.syntheticTypeParamNames(typeParamCount)
val tparamBounds = tparamNames map (_ => TypeBounds.empty)
val ptype = PolyType(tparamNames)(_ => tparamBounds, resultTypeFn)
- newMethod(cls, name, ptype, flags)
+ enterMethod(cls, name, ptype, flags)
}
- private def newT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) =
- newPolyMethod(cls, name, 1, resultTypeFn, flags)
+ private def enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) =
+ enterPolyMethod(cls, name, 1, resultTypeFn, flags)
- private def newT1EmptyParamsMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) =
- newPolyMethod(cls, name, 1, pt => MethodType(Nil, resultTypeFn(pt)), flags)
+ private def enterT1EmptyParamsMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) =
+ enterPolyMethod(cls, name, 1, pt => MethodType(Nil, resultTypeFn(pt)), flags)
private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef] = {
val arr = new Array[TypeRef](arity + 1)
@@ -172,20 +226,20 @@ class Definitions {
* def getClass: java.lang.Class[T] = ???
* }
*/
- lazy val AnyClass: ClassSymbol = completeClass(newCompleteClassSymbol(ScalaPackageClass, tpnme.Any, Abstract, Nil))
+ lazy val AnyClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.Any, Abstract, Nil))
def AnyType = AnyClass.typeRef
- lazy val AnyValClass: ClassSymbol = completeClass(newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyVal, Abstract, List(AnyClass.typeRef)))
+ lazy val AnyValClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.AnyVal, Abstract, List(AnyClass.typeRef)))
def AnyValType = AnyValClass.typeRef
- lazy val Any_== = newMethod(AnyClass, nme.EQ, methOfAny(BooleanType), Final)
- lazy val Any_!= = newMethod(AnyClass, nme.NE, methOfAny(BooleanType), Final)
- lazy val Any_equals = newMethod(AnyClass, nme.equals_, methOfAny(BooleanType))
- lazy val Any_hashCode = newMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType))
- lazy val Any_toString = newMethod(AnyClass, nme.toString_, MethodType(Nil, StringType))
- lazy val Any_## = newMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final)
- lazy val Any_getClass = newMethod(AnyClass, nme.getClass_, MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.empty)), Final)
- lazy val Any_isInstanceOf = newT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final)
- lazy val Any_asInstanceOf = newT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, PolyParam(_, 0), Final)
+ lazy val Any_== = enterMethod(AnyClass, nme.EQ, methOfAny(BooleanType), Final)
+ lazy val Any_!= = enterMethod(AnyClass, nme.NE, methOfAny(BooleanType), Final)
+ lazy val Any_equals = enterMethod(AnyClass, nme.equals_, methOfAny(BooleanType))
+ lazy val Any_hashCode = enterMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType))
+ lazy val Any_toString = enterMethod(AnyClass, nme.toString_, MethodType(Nil, StringType))
+ lazy val Any_## = enterMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final)
+ lazy val Any_getClass = enterMethod(AnyClass, nme.getClass_, MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.empty)), Final)
+ lazy val Any_isInstanceOf = enterT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final)
+ lazy val Any_asInstanceOf = enterT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, PolyParam(_, 0), Final)
def AnyMethods = List(Any_==, Any_!=, Any_equals, Any_hashCode,
Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_asInstanceOf)
@@ -205,37 +259,37 @@ class Definitions {
}
def ObjectType = ObjectClass.typeRef
- lazy val AnyRefAlias: TypeSymbol = newAliasType(tpnme.AnyRef, ObjectType)
+ lazy val AnyRefAlias: TypeSymbol = enterAliasType(tpnme.AnyRef, ObjectType)
def AnyRefType = AnyRefAlias.typeRef
- lazy val Object_eq = newMethod(ObjectClass, nme.eq, methOfAnyRef(BooleanType), Final)
- lazy val Object_ne = newMethod(ObjectClass, nme.ne, methOfAnyRef(BooleanType), Final)
- lazy val Object_synchronized = newPolyMethod(ObjectClass, nme.synchronized_, 1,
+ lazy val Object_eq = enterMethod(ObjectClass, nme.eq, methOfAnyRef(BooleanType), Final)
+ lazy val Object_ne = enterMethod(ObjectClass, nme.ne, methOfAnyRef(BooleanType), Final)
+ lazy val Object_synchronized = enterPolyMethod(ObjectClass, nme.synchronized_, 1,
pt => MethodType(List(PolyParam(pt, 0)), PolyParam(pt, 0)), Final)
- lazy val Object_clone = newMethod(ObjectClass, nme.clone_, MethodType(Nil, ObjectType), Protected)
- lazy val Object_finalize = newMethod(ObjectClass, nme.finalize_, MethodType(Nil, UnitType), Protected)
- lazy val Object_notify = newMethod(ObjectClass, nme.notify_, MethodType(Nil, UnitType))
- lazy val Object_notifyAll = newMethod(ObjectClass, nme.notifyAll_, MethodType(Nil, UnitType))
- lazy val Object_wait = newMethod(ObjectClass, nme.wait_, MethodType(Nil, UnitType))
- lazy val Object_waitL = newMethod(ObjectClass, nme.wait_, MethodType(LongType :: Nil, UnitType))
- lazy val Object_waitLI = newMethod(ObjectClass, nme.wait_, MethodType(LongType :: IntType :: Nil, UnitType))
+ lazy val Object_clone = enterMethod(ObjectClass, nme.clone_, MethodType(Nil, ObjectType), Protected)
+ lazy val Object_finalize = enterMethod(ObjectClass, nme.finalize_, MethodType(Nil, UnitType), Protected)
+ lazy val Object_notify = enterMethod(ObjectClass, nme.notify_, MethodType(Nil, UnitType))
+ lazy val Object_notifyAll = enterMethod(ObjectClass, nme.notifyAll_, MethodType(Nil, UnitType))
+ lazy val Object_wait = enterMethod(ObjectClass, nme.wait_, MethodType(Nil, UnitType))
+ lazy val Object_waitL = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: Nil, UnitType))
+ lazy val Object_waitLI = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: IntType :: Nil, UnitType))
def ObjectMethods = List(Object_eq, Object_ne, Object_synchronized, Object_clone,
Object_finalize, Object_notify, Object_notifyAll, Object_wait, Object_waitL, Object_waitLI)
/** Dummy method needed by elimByName */
- lazy val dummyApply = newPolyMethod(
+ lazy val dummyApply = enterPolyMethod(
OpsPackageClass, nme.dummyApply, 1,
pt => MethodType(List(FunctionOf(Nil, PolyParam(pt, 0))), PolyParam(pt, 0)))
/** Method representing a throw */
- lazy val throwMethod = newMethod(OpsPackageClass, nme.THROWkw,
+ lazy val throwMethod = enterMethod(OpsPackageClass, nme.THROWkw,
MethodType(List(ThrowableType), NothingType))
- lazy val NothingClass: ClassSymbol = newCompleteClassSymbol(
+ lazy val NothingClass: ClassSymbol = enterCompleteClassSymbol(
ScalaPackageClass, tpnme.Nothing, AbstractFinal, List(AnyClass.typeRef))
def NothingType = NothingClass.typeRef
- lazy val NullClass: ClassSymbol = newCompleteClassSymbol(
+ lazy val NullClass: ClassSymbol = enterCompleteClassSymbol(
ScalaPackageClass, tpnme.Null, AbstractFinal, List(ObjectClass.typeRef))
def NullType = NullClass.typeRef
@@ -281,7 +335,7 @@ class Definitions {
lazy val SingletonClass: ClassSymbol =
// needed as a synthetic class because Scala 2.x refers to it in classfiles
// but does not define it as an explicit class.
- newCompleteClassSymbol(
+ enterCompleteClassSymbol(
ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final,
List(AnyClass.typeRef), EmptyScope)
@@ -387,17 +441,17 @@ class Definitions {
lazy val BoxedDoubleModule = ctx.requiredModule("java.lang.Double")
lazy val BoxedUnitModule = ctx.requiredModule("java.lang.Void")
- lazy val ByNameParamClass2x = specialPolyClass(tpnme.BYNAME_PARAM_CLASS, Covariant, Seq(AnyType))
- lazy val EqualsPatternClass = specialPolyClass(tpnme.EQUALS_PATTERN, EmptyFlags, Seq(AnyType))
+ lazy val ByNameParamClass2x = enterSpecialPolyClass(tpnme.BYNAME_PARAM_CLASS, Covariant, Seq(AnyType))
+ lazy val EqualsPatternClass = enterSpecialPolyClass(tpnme.EQUALS_PATTERN, EmptyFlags, Seq(AnyType))
- lazy val RepeatedParamClass = specialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType))
+ lazy val RepeatedParamClass = enterSpecialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType))
// fundamental classes
lazy val StringClass = ctx.requiredClass("java.lang.String")
def StringType: Type = StringClass.typeRef
lazy val StringModule = StringClass.linkedClass
- lazy val String_+ = newMethod(StringClass, nme.raw.PLUS, methOfAny(StringType), Final)
+ lazy val String_+ = enterMethod(StringClass, nme.raw.PLUS, methOfAny(StringType), Final)
lazy val String_valueOf_Object = StringModule.info.member(nme.valueOf).suchThat(_.info.firstParamTypes match {
case List(pt) => (pt isRef AnyClass) || (pt isRef ObjectClass)
case _ => false
@@ -431,6 +485,9 @@ class Definitions {
def PartialFunctionClass(implicit ctx: Context) = PartialFunctionType.symbol.asClass
lazy val AbstractPartialFunctionType: TypeRef = ctx.requiredClassRef("scala.runtime.AbstractPartialFunction")
def AbstractPartialFunctionClass(implicit ctx: Context) = AbstractPartialFunctionType.symbol.asClass
+ lazy val FunctionXXLType: TypeRef = ctx.requiredClassRef("scala.FunctionXXL")
+ def FunctionXXLClass(implicit ctx: Context) = FunctionXXLType.symbol.asClass
+
lazy val SymbolType: TypeRef = ctx.requiredClassRef("scala.Symbol")
def SymbolClass(implicit ctx: Context) = SymbolType.symbol.asClass
lazy val DynamicType: TypeRef = ctx.requiredClassRef("scala.Dynamic")
@@ -560,16 +617,18 @@ class Definitions {
sym.owner.linkedClass.typeRef
object FunctionOf {
- def apply(args: List[Type], resultType: Type)(implicit ctx: Context) =
- FunctionType(args.length).appliedTo(args ::: resultType :: Nil)
- def unapply(ft: Type)(implicit ctx: Context)/*: Option[(List[Type], Type)]*/ = {
- // -language:keepUnions difference: unapply needs result type because inferred type
- // is Some[(List[Type], Type)] | None, which is not a legal unapply type.
+ def apply(args: List[Type], resultType: Type, isImplicit: Boolean = false)(implicit ctx: Context) =
+ FunctionType(args.length, isImplicit).appliedTo(args ::: resultType :: Nil)
+ def unapply(ft: Type)(implicit ctx: Context) = {
val tsym = ft.typeSymbol
- lazy val targs = ft.argInfos
- val numArgs = targs.length - 1
- if (numArgs >= 0 && numArgs <= MaxFunctionArity &&
- (FunctionType(numArgs).symbol == tsym)) Some(targs.init, targs.last)
+ val isImplicitFun = isImplicitFunctionClass(tsym)
+ if (isImplicitFun || isFunctionClass(tsym)) {
+ val targs = ft.argInfos
+ val numArgs = targs.length - 1
+ if (numArgs >= 0 && FunctionType(numArgs, isImplicitFun).symbol == tsym)
+ Some(targs.init, targs.last, isImplicitFun)
+ else None
+ }
else None
}
}
@@ -612,19 +671,30 @@ class Definitions {
// ----- Symbol sets ---------------------------------------------------
- lazy val AbstractFunctionType = mkArityArray("scala.runtime.AbstractFunction", MaxAbstractFunctionArity, 0)
+ lazy val AbstractFunctionType = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0)
val AbstractFunctionClassPerRun = new PerRun[Array[Symbol]](implicit ctx => AbstractFunctionType.map(_.symbol.asClass))
def AbstractFunctionClass(n: Int)(implicit ctx: Context) = AbstractFunctionClassPerRun()(ctx)(n)
- lazy val FunctionType = mkArityArray("scala.Function", MaxFunctionArity, 0)
- def FunctionClassPerRun = new PerRun[Array[Symbol]](implicit ctx => FunctionType.map(_.symbol.asClass))
- def FunctionClass(n: Int)(implicit ctx: Context) = FunctionClassPerRun()(ctx)(n)
- lazy val Function0_applyR = FunctionType(0).symbol.requiredMethodRef(nme.apply)
- def Function0_apply(implicit ctx: Context) = Function0_applyR.symbol
+ private lazy val ImplementedFunctionType = mkArityArray("scala.Function", MaxImplementedFunctionArity, 0)
+ def FunctionClassPerRun = new PerRun[Array[Symbol]](implicit ctx => ImplementedFunctionType.map(_.symbol.asClass))
lazy val TupleType = mkArityArray("scala.Tuple", MaxTupleArity, 2)
lazy val ProductNType = mkArityArray("scala.Product", MaxTupleArity, 0)
- private lazy val FunctionTypes: Set[TypeRef] = FunctionType.toSet
+ def FunctionClass(n: Int)(implicit ctx: Context) =
+ if (n < MaxImplementedFunctionArity) FunctionClassPerRun()(ctx)(n)
+ else ctx.requiredClass("scala.Function" + n.toString)
+
+ lazy val Function0_applyR = ImplementedFunctionType(0).symbol.requiredMethodRef(nme.apply)
+ def Function0_apply(implicit ctx: Context) = Function0_applyR.symbol
+
+ def ImplicitFunctionClass(n: Int)(implicit ctx: Context) =
+ ctx.requiredClass("scala.ImplicitFunction" + n.toString)
+
+ def FunctionType(n: Int, isImplicit: Boolean = false)(implicit ctx: Context): TypeRef =
+ if (isImplicit && !ctx.erasedTypes) ImplicitFunctionClass(n).typeRef
+ else if (n < MaxImplementedFunctionArity) ImplementedFunctionType(n)
+ else FunctionClass(n).typeRef
+
private lazy val TupleTypes: Set[TypeRef] = TupleType.toSet
private lazy val ProductTypes: Set[TypeRef] = ProductNType.toSet
@@ -637,7 +707,9 @@ class Definitions {
private def isVarArityClass(cls: Symbol, prefix: Name) = {
val name = scalaClassName(cls)
- name.startsWith(prefix) && name.drop(prefix.length).forall(_.isDigit)
+ name.startsWith(prefix) &&
+ name.length > prefix.length &&
+ name.drop(prefix.length).forall(_.isDigit)
}
def isBottomClass(cls: Symbol) =
@@ -646,10 +718,20 @@ class Definitions {
tp.derivesFrom(NothingClass) || tp.derivesFrom(NullClass)
def isFunctionClass(cls: Symbol) = isVarArityClass(cls, tpnme.Function)
+ def isImplicitFunctionClass(cls: Symbol) = isVarArityClass(cls, tpnme.ImplicitFunction)
+ def isUnimplementedFunctionClass(cls: Symbol) =
+ isFunctionClass(cls) && cls.name.functionArity > MaxImplementedFunctionArity
def isAbstractFunctionClass(cls: Symbol) = isVarArityClass(cls, tpnme.AbstractFunction)
def isTupleClass(cls: Symbol) = isVarArityClass(cls, tpnme.Tuple)
def isProductClass(cls: Symbol) = isVarArityClass(cls, tpnme.Product)
+ val predefClassNames: Set[Name] =
+ Set("Predef$", "DeprecatedPredef", "LowPriorityImplicits").map(_.toTypeName)
+
+ /** Is `cls` the predef module class, or a class inherited by Predef? */
+ def isPredefClass(cls: Symbol) =
+ (cls.owner eq ScalaPackageClass) && predefClassNames.contains(cls.name)
+
val StaticRootImportFns = List[() => TermRef](
() => JavaLangPackageVal.termRef,
() => ScalaPackageVal.termRef
@@ -688,13 +770,29 @@ class Definitions {
def isProductSubType(tp: Type)(implicit ctx: Context) =
(tp derivesFrom ProductType.symbol) && tp.baseClasses.exists(isProductClass)
+ def productArity(tp: Type)(implicit ctx: Context) =
+ if (tp derivesFrom ProductType.symbol)
+ tp.baseClasses.find(isProductClass) match {
+ case Some(prod) => prod.typeParams.length
+ case None => -1
+ }
+ else -1
+
+ /** Is `tp` (an alias of) either a scala.FunctionN or a scala.ImplicitFunctionN? */
def isFunctionType(tp: Type)(implicit ctx: Context) = {
val arity = functionArity(tp)
- 0 <= arity && arity <= MaxFunctionArity && (tp isRef FunctionType(arity).symbol)
+ val sym = tp.dealias.typeSymbol
+ arity >= 0 && (
+ isFunctionClass(sym) && tp.isRef(FunctionType(arity, isImplicit = false).typeSymbol) ||
+ isImplicitFunctionClass(sym) && tp.isRef(FunctionType(arity, isImplicit = true).typeSymbol)
+ )
}
def functionArity(tp: Type)(implicit ctx: Context) = tp.dealias.argInfos.length - 1
+ def isImplicitFunctionType(tp: Type)(implicit ctx: Context) =
+ isFunctionType(tp) && tp.dealias.typeSymbol.name.startsWith(tpnme.ImplicitFunction)
+
// ----- primitive value class machinery ------------------------------------------
/** This class would also be obviated by the implicit function type design */
@@ -767,6 +865,26 @@ class Definitions {
// ----- Initialization ---------------------------------------------------
+ private def maxImplemented(name: Name) =
+ if (name `startsWith` tpnme.Function) MaxImplementedFunctionArity else 0
+
+ /** Give the scala package a scope where a FunctionN trait is automatically
+ * added when someone looks for it.
+ */
+ private def makeScalaSpecial()(implicit ctx: Context) = {
+ val oldInfo = ScalaPackageClass.classInfo
+ val oldDecls = oldInfo.decls
+ val newDecls = new MutableScope(oldDecls) {
+ override def lookupEntry(name: Name)(implicit ctx: Context): ScopeEntry = {
+ val res = super.lookupEntry(name)
+ if (res == null && name.isTypeName && name.functionArity > maxImplemented(name))
+ newScopeEntry(newFunctionNTrait(name.asTypeName))
+ else res
+ }
+ }
+ ScalaPackageClass.info = oldInfo.derivedClassInfo(decls = newDecls)
+ }
+
/** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
lazy val syntheticScalaClasses = List(
AnyClass,
@@ -794,6 +912,8 @@ class Definitions {
def init()(implicit ctx: Context) = {
this.ctx = ctx
if (!_isInitialized) {
+ makeScalaSpecial()
+
// force initialization of every symbol that is synthesized or hijacked by the compiler
val forced = syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses()
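
Illustrative sketch (not part of the patch): the arity dispatch that the new FunctionClass/FunctionType logic above implements, reduced to plain strings. MaxImplementedFunctionArity = 22 and the returned descriptions are assumptions for illustration; the real methods resolve compiler symbols, not names.

object FunctionArityDispatchSketch {
  val MaxImplementedFunctionArity = 22  // assumed value of the Definitions constant

  // Returns the class name a function type of arity `n` resolves to, plus where it comes from.
  def functionClass(n: Int, isImplicit: Boolean = false): (String, String) =
    if (isImplicit)
      (s"scala.ImplicitFunction$n", "required class, used until erasure")
    else if (n < MaxImplementedFunctionArity)
      (s"scala.Function$n", "cached per-run arity array")
    else
      (s"scala.Function$n", "trait synthesized on lookup; erases to scala.FunctionXXL")

  def main(args: Array[String]): Unit = {
    println(functionClass(2))                     // (scala.Function2, cached per-run arity array)
    println(functionClass(3, isImplicit = true))  // (scala.ImplicitFunction3, ...)
    println(functionClass(30))                    // (scala.Function30, synthesized trait)
  }
}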
diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala
index 6a39c5787..99c688d50 100644
--- a/compiler/src/dotty/tools/dotc/core/Denotations.scala
+++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala
@@ -132,7 +132,7 @@ object Denotations {
def atSignature(sig: Signature, site: Type = NoPrefix, relaxed: Boolean = false)(implicit ctx: Context): Denotation
/** The variant of this denotation that's current in the given context.
- * If no such denotation exists, returns the denotation with each alternative
+ * If no such denotation exists, returns the denotation with each alternative
* at its first point of definition.
*/
def current(implicit ctx: Context): Denotation
@@ -744,6 +744,20 @@ object Denotations {
else NoDenotation
}
+ /** The next defined denotation (following `nextInRun`) or an arbitrary
+ * undefined denotation, if all denotations in a `nextInRun` cycle are
+ * undefined.
+ */
+ private def nextDefined: SingleDenotation = {
+ var p1 = this
+ var p2 = nextInRun
+ while (p1.validFor == Nowhere && (p1 ne p2)) {
+ p1 = p1.nextInRun
+ p2 = p2.nextInRun.nextInRun
+ }
+ p1
+ }
+
/** Produce a denotation that is valid for the given context.
* Usually called when !(validFor contains ctx.period)
* (even though this is not a precondition).
@@ -763,8 +777,9 @@ object Denotations {
// can happen if we sit on a stale denotation which has been replaced
// wholesale by an installAfter; in this case, proceed to the next
// denotation and try again.
- if (validFor == Nowhere && nextInRun.validFor != Nowhere) return nextInRun.current
- assert(false)
+ val nxt = nextDefined
+ if (nxt.validFor != Nowhere) return nxt
+ assert(false, this)
}
if (valid.runId != currentPeriod.runId)
@@ -905,6 +920,7 @@ object Denotations {
prev.nextInRun = this
this.nextInRun = old.nextInRun
old.validFor = Nowhere
+ old.nextInRun = this
}
def staleSymbolError(implicit ctx: Context) = {
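
A standalone sketch of the nextDefined traversal added above, using a plain node class instead of SingleDenotation (hypothetical names). The second pointer advances twice as fast, so the walk terminates even when every entry in the nextInRun ring is undefined.

object NextDefinedSketch {
  final class Node(val defined: Boolean) { var next: Node = this }

  def nextDefined(start: Node): Node = {
    var p1 = start
    var p2 = start.next
    while (!p1.defined && (p1 ne p2)) {
      p1 = p1.next        // slow pointer: candidate result
      p2 = p2.next.next   // fast pointer: cycle guard
    }
    p1 // a defined node, or an arbitrary undefined one if the whole ring is undefined
  }

  def main(args: Array[String]): Unit = {
    val a = new Node(false); val b = new Node(true); val c = new Node(false)
    a.next = b; b.next = c; c.next = a   // ring with one defined entry
    assert(nextDefined(a).defined)       // finds b
    val lone = new Node(false)           // self-ring, nothing defined
    assert(!nextDefined(lone).defined)   // still terminates
  }
}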
diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala
index 4c7f5b0a9..c037d1ce7 100644
--- a/compiler/src/dotty/tools/dotc/core/NameOps.scala
+++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala
@@ -188,6 +188,8 @@ object NameOps {
def errorName: N = likeTyped(name ++ nme.ERROR)
+ def directName: N = likeTyped(name ++ DIRECT_SUFFIX)
+
def freshened(implicit ctx: Context): N =
likeTyped(
if (name.isModuleClassName) name.stripModuleClassSuffix.freshened.moduleClassName
@@ -229,6 +231,15 @@ object NameOps {
}
}
+ def functionArity: Int = {
+ def test(prefix: Name): Int =
+ if (name.startsWith(prefix))
+ try name.drop(prefix.length).toString.toInt
+ catch { case ex: NumberFormatException => -1 }
+ else -1
+ test(tpnme.Function) max test(tpnme.ImplicitFunction)
+ }
+
/** The name of the generic runtime operation corresponding to an array operation */
def genericArrayOp: TermName = name match {
case nme.apply => nme.array_apply
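
A minimal sketch of the functionArity extension added above, working on plain strings rather than Names: the arity is the numeric suffix after "Function" or "ImplicitFunction", and -1 otherwise.

object FunctionArityNameSketch {
  private def suffixArity(name: String, prefix: String): Int =
    if (name.startsWith(prefix))
      try name.drop(prefix.length).toInt
      catch { case _: NumberFormatException => -1 }  // empty or non-numeric suffix
    else -1

  def functionArity(name: String): Int =
    suffixArity(name, "Function") max suffixArity(name, "ImplicitFunction")

  def main(args: Array[String]): Unit = {
    println(functionArity("Function3"))          // 3
    println(functionArity("ImplicitFunction2"))  // 2
    println(functionArity("Tuple2"))             // -1
    println(functionArity("Function"))           // -1, empty suffix
  }
}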
diff --git a/compiler/src/dotty/tools/dotc/core/Periods.scala b/compiler/src/dotty/tools/dotc/core/Periods.scala
index 6efadab7f..29d9d208f 100644
--- a/compiler/src/dotty/tools/dotc/core/Periods.scala
+++ b/compiler/src/dotty/tools/dotc/core/Periods.scala
@@ -153,7 +153,7 @@ object Periods {
final val FirstPhaseId = 1
/** The number of bits needed to encode a phase identifier. */
- final val PhaseWidth = 6
+ final val PhaseWidth = 7
final val PhaseMask = (1 << PhaseWidth) - 1
final val MaxPossiblePhaseId = PhaseMask
}
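
The widened PhaseWidth changes how many phase ids fit in a period; a two-line sketch of the arithmetic:

object PhaseBitsSketch {
  def phaseMask(phaseWidth: Int): Int = (1 << phaseWidth) - 1

  def main(args: Array[String]): Unit = {
    println(phaseMask(6)) // 63: old MaxPossiblePhaseId
    println(phaseMask(7)) // 127: new MaxPossiblePhaseId
  }
}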
diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala
index 222e2235d..6a53e1b30 100644
--- a/compiler/src/dotty/tools/dotc/core/Phases.scala
+++ b/compiler/src/dotty/tools/dotc/core/Phases.scala
@@ -26,7 +26,10 @@ trait Phases {
def phasesStack: List[Phase] =
if ((this eq NoContext) || !phase.exists) Nil
- else phase :: outersIterator.dropWhile(_.phase == phase).next.phasesStack
+ else {
+ val rest = outersIterator.dropWhile(_.phase == phase)
+ phase :: (if (rest.hasNext) rest.next.phasesStack else Nil)
+ }
/** Execute `op` at given phase */
def atPhase[T](phase: Phase)(op: Context => T): T =
diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala
index 3daa8117e..6090079e5 100644
--- a/compiler/src/dotty/tools/dotc/core/Scopes.scala
+++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala
@@ -309,7 +309,7 @@ object Scopes {
/** Lookup a symbol entry matching given name.
*/
- override final def lookupEntry(name: Name)(implicit ctx: Context): ScopeEntry = {
+ override def lookupEntry(name: Name)(implicit ctx: Context): ScopeEntry = {
var e: ScopeEntry = null
if (hashTable ne null) {
e = hashTable(name.hashCode & (hashTable.length - 1))
diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala
index c2a14b36f..716959648 100644
--- a/compiler/src/dotty/tools/dotc/core/StdNames.scala
+++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala
@@ -129,6 +129,7 @@ object StdNames {
val COMPANION_MODULE_METHOD: N = "companion$module"
val COMPANION_CLASS_METHOD: N = "companion$class"
val TRAIT_SETTER_SEPARATOR: N = "$_setter_$"
+ val DIRECT_SUFFIX: N = "$direct"
// value types (and AnyRef) are all used as terms as well
// as (at least) arguments to the @specialize annotation.
@@ -181,6 +182,7 @@ object StdNames {
final val AnyVal: N = "AnyVal"
final val ExprApi: N = "ExprApi"
final val Function: N = "Function"
+ final val ImplicitFunction: N = "ImplicitFunction"
final val Mirror: N = "Mirror"
final val Nothing: N = "Nothing"
final val Null: N = "Null"
@@ -265,6 +267,7 @@ object StdNames {
val THIS: N = "_$this"
val TRAIT_CONSTRUCTOR: N = "$init$"
val U2EVT: N = "u2evt$"
+ val ALLARGS: N = "$allArgs"
final val Nil: N = "Nil"
final val Predef: N = "Predef"
@@ -423,7 +426,6 @@ object StdNames {
val info: N = "info"
val inlinedEquals: N = "inlinedEquals"
val isArray: N = "isArray"
- val isDefined: N = "isDefined"
val isDefinedAt: N = "isDefinedAt"
val isDefinedAtImpl: N = "$isDefinedAt"
val isEmpty: N = "isEmpty"
diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
index 8b7c28e19..aaae78c57 100644
--- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
+++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
@@ -75,7 +75,7 @@ trait SymDenotations { this: Context =>
def explainSym(msg: String) = explain(s"$msg\n defined = ${denot.definedPeriodsString}")
if (denot.is(ValidForever) || denot.isRefinementClass) true
else {
- implicit val ctx: Context = this
+ implicit val ctx = this
val initial = denot.initial
if ((initial ne denot) || ctx.phaseId != initial.validFor.firstPhaseId) {
ctx.withPhase(initial.validFor.firstPhaseId).traceInvalid(initial)
@@ -1937,12 +1937,12 @@ object SymDenotations {
/** A completer for missing references */
class StubInfo() extends LazyType {
- def initializeToDefaults(denot: SymDenotation)(implicit ctx: Context) = {
+ def initializeToDefaults(denot: SymDenotation, errMsg: => String)(implicit ctx: Context) = {
denot.info = denot match {
case denot: ClassDenotation =>
ClassInfo(denot.owner.thisType, denot.classSymbol, Nil, EmptyScope)
case _ =>
- ErrorType
+ new ErrorType(errMsg)
}
denot.privateWithin = NoSymbol
}
@@ -1954,13 +1954,14 @@ object SymDenotations {
if (file != null) (s" in $file", file.toString)
else ("", "the signature")
val name = ctx.fresh.setSetting(ctx.settings.debugNames, true).nameString(denot.name)
- ctx.error(
+ def errMsg =
i"""bad symbolic reference. A signature$location
|refers to $name in ${denot.owner.showKind} ${denot.owner.showFullName} which is not available.
|It may be completely missing from the current classpath, or the version on
- |the classpath might be incompatible with the version used when compiling $src.""")
+ |the classpath might be incompatible with the version used when compiling $src."""
+ ctx.error(errMsg)
if (ctx.debug) throw new Error()
- initializeToDefaults(denot)
+ initializeToDefaults(denot, errMsg)
}
}
diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala
index cfd85c49c..5d0dd2123 100644
--- a/compiler/src/dotty/tools/dotc/core/Symbols.scala
+++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala
@@ -22,6 +22,7 @@ import NameOps._
import ast.tpd.Tree
import ast.TreeTypeMap
import Constants.Constant
+import reporting.diagnostic.Message
import Denotations.{ Denotation, SingleDenotation, MultiDenotation }
import collection.mutable
import io.AbstractFile
@@ -290,9 +291,11 @@ trait Symbols { this: Context =>
*/
def newSkolem(tp: Type) = newSymbol(defn.RootClass, nme.SKOLEM, SyntheticArtifact | Permanent, tp)
- def newErrorSymbol(owner: Symbol, name: Name) =
+ def newErrorSymbol(owner: Symbol, name: Name, msg: => Message) = {
+ val errType = new ErrorType(msg)
newSymbol(owner, name, SyntheticArtifact,
- if (name.isTypeName) TypeAlias(ErrorType) else ErrorType)
+ if (name.isTypeName) TypeAlias(errType) else errType)
+ }
/** Map given symbols, subjecting their attributes to the mappings
* defined in the given TreeTypeMap `ttmap`.
@@ -313,10 +316,7 @@ trait Symbols { this: Context =>
newNakedSymbol[original.ThisName](original.coord)
}
val ttmap1 = ttmap.withSubstitution(originals, copies)
- (originals, copies).zipped foreach {(original, copy) =>
- copy.denot = original.denot // preliminary denotation, so that we can access symbols in subsequent transform
- }
- (originals, copies).zipped foreach {(original, copy) =>
+ (originals, copies).zipped foreach { (original, copy) =>
val odenot = original.denot
val oinfo = original.info match {
case ClassInfo(pre, _, parents, decls, selfInfo) =>
@@ -324,14 +324,27 @@ trait Symbols { this: Context =>
ClassInfo(pre, copy.asClass, parents, decls.cloneScope, selfInfo)
case oinfo => oinfo
}
+
+ val completer = new LazyType {
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ denot.info = oinfo // needed as otherwise we won't be able to go from Sym -> parents etc.
+ // Note that this is a hack, but a hack commonly used in Dotty:
+ // the same thing is done by other completers all the time
+ denot.info = ttmap1.mapType(oinfo)
+ denot.annotations = odenot.annotations.mapConserve(ttmap1.apply)
+ }
+ }
+
copy.denot = odenot.copySymDenotation(
symbol = copy,
owner = ttmap1.mapOwner(odenot.owner),
- initFlags = odenot.flags &~ Frozen | Fresh,
- info = ttmap1.mapType(oinfo),
+ initFlags = odenot.flags &~ (Frozen | Touched) | Fresh,
+ info = completer,
privateWithin = ttmap1.mapOwner(odenot.privateWithin), // since this refers to outer symbols, need not include copies (from->to) in ownermap here.
- annotations = odenot.annotations.mapConserve(ttmap1.apply))
+ annotations = odenot.annotations)
+
}
+
copies
}
@@ -390,6 +403,10 @@ object Symbols {
denot
}
+ /** The initial denotation of this symbol, without going through `current` */
+ final def initialDenot(implicit ctx: Context): SymDenotation =
+ lastDenot.initial
+
private[core] def defRunId: RunId =
if (lastDenot == null) NoRunId else lastDenot.validFor.runId
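
A sketch of the completer pattern the copySymDenotation call above relies on: rather than mapping the info eagerly while the copied symbols are only half-built, the denotation stores a callback that runs on first access. All names here are hypothetical stand-ins for the SymDenotation/LazyType machinery.

object LazyCompleterSketch {
  final class Denot(val name: String) {
    private var state: Either[() => String, String] = Right("<none>")
    def setCompleter(f: () => String): Unit = state = Left(f)
    def info: String = state match {
      case Right(s) => s
      case Left(f)  => val s = f(); state = Right(s); s  // complete once, then cache
    }
  }

  def main(args: Array[String]): Unit = {
    val d = new Denot("copy")
    d.setCompleter { () => println("mapping info"); "mapped info" }
    println("copy created; info not mapped yet")
    println(d.info) // forces the completer
    println(d.info) // already cached
  }
}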
diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
index f78820fff..6063cbf38 100644
--- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
+++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -16,7 +16,7 @@ import scala.util.control.NonFatal
/** Provides methods to compare types.
*/
class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
- implicit val ctx: Context = initctx
+ implicit val ctx = initctx
val state = ctx.typerState
import state.constraint
@@ -156,7 +156,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
private def firstTry(tp1: Type, tp2: Type): Boolean = tp2 match {
case tp2: NamedType =>
def compareNamed(tp1: Type, tp2: NamedType): Boolean = {
- implicit val ctx: Context = this.ctx
+ implicit val ctx = this.ctx
tp2.info match {
case info2: TypeAlias => isSubType(tp1, info2.alias)
case _ => tp1 match {
@@ -260,7 +260,12 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
secondTry(tp1, tp2)
}
compareErasedValueType
- case ErrorType =>
+ case ConstantType(v2) =>
+ tp1 match {
+ case ConstantType(v1) => v1.value == v2.value
+ case _ => secondTry(tp1, tp2)
+ }
+ case _: FlexType =>
true
case _ =>
secondTry(tp1, tp2)
@@ -336,7 +341,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
false
}
joinOK || isSubType(tp11, tp2) && isSubType(tp12, tp2)
- case ErrorType =>
+ case _: FlexType =>
true
case _ =>
thirdTry(tp1, tp2)
@@ -370,11 +375,22 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
thirdTryNamed(tp1, tp2)
case tp2: PolyParam =>
def comparePolyParam =
- (ctx.mode is Mode.TypevarsMissContext) ||
- isSubTypeWhenFrozen(tp1, bounds(tp2).lo) || {
+ (ctx.mode is Mode.TypevarsMissContext) || {
+ val alwaysTrue =
+ // The following condition is carefully formulated to catch all cases
+ // where the subtype relation is true without needing to add a constraint.
+ // It's tricky because we might need to either approximate tp2 by its
+ // lower bound or else widen tp1 and check that the result is a subtype of tp2.
+ // So if the constraint is not yet frozen, we do the same comparison again
+ // with a frozen constraint, which means that we get a chance to do the
+ // widening in `fourthTry` before adding to the constraint.
+ if (frozenConstraint || alwaysFluid) isSubType(tp1, bounds(tp2).lo)
+ else isSubTypeWhenFrozen(tp1, tp2)
+ alwaysTrue || {
if (canConstrain(tp2)) addConstraint(tp2, tp1.widenExpr, fromBelow = true)
else fourthTry(tp1, tp2)
}
+ }
comparePolyParam
case tp2: RefinedType =>
def compareRefinedSlow: Boolean = {
@@ -473,7 +489,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
case tp1 @ MethodType(_, formals1) =>
(tp1.signature consistentParams tp2.signature) &&
matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
- tp1.isImplicit == tp2.isImplicit && // needed?
+ (!tp1.isImplicit || tp2.isImplicit) && // non-implicit functions shadow implicit ones
isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1))
case _ =>
false
@@ -541,9 +557,11 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
/** if `tp2 == p.type` and `p: q.type` then try `tp1 <:< q.type` as a last effort.*/
def comparePaths = tp2 match {
case tp2: TermRef =>
- tp2.info.widenExpr match {
+ tp2.info.widenExpr.dealias match {
case tp2i: SingletonType =>
- isSubType(tp1, tp2i) // see z1720.scala for a case where this can arise even in typer.
+ isSubType(tp1, tp2i)
+ // see z1720.scala for a case where this can arise even in typer.
+ // Also, i1753.scala, to show why the dealias above is necessary.
case _ => false
}
case _ =>
@@ -750,8 +768,14 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
if (args1.isEmpty) args2.isEmpty
else args2.nonEmpty && {
val v = tparams.head.paramVariance
- (v > 0 || isSubType(args2.head, args1.head)) &&
- (v < 0 || isSubType(args1.head, args2.head))
+ def isSub(tp1: Type, tp2: Type) = tp2 match {
+ case tp2: TypeBounds =>
+ tp2.contains(tp1)
+ case _ =>
+ (v > 0 || isSubType(tp2, tp1)) &&
+ (v < 0 || isSubType(tp1, tp2))
+ }
+ isSub(args1.head, args2.head)
} && isSubArgs(args1.tail, args2.tail, tparams)
/** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where
@@ -996,9 +1020,9 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
case tp1: MethodType =>
tp2.widen match {
case tp2: MethodType =>
- tp1.isImplicit == tp2.isImplicit &&
- matchingParams(tp1.paramTypes, tp2.paramTypes, tp1.isJava, tp2.isJava) &&
- matchesType(tp1.resultType, tp2.resultType.subst(tp2, tp1), relaxed)
+ // implicitness is ignored when matching
+ matchingParams(tp1.paramTypes, tp2.paramTypes, tp1.isJava, tp2.isJava) &&
+ matchesType(tp1.resultType, tp2.resultType.subst(tp2, tp1), relaxed)
case tp2 =>
relaxed && tp1.paramNames.isEmpty &&
matchesType(tp1.resultType, tp2, relaxed)
diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala
index abbacee49..91e37d440 100644
--- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala
+++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala
@@ -2,11 +2,14 @@ package dotty.tools
package dotc
package core
-import Symbols._, Types._, Contexts._, Flags._, Names._, StdNames._, Decorators._, Flags.JavaDefined
+import Symbols._, Types._, Contexts._, Flags._, Names._, StdNames._, Decorators._
+import Flags.JavaDefined
+import NameOps._
import Uniques.unique
import dotc.transform.ExplicitOuter._
import dotc.transform.ValueClasses._
import util.DotClass
+import Definitions.MaxImplementedFunctionArity
/** Erased types are:
*
@@ -38,7 +41,10 @@ object TypeErasure {
case _: ErasedValueType =>
true
case tp: TypeRef =>
- tp.symbol.isClass && tp.symbol != defn.AnyClass && tp.symbol != defn.ArrayClass
+ val sym = tp.symbol
+ sym.isClass &&
+ sym != defn.AnyClass && sym != defn.ArrayClass &&
+ !defn.isUnimplementedFunctionClass(sym) && !defn.isImplicitFunctionClass(sym)
case _: TermRef =>
true
case JavaArrayType(elem) =>
@@ -51,7 +57,7 @@ object TypeErasure {
tp.paramTypes.forall(isErasedType) && isErasedType(tp.resultType)
case tp @ ClassInfo(pre, _, parents, decls, _) =>
isErasedType(pre) && parents.forall(isErasedType) //&& decls.forall(sym => isErasedType(sym.info)) && isErasedType(tp.selfType)
- case NoType | NoPrefix | WildcardType | ErrorType | SuperType(_, _) =>
+ case NoType | NoPrefix | WildcardType | _: ErrorType | SuperType(_, _) =>
true
case _ =>
false
@@ -176,8 +182,13 @@ object TypeErasure {
else if (sym.isAbstractType) TypeAlias(WildcardType)
else if (sym.isConstructor) outer.addParam(sym.owner.asClass, erase(tp)(erasureCtx))
else erase.eraseInfo(tp, sym)(erasureCtx) match {
- case einfo: MethodType if sym.isGetter && einfo.resultType.isRef(defn.UnitClass) =>
- MethodType(Nil, defn.BoxedUnitType)
+ case einfo: MethodType =>
+ if (sym.isGetter && einfo.resultType.isRef(defn.UnitClass))
+ MethodType(Nil, defn.BoxedUnitType)
+ else if (sym.isAnonymousFunction && einfo.paramTypes.length > MaxImplementedFunctionArity)
+ MethodType(nme.ALLARGS :: Nil, JavaArrayType(defn.ObjectType) :: Nil, einfo.resultType)
+ else
+ einfo
case einfo =>
einfo
}
@@ -317,6 +328,8 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
* - For a term ref p.x, the type <noprefix> # x.
* - For a typeref scala.Any, scala.AnyVal or scala.Singleton: |java.lang.Object|
* - For a typeref scala.Unit, |scala.runtime.BoxedUnit|.
+ * - For a typeref scala.FunctionN, where N > MaxImplementedFunctionArity, scala.FunctionXXL
+ * - For a typeref scala.ImplicitFunctionN, | scala.FunctionN |
* - For a typeref P.C where C refers to a class, <noprefix> # C.
* - For a typeref P.C where C refers to an alias type, the erasure of C's alias.
* - For a typeref P.C where C refers to an abstract type, the erasure of C's upper bound.
@@ -345,6 +358,8 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
if (!sym.isClass) this(tp.info)
else if (semiEraseVCs && isDerivedValueClass(sym)) eraseDerivedValueClassRef(tp)
else if (sym == defn.ArrayClass) apply(tp.appliedTo(TypeBounds.empty)) // i966 shows that we can hit a raw Array type.
+ else if (defn.isUnimplementedFunctionClass(sym)) defn.FunctionXXLType
+ else if (defn.isImplicitFunctionClass(sym)) apply(defn.FunctionType(sym.name.functionArity))
else eraseNormalClassRef(tp)
case tp: RefinedType =>
val parent = tp.parent
@@ -387,7 +402,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
tp.derivedClassInfo(NoPrefix, parents, erasedDecls, erasedRef(tp.selfType))
// can't replace selftype by NoType because this would lose the sourceModule link
}
- case NoType | NoPrefix | ErrorType | JavaArrayType(_) =>
+ case NoType | NoPrefix | _: ErrorType | JavaArrayType(_) =>
tp
case tp: WildcardType if wildcardOK =>
tp
@@ -427,7 +442,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
private def eraseDerivedValueClassRef(tref: TypeRef)(implicit ctx: Context): Type = {
val cls = tref.symbol.asClass
val underlying = underlyingOfValueClass(cls)
- if (underlying.exists) ErasedValueType(tref, valueErasure(underlying))
+ if (underlying.exists && !isCyclic(cls)) ErasedValueType(tref, valueErasure(underlying))
else NoType
}
@@ -481,7 +496,10 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
val erasedVCRef = eraseDerivedValueClassRef(tp)
if (erasedVCRef.exists) return sigName(erasedVCRef)
}
- normalizeClass(sym.asClass).fullName.asTypeName
+ if (defn.isImplicitFunctionClass(sym))
+ sigName(defn.FunctionType(sym.name.functionArity))
+ else
+ normalizeClass(sym.asClass).fullName.asTypeName
case defn.ArrayOf(elem) =>
sigName(this(tp))
case JavaArrayType(elem) =>
@@ -495,7 +513,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
if (inst.exists) sigName(inst) else tpnme.Uninstantiated
case tp: TypeProxy =>
sigName(tp.underlying)
- case ErrorType | WildcardType =>
+ case _: ErrorType | WildcardType =>
tpnme.WILDCARD
case tp: WildcardType =>
sigName(tp.optBounds)
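
A name-level sketch of the two new erasure rules documented above (ImplicitFunctionN erases like FunctionN; FunctionN with N above the implemented maximum erases to FunctionXXL). MaxImplementedFunctionArity = 22 is assumed; the real code operates on TypeRefs, not strings.

object FunctionErasureSketch {
  val MaxImplementedFunctionArity = 22

  def eraseFunctionName(name: String): String = {
    val implicitPrefix = "scala.ImplicitFunction"
    val plainPrefix    = "scala.Function"
    if (name.startsWith(implicitPrefix))
      eraseFunctionName(plainPrefix + name.drop(implicitPrefix.length))  // ImplicitFunctionN -> FunctionN
    else if (name.startsWith(plainPrefix) && {
               val digits = name.drop(plainPrefix.length)
               digits.nonEmpty && digits.forall(_.isDigit) && digits.toInt > MaxImplementedFunctionArity
             })
      "scala.FunctionXXL"                                                // all big arities share one class
    else name
  }

  def main(args: Array[String]): Unit = {
    println(eraseFunctionName("scala.ImplicitFunction2"))   // scala.Function2
    println(eraseFunctionName("scala.Function30"))          // scala.FunctionXXL
    println(eraseFunctionName("scala.ImplicitFunction30"))  // scala.FunctionXXL
    println(eraseFunctionName("scala.Function2"))           // scala.Function2
  }
}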
diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala
index 92e5f9d57..c2a7d7ea6 100644
--- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala
+++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala
@@ -10,7 +10,8 @@ import NameOps._
import Decorators._
import StdNames._
import Annotations._
-import util.SimpleMap
+import config.Config
+import util.{SimpleMap, Property}
import collection.mutable
import ast.tpd._
@@ -67,7 +68,10 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
if (thiscls.derivesFrom(cls) && pre.baseTypeRef(thiscls).exists) {
if (theMap != null && theMap.currentVariance <= 0 && !isLegalPrefix(pre)) {
ctx.base.unsafeNonvariant = ctx.runId
- AnnotatedType(pre, Annotation(defn.UnsafeNonvariantAnnot, Nil))
+ pre match {
+ case AnnotatedType(_, ann) if ann.symbol == defn.UnsafeNonvariantAnnot => pre
+ case _ => AnnotatedType(pre, Annotation(defn.UnsafeNonvariantAnnot, Nil))
+ }
}
else pre
}
@@ -85,13 +89,15 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
if (sym.isStatic) tp
else {
val pre1 = asSeenFrom(tp.prefix, pre, cls, theMap)
- if (pre1.isUnsafeNonvariant)
- pre1.member(tp.name).info match {
+ if (pre1.isUnsafeNonvariant) {
+ val safeCtx = ctx.withProperty(TypeOps.findMemberLimit, Some(()))
+ pre1.member(tp.name)(safeCtx).info match {
case TypeAlias(alias) =>
// try to follow aliases; this will avoid skolemization.
return alias
case _ =>
}
+ }
tp.derivedSelect(pre1)
}
case tp: ThisType =>
@@ -197,6 +203,9 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
case c :: rest =>
val accu1 = if (accu exists (_ derivesFrom c)) accu else c :: accu
if (cs == c.baseClasses) accu1 else dominators(rest, accu1)
+ case Nil => // this case can happen because after erasure we do not have a top class anymore
+ assert(ctx.erasedTypes)
+ defn.ObjectClass :: Nil
}
def mergeRefined(tp1: Type, tp2: Type): Type = {
@@ -430,7 +439,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
formals = formals.updated(name, tp1.typeParamNamed(name))
}
normalizeToRef(tp1)
- case ErrorType =>
+ case _: ErrorType =>
defn.AnyType
case AnnotatedType(tpe, _) =>
normalizeToRef(tpe)
@@ -543,7 +552,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
def dynamicsEnabled =
featureEnabled(defn.LanguageModuleClass, nme.dynamics)
- def testScala2Mode(msg: String, pos: Position) = {
+ def testScala2Mode(msg: => String, pos: Position) = {
if (scala2Mode) migrationWarning(msg, pos)
scala2Mode
}
@@ -551,4 +560,10 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
object TypeOps {
@sharable var track = false // !!!DEBUG
+
+ /** When a property with this key is set in a context, it limits the number
+ * of recursive member searches. If the limit is reached, findMember returns
+ * NoDenotation.
+ */
+ val findMemberLimit = new Property.Key[Unit]
}
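
A toy model of the findMemberLimit guard wired up above: a context can carry an opt-in flag, and once the search counter crosses a threshold the lookup gives up instead of recursing further. The threshold value and names are assumptions; the real mechanism uses Property.Key and Config.PendingFindMemberLimit.

object FindMemberLimitSketch {
  case class Ctx(limitSearches: Boolean, var searchCount: Int = 0)
  val PendingFindMemberLimit = 16  // assumed threshold

  def findMember(depth: Int)(implicit ctx: Ctx): Option[String] = {
    ctx.searchCount += 1
    if (ctx.limitSearches && ctx.searchCount > PendingFindMemberLimit) None  // bail out
    else if (depth == 0) Some("member")
    else findMember(depth - 1)
  }

  def main(args: Array[String]): Unit = {
    println(findMember(100)(Ctx(limitSearches = false))) // Some(member): unbounded search
    println(findMember(100)(Ctx(limitSearches = true)))  // None: the limit kicked in
  }
}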
diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala
index 89bc21929..636204f64 100644
--- a/compiler/src/dotty/tools/dotc/core/Types.scala
+++ b/compiler/src/dotty/tools/dotc/core/Types.scala
@@ -15,9 +15,10 @@ import SymDenotations._
import Decorators._
import Denotations._
import Periods._
-import util.Positions.Position
+import util.Positions.{Position, NoPosition}
import util.Stats._
import util.{DotClass, SimpleMap}
+import reporting.diagnostic.Message
import ast.tpd._
import ast.TreeTypeMap
import printing.Texts._
@@ -176,7 +177,7 @@ object Types {
/** Is this type produced as a repair for an error? */
final def isError(implicit ctx: Context): Boolean = stripTypeVar match {
- case ErrorType => true
+ case _: ErrorType => true
case tp => (tp.typeSymbol is Erroneous) || (tp.termSymbol is Erroneous)
}
@@ -216,6 +217,14 @@ object Types {
case _ => false
}
+ /** Is this the type of a method with a leading empty parameter list?
+ */
+ def isNullaryMethod(implicit ctx: Context): Boolean = this match {
+ case MethodType(Nil, _) => true
+ case tp: PolyType => tp.resultType.isNullaryMethod
+ case _ => false
+ }
+
/** Is this an alias TypeBounds? */
def isAlias: Boolean = this.isInstanceOf[TypeAlias]
@@ -387,8 +396,8 @@ object Types {
tp.decls.denotsNamed(name).filterExcluded(excluded).toDenot(NoPrefix)
case tp: TypeProxy =>
tp.underlying.findDecl(name, excluded)
- case ErrorType =>
- ctx.newErrorSymbol(classSymbol orElse defn.RootClass, name)
+ case err: ErrorType =>
+ ctx.newErrorSymbol(classSymbol orElse defn.RootClass, name, err.msg)
case _ =>
NoDenotation
}
@@ -453,8 +462,8 @@ object Types {
go(tp.join)
case tp: JavaArrayType =>
defn.ObjectType.findMember(name, pre, excluded)
- case ErrorType =>
- ctx.newErrorSymbol(pre.classSymbol orElse defn.RootClass, name)
+ case err: ErrorType =>
+ ctx.newErrorSymbol(pre.classSymbol orElse defn.RootClass, name, err.msg)
case _ =>
NoDenotation
}
@@ -572,8 +581,12 @@ object Types {
{ val recCount = ctx.findMemberCount + 1
ctx.findMemberCount = recCount
- if (recCount >= Config.LogPendingFindMemberThreshold)
+ if (recCount >= Config.LogPendingFindMemberThreshold) {
ctx.pendingMemberSearches = name :: ctx.pendingMemberSearches
+ if (ctx.property(TypeOps.findMemberLimit).isDefined &&
+ ctx.findMemberCount > Config.PendingFindMemberLimit)
+ return NoDenotation
+ }
}
//assert(ctx.findMemberCount < 20)
@@ -1210,7 +1223,7 @@ object Types {
case mt @ MethodType(_, formals) if !mt.isDependent || ctx.mode.is(Mode.AllowDependentFunctions) =>
val formals1 = if (dropLast == 0) formals else formals dropRight dropLast
defn.FunctionOf(
- formals1 mapConserve (_.underlyingIfRepeated(mt.isJava)), mt.resultType)
+ formals1 mapConserve (_.underlyingIfRepeated(mt.isJava)), mt.resultType, mt.isImplicit && !ctx.erasedTypes)
}
/** The signature of this type. This is by default NotAMethod,
@@ -1497,7 +1510,7 @@ object Types {
(lastDefRunId != sym.defRunId) ||
(lastDefRunId == NoRunId)
} ||
- (lastSymbol.infoOrCompleter == ErrorType ||
+ (lastSymbol.infoOrCompleter.isInstanceOf[ErrorType] ||
sym.owner != lastSymbol.owner &&
(sym.owner.derivesFrom(lastSymbol.owner) ||
selfTypeOf(sym).derivesFrom(lastSymbol.owner) ||
@@ -2275,11 +2288,13 @@ object Types {
protected def resultSignature(implicit ctx: Context) = try resultType match {
case rtp: MethodicType => rtp.signature
- case tp => Signature(tp, isJava = false)
+ case tp =>
+ if (tp.isRef(defn.UnitClass)) Signature(Nil, defn.UnitClass.fullName.asTypeName)
+ else Signature(tp, isJava = false)
}
catch {
case ex: AssertionError =>
- println(i"failure while taking result signture of $this: $resultType")
+ println(i"failure while taking result signature of $this: $resultType")
throw ex
}
@@ -2376,7 +2391,9 @@ object Types {
protected def computeSignature(implicit ctx: Context): Signature =
resultSignature.prepend(paramTypes, isJava)
- def derivedMethodType(paramNames: List[TermName], paramTypes: List[Type], resType: Type)(implicit ctx: Context) =
+ def derivedMethodType(paramNames: List[TermName] = this.paramNames,
+ paramTypes: List[Type] = this.paramTypes,
+ resType: Type = this.resType)(implicit ctx: Context) =
if ((paramNames eq this.paramNames) && (paramTypes eq this.paramTypes) && (resType eq this.resType)) this
else {
val resTypeFn = (x: MethodType) => resType.subst(this, x)
@@ -2693,7 +2710,7 @@ object Types {
protected def checkInst(implicit ctx: Context): this.type = {
def check(tycon: Type): Unit = tycon.stripTypeVar match {
case tycon: TypeRef if !tycon.symbol.isClass =>
- case _: PolyParam | ErrorType | _: WildcardType =>
+ case _: PolyParam | _: ErrorType | _: WildcardType =>
case _: PolyType =>
assert(args.exists(_.isInstanceOf[TypeBounds]), s"unreduced type apply: $this")
case tycon: AnnotatedType =>
@@ -2862,14 +2879,14 @@ object Types {
*
* @param origin The parameter that's tracked by the type variable.
* @param creatorState The typer state in which the variable was created.
- * @param owningTree The function part of the TypeApply tree tree that introduces
- * the type variable.
+ * @param bindingTree The TypeTree which introduces the type variable, or EmptyTree
+ * if the type variable does not correspond to a source term.
* @param owner The current owner of the context where the variable was created.
*
* `owningTree` and `owner` are used to determine whether a type-variable can be instantiated
* at some given point. See `Inferencing#interpolateUndetVars`.
*/
- final class TypeVar(val origin: PolyParam, creatorState: TyperState, val owningTree: untpd.Tree, val owner: Symbol) extends CachedProxyType with ValueType {
+ final class TypeVar(val origin: PolyParam, creatorState: TyperState, val bindingTree: untpd.Tree, val owner: Symbol) extends CachedProxyType with ValueType {
/** The permanent instance type of the variable, or NoType if none is given yet */
private[core] var inst: Type = NoType
@@ -3251,12 +3268,19 @@ object Types {
override def computeHash = hashSeed
}
- abstract class ErrorType extends UncachedGroundType with ValueType
+ /** A common superclass of `ErrorType` and `TryDynamicCallType`. Instances of this
+ * class are at the same time subtypes and supertypes of every other type.
+ */
+ abstract class FlexType extends UncachedGroundType with ValueType
+
+ class ErrorType(_msg: => Message) extends FlexType {
+ val msg = _msg
+ }
- object ErrorType extends ErrorType
+ object UnspecifiedErrorType extends ErrorType("unspecified error")
/* Type used to track Select nodes that could not resolve a member and whose qualifier is a scala.Dynamic. */
- object TryDynamicCallType extends ErrorType
+ object TryDynamicCallType extends FlexType
/** Wildcard type, possibly with bounds */
abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermType {
@@ -3378,7 +3402,7 @@ object Types {
/** Map this function over given type */
def mapOver(tp: Type): Type = {
- implicit val ctx: Context = this.ctx // Dotty deviation: implicits need explicit type
+ implicit val ctx = this.ctx
tp match {
case tp: NamedType =>
if (stopAtStatic && tp.symbol.isStatic) tp
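
A miniature of the new FlexType/ErrorType shape introduced above, with hypothetical names: the diagnostic is taken by name, so an expensive message is only rendered when an error type is actually constructed and inspected (this sketch defers via a lazy val, whereas the real class stores the message in a val).

object ErrorTypeSketch {
  abstract class FlexLike                       // stands in for FlexType
  class ErrLike(_msg: => String) extends FlexLike { lazy val msg: String = _msg }
  object UnspecifiedErr extends ErrLike("unspecified error")

  def main(args: Array[String]): Unit = {
    def expensiveMsg: String = { println("rendering message"); "bad symbolic reference" }
    val e = new ErrLike(expensiveMsg)  // nothing rendered yet
    println(e.msg)                     // renders, then prints the message
    println(UnspecifiedErr.msg)
  }
}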
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
index 0f63b25bb..92ab10db9 100644
--- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
@@ -90,7 +90,8 @@ object JavaParsers {
if (skipIt)
skip()
}
- def errorTypeTree = TypeTree().withType(ErrorType) withPos Position(in.offset)
+
+ def errorTypeTree = TypeTree().withType(UnspecifiedErrorType) withPos Position(in.offset)
// --------- tree building -----------------------------
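
The Parsers changes that follow teach typ(), expr(), and the new funParams/implicitClosure helpers to accept implicit function types and implicit closures in source code. A usage sketch, assuming the scala.ImplicitFunctionN traits from Definitions and the accompanying typer support elsewhere in the patch:

object ImplicitFunctionSyntaxSketch {
  case class Ctx(indent: Int)

  // An implicit function type: the Ctx argument is passed implicitly.
  type Ctxual[T] = implicit Ctx => T

  // An implicit closure: `implicit ctx => ...` binds the implicit parameter.
  val describe: Ctxual[String] = implicit ctx => "indent = " + ctx.indent

  def main(args: Array[String]): Unit = {
    implicit val ctx: Ctx = Ctx(2)
    println(describe)  // the Ctx in scope is supplied automatically
  }
}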
diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
index fa0576c7a..f62093db0 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -83,6 +83,15 @@ object Parsers {
def atPos[T <: Positioned](start: Offset)(t: T): T =
atPos(start, start)(t)
+ def startOffset(t: Positioned): Int =
+ if (t.pos.exists) t.pos.start else in.offset
+
+ def pointOffset(t: Positioned): Int =
+ if (t.pos.exists) t.pos.point else in.offset
+
+ def endOffset(t: Positioned): Int =
+ if (t.pos.exists) t.pos.end else in.lastOffset
+
def nameStart: Offset =
if (in.token == BACKQUOTED_IDENT) in.offset + 1 else in.offset
@@ -137,6 +146,7 @@ object Parsers {
def isNumericLit = numericLitTokens contains in.token
def isModifier = modifierTokens contains in.token
def isExprIntro = canStartExpressionTokens contains in.token
+ def isBindingIntro = canStartBindingTokens contains in.token
def isTemplateIntro = templateIntroTokens contains in.token
def isDclIntro = dclIntroTokens contains in.token
def isStatSeqEnd = in.token == RBRACE || in.token == EOF
@@ -448,7 +458,7 @@ object Parsers {
val topInfo = opStack.head
opStack = opStack.tail
val od = reduceStack(base, topInfo.operand, 0, true)
- return atPos(od.pos.start, topInfo.offset) {
+ return atPos(startOffset(od), topInfo.offset) {
PostfixOp(od, topInfo.operator)
}
}
@@ -492,9 +502,9 @@ object Parsers {
/** Accept identifier acting as a selector on given tree `t`. */
def selector(t: Tree): Tree =
- atPos(t.pos.start, in.offset) { Select(t, ident()) }
+ atPos(startOffset(t), in.offset) { Select(t, ident()) }
- /** Selectors ::= ident { `.' ident()
+ /** Selectors ::= id { `.' id }
*
* Accept `.' separated identifiers acting as selectors on given tree `t`.
* @param finish An alternative parse in case the next token is not an identifier.
@@ -505,7 +515,7 @@ object Parsers {
if (t1 ne t) t1 else dotSelectors(selector(t), finish)
}
- /** DotSelectors ::= { `.' ident()
+ /** DotSelectors ::= { `.' id }
*
* Accept `.' separated identifiers acting as selectors on given tree `t`.
* @param finish An alternative parse in case the token following a `.' is not an identifier.
@@ -518,9 +528,9 @@ object Parsers {
private val id: Tree => Tree = x => x
/** Path ::= StableId
- * | [Ident `.'] this
+ * | [id `.'] this
*
- * @param thisOK If true, [Ident `.'] this is acceptable as the path.
+ * @param thisOK If true, the path can end with the keyword `this`.
* If false, another selection is required after the `this`.
* @param finish An alternative parse in case the token following a `.' is not an identifier.
* If the alternative does not apply, its tree argument is returned unchanged.
@@ -530,7 +540,7 @@ object Parsers {
def handleThis(qual: Ident) = {
in.nextToken()
val t = atPos(start) { This(qual) }
- if (!thisOK && in.token != DOT) syntaxError("`.' expected")
+ if (!thisOK && in.token != DOT) syntaxError(DanglingThisInPath(), t.pos)
dotSelectors(t, finish)
}
def handleSuper(qual: Ident) = {
@@ -555,20 +565,20 @@ object Parsers {
}
}
- /** MixinQualifier ::= `[' Id `]'
+ /** MixinQualifier ::= `[' id `]'
*/
def mixinQualifierOpt(): Ident =
if (in.token == LBRACKET) inBrackets(atPos(in.offset) { typeIdent() })
else EmptyTypeIdent
- /** StableId ::= Id
- * | Path `.' Id
+ /** StableId ::= id
+ * | Path `.' id
* | [id '.'] super [`[' id `]']`.' id
*/
def stableId(): Tree =
path(thisOK = false)
- /** QualId ::= Id {`.' Id}
+ /** QualId ::= id {`.' id}
*/
def qualId(): Tree =
dotSelectors(termIdent())
@@ -588,7 +598,7 @@ object Parsers {
val isNegated = negOffset < in.offset
atPos(negOffset) {
if (in.token == SYMBOLLIT) atPos(in.skipToken()) { SymbolLit(in.strVal) }
- else if (in.token == INTERPOLATIONID) interpolatedString()
+ else if (in.token == INTERPOLATIONID) interpolatedString(inPattern)
else finish(in.token match {
case CHARLIT => in.charVal
case INTLIT => in.intVal(isNegated).toInt
@@ -612,10 +622,14 @@ object Parsers {
in.nextToken()
while (in.token == STRINGPART) {
segmentBuf += Thicket(
- literal(),
+ literal(inPattern = inPattern),
atPos(in.offset) {
if (in.token == IDENTIFIER)
termIdent()
+ else if (in.token == USCORE && inPattern) {
+ in.nextToken()
+ Ident(nme.WILDCARD)
+ }
else if (in.token == THIS) {
in.nextToken()
This(EmptyTypeIdent)
@@ -624,12 +638,12 @@ object Parsers {
if (inPattern) Block(Nil, inBraces(pattern()))
else expr()
else {
- ctx.error(InterpolatedStringError())
+ ctx.error(InterpolatedStringError(), source atPos Position(in.offset))
EmptyTree
}
})
}
- if (in.token == STRINGLIT) segmentBuf += literal()
+ if (in.token == STRINGLIT) segmentBuf += literal(inPattern = inPattern)
InterpolatedString(interpolator, segmentBuf.toList)
}
@@ -662,13 +676,13 @@ object Parsers {
val t = typ()
findWildcardType(t) match {
case Some(wildcardPos) =>
- syntaxError("unbound wildcard type", wildcardPos)
+ syntaxError(UnboundWildcardType(), wildcardPos)
scalaAny
case None => t
}
}
- /** Type ::= FunArgTypes `=>' Type
+ /** Type ::= [`implicit'] FunArgTypes `=>' Type
* | HkTypeParamClause `->' Type
* | InfixType
* FunArgTypes ::= InfixType
@@ -676,20 +690,26 @@ object Parsers {
*/
def typ(): Tree = {
val start = in.offset
+ val isImplicit = in.token == IMPLICIT
+ if (isImplicit) in.nextToken()
+ def functionRest(params: List[Tree]): Tree =
+ atPos(start, accept(ARROW)) {
+ val t = typ()
+ if (isImplicit) new ImplicitFunction(params, t) else Function(params, t)
+ }
val t =
if (in.token == LPAREN) {
in.nextToken()
if (in.token == RPAREN) {
in.nextToken()
- atPos(start, accept(ARROW)) { Function(Nil, typ()) }
+ functionRest(Nil)
}
else {
openParens.change(LPAREN, 1)
val ts = commaSeparated(funArgType)
openParens.change(LPAREN, -1)
accept(RPAREN)
- if (in.token == ARROW)
- atPos(start, in.skipToken()) { Function(ts, typ()) }
+ if (isImplicit || in.token == ARROW) functionRest(ts)
else {
for (t <- ts)
if (t.isInstanceOf[ByNameTypeTree])
@@ -709,8 +729,8 @@ object Parsers {
else infixType()
in.token match {
- case ARROW => atPos(start, in.skipToken()) { Function(List(t), typ()) }
- case FORSOME => syntaxError("existential types no longer supported; use a wildcard type or dependent type instead"); t
+ case ARROW => functionRest(t :: Nil)
+ case FORSOME => syntaxError(ExistentialTypesNoLongerSupported()); t
case _ => t
}
}
@@ -728,7 +748,7 @@ object Parsers {
def refinedTypeRest(t: Tree): Tree = {
newLineOptWhenFollowedBy(LBRACE)
- if (in.token == LBRACE) refinedTypeRest(atPos(t.pos.start) { RefinedTypeTree(t, refinement()) })
+ if (in.token == LBRACE) refinedTypeRest(atPos(startOffset(t)) { RefinedTypeTree(t, refinement()) })
else t
}
@@ -749,11 +769,11 @@ object Parsers {
def annotType(): Tree = annotTypeRest(simpleType())
def annotTypeRest(t: Tree): Tree =
- if (in.token == AT) annotTypeRest(atPos(t.pos.start) { Annotated(t, annot()) })
+ if (in.token == AT) annotTypeRest(atPos(startOffset(t)) { Annotated(t, annot()) })
else t
/** SimpleType ::= SimpleType TypeArgs
- * | SimpleType `#' Id
+ * | SimpleType `#' id
* | StableId
* | Path `.' type
* | `(' ArgTypes `)'
@@ -763,7 +783,9 @@ object Parsers {
*/
def simpleType(): Tree = simpleTypeRest {
if (in.token == LPAREN)
- atPos(in.offset) { makeTupleOrParens(inParens(argTypes())) }
+ atPos(in.offset) {
+ makeTupleOrParens(inParens(argTypes(namedOK = false, wildOK = true)))
+ }
else if (in.token == LBRACE)
atPos(in.offset) { RefinedTypeTree(EmptyTree, refinement()) }
else if (isSimpleLiteral) { SingletonTypeTree(literal()) }
@@ -780,19 +802,20 @@ object Parsers {
val handleSingletonType: Tree => Tree = t =>
if (in.token == TYPE) {
in.nextToken()
- atPos(t.pos.start) { SingletonTypeTree(t) }
+ atPos(startOffset(t)) { SingletonTypeTree(t) }
} else t
private def simpleTypeRest(t: Tree): Tree = in.token match {
case HASH => simpleTypeRest(typeProjection(t))
- case LBRACKET => simpleTypeRest(atPos(t.pos.start) { AppliedTypeTree(t, typeArgs(namedOK = true)) })
+ case LBRACKET => simpleTypeRest(atPos(startOffset(t)) {
+ AppliedTypeTree(t, typeArgs(namedOK = true, wildOK = true)) })
case _ => t
}
private def typeProjection(t: Tree): Tree = {
accept(HASH)
val id = typeIdent()
- atPos(t.pos.start, id.pos.start) { Select(t, id.name) }
+ atPos(startOffset(t), startOffset(id)) { Select(t, id.name) }
}
/** NamedTypeArg ::= id `=' Type
@@ -806,7 +829,7 @@ object Parsers {
/** ArgTypes ::= Type {`,' Type}
* | NamedTypeArg {`,' NamedTypeArg}
*/
- def argTypes(namedOK: Boolean = false) = {
+ def argTypes(namedOK: Boolean, wildOK: Boolean) = {
def otherArgs(first: Tree, arg: () => Tree): List[Tree] = {
val rest =
if (in.token == COMMA) {
@@ -816,8 +839,9 @@ object Parsers {
else Nil
first :: rest
}
+ def typParser() = if (wildOK) typ() else toplevelTyp()
if (namedOK && in.token == IDENTIFIER)
- typ() match {
+ typParser() match {
case Ident(name) if in.token == EQUALS =>
in.nextToken()
otherArgs(NamedArg(name, typ()), namedTypeArg)
@@ -825,7 +849,7 @@ object Parsers {
if (in.token == EQUALS) println(s"??? $firstArg")
otherArgs(firstArg, typ)
}
- else commaSeparated(typ)
+ else commaSeparated(typParser)
}
/** FunArgType ::= Type | `=>' Type
@@ -846,14 +870,14 @@ object Parsers {
val t = toplevelTyp()
if (isIdent(nme.raw.STAR)) {
in.nextToken()
- atPos(t.pos.start) { PostfixOp(t, nme.raw.STAR) }
+ atPos(startOffset(t)) { PostfixOp(t, nme.raw.STAR) }
} else t
}
/** TypeArgs ::= `[' Type {`,' Type} `]'
* NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]'
*/
- def typeArgs(namedOK: Boolean = false): List[Tree] = inBrackets(argTypes(namedOK))
+ def typeArgs(namedOK: Boolean, wildOK: Boolean): List[Tree] = inBrackets(argTypes(namedOK, wildOK))
/** Refinement ::= `{' RefineStatSeq `}'
*/
@@ -932,14 +956,14 @@ object Parsers {
}
}
- /** Expr ::= FunParams `=>' Expr
+ /** Expr ::= [`implicit'] FunParams `=>' Expr
* | Expr1
* FunParams ::= Bindings
- * | [`implicit'] Id
+ * | id
* | `_'
* ExprInParens ::= PostfixExpr `:' Type
* | Expr
- * BlockResult ::= (FunParams | [`implicit'] Id `:' InfixType) => Block
+ * BlockResult ::= [`implicit'] FunParams `=>' Block
* | Expr1
* Expr1 ::= `if' `(' Expr `)' {nl} Expr [[semi] else Expr]
* | `if' Expr `then' Expr [[semi] else Expr]
@@ -951,12 +975,12 @@ object Parsers {
* | `throw' Expr
* | `return' [Expr]
* | ForExpr
- * | [SimpleExpr `.'] Id `=' Expr
+ * | [SimpleExpr `.'] id `=' Expr
* | SimpleExpr1 ArgumentExprs `=' Expr
* | PostfixExpr [Ascription]
* | PostfixExpr `match' `{' CaseClauses `}'
* Bindings ::= `(' [Binding {`,' Binding}] `)'
- * Binding ::= (Id | `_') [`:' Type]
+ * Binding ::= (id | `_') [`:' Type]
* Ascription ::= `:' CompoundType
* | `:' Annotation {Annotation}
* | `:' `_' `*'
@@ -966,22 +990,27 @@ object Parsers {
def expr(): Tree = expr(Location.ElseWhere)
def expr(location: Location.Value): Tree = {
- val saved = placeholderParams
- placeholderParams = Nil
- val t = expr1(location)
- if (in.token == ARROW) {
- placeholderParams = saved
- closureRest(t.pos.start, location, convertToParams(t))
- }
- else if (isWildcard(t)) {
- placeholderParams = placeholderParams ::: saved
- t
+ val start = in.offset
+ if (in.token == IMPLICIT)
+ implicitClosure(start, location, implicitMods())
+ else {
+ val saved = placeholderParams
+ placeholderParams = Nil
+ val t = expr1(location)
+ if (in.token == ARROW) {
+ placeholderParams = saved
+ closureRest(start, location, convertToParams(t))
+ }
+ else if (isWildcard(t)) {
+ placeholderParams = placeholderParams ::: saved
+ t
+ }
+ else
+ try
+ if (placeholderParams.isEmpty) t
+ else new WildcardFunction(placeholderParams.reverse, t)
+ finally placeholderParams = saved
}
- else
- try
- if (placeholderParams.isEmpty) t
- else new WildcardFunction(placeholderParams.reverse, t)
- finally placeholderParams = saved
}
def expr1(location: Location.Value = Location.ElseWhere): Tree = in.token match {
@@ -1025,7 +1054,7 @@ object Parsers {
assert(handlerStart != -1)
syntaxError(
new EmptyCatchBlock(body),
- Position(handlerStart, handler.pos.end)
+ Position(handlerStart, endOffset(handler))
)
case _ =>
}
@@ -1035,7 +1064,7 @@ object Parsers {
else {
if (handler.isEmpty) warning(
EmptyCatchAndFinallyBlock(body),
- source atPos Position(tryOffset, body.pos.end)
+ source atPos Position(tryOffset, endOffset(body))
)
EmptyTree
}
@@ -1047,8 +1076,6 @@ object Parsers {
atPos(in.skipToken()) { Return(if (isExprIntro) expr() else EmptyTree, EmptyTree) }
case FOR =>
forExpr()
- case IMPLICIT =>
- implicitClosure(in.skipToken(), location)
case _ =>
expr1Rest(postfixExpr(), location)
}
@@ -1057,21 +1084,21 @@ object Parsers {
case EQUALS =>
t match {
case Ident(_) | Select(_, _) | Apply(_, _) =>
- atPos(t.pos.start, in.skipToken()) { Assign(t, expr()) }
+ atPos(startOffset(t), in.skipToken()) { Assign(t, expr()) }
case _ =>
t
}
case COLON =>
ascription(t, location)
case MATCH =>
- atPos(t.pos.start, in.skipToken()) {
+ atPos(startOffset(t), in.skipToken()) {
inBraces(Match(t, caseClauses()))
}
case _ =>
t
}
- def ascription(t: Tree, location: Location.Value) = atPos(t.pos.start, in.skipToken()) {
+ def ascription(t: Tree, location: Location.Value) = atPos(startOffset(t), in.skipToken()) {
in.token match {
case USCORE =>
val uscoreStart = in.skipToken()
@@ -1096,20 +1123,53 @@ object Parsers {
}
}
- /** Expr ::= implicit Id `=>' Expr
- * BlockResult ::= implicit Id [`:' InfixType] `=>' Block
+ /** FunParams ::= Bindings
+ * | id
+ * | `_'
+ * Bindings ::= `(' [Binding {`,' Binding}] `)'
*/
- def implicitClosure(start: Int, location: Location.Value, implicitMod: Option[Mod] = None): Tree = {
- var mods = atPos(start) { Modifiers(Implicit) }
- if (implicitMod.nonEmpty) mods = mods.withAddedMod(implicitMod.get)
- val id = termIdent()
- val paramExpr =
- if (location == Location.InBlock && in.token == COLON)
- atPos(id.pos.start, in.skipToken()) { Typed(id, infixType()) }
- else
- id
- closureRest(start, location, convertToParam(paramExpr, mods) :: Nil)
- }
+ def funParams(mods: Modifiers, location: Location.Value): List[Tree] =
+ if (in.token == LPAREN)
+ inParens(if (in.token == RPAREN) Nil else commaSeparated(() => binding(mods)))
+ else {
+ val start = in.offset
+ val name = bindingName()
+ val t =
+ if (in.token == COLON && location == Location.InBlock) {
+ if (false) // Don't error yet, as the alternative syntax "implicit (x: T) => ... "
+ // is not supported by Scala2.x
+ migrationWarningOrError(s"This syntax is no longer supported; parameter needs to be enclosed in (...)")
+
+ in.nextToken()
+ val t = infixType()
+
+ if (false && in.isScala2Mode) {
+ patch(source, Position(start), "(")
+ patch(source, Position(in.lastOffset), ")")
+ }
+ t
+ }
+ else TypeTree()
+ (atPos(start) { makeParameter(name, t, mods) }) :: Nil
+ }
+
+ /** Binding ::= (id | `_') [`:' Type]
+ */
+ def binding(mods: Modifiers): Tree =
+ atPos(in.offset) { makeParameter(bindingName(), typedOpt(), mods) }
+
+ def bindingName(): TermName =
+ if (in.token == USCORE) {
+ in.nextToken()
+ ctx.freshName(nme.USCORE_PARAM_PREFIX).toTermName
+ }
+ else ident()
+
+ /** Expr ::= implicit id `=>' Expr
+ * BlockResult ::= implicit id [`:' InfixType] `=>' Block // Scala2 only
+ */
+ def implicitClosure(start: Int, location: Location.Value, implicitMods: Modifiers): Tree =
+ closureRest(start, location, funParams(implicitMods, location))
def closureRest(start: Int, location: Location.Value, params: List[Tree]): Tree =
atPos(start, in.offset) {
@@ -1117,9 +1177,9 @@ object Parsers {
Function(params, if (location == Location.InBlock) block() else expr())
}
- /** PostfixExpr ::= InfixExpr [Id [nl]]
+ /** PostfixExpr ::= InfixExpr [id [nl]]
* InfixExpr ::= PrefixExpr
- * | InfixExpr Id [nl] InfixExpr
+ * | InfixExpr id [nl] InfixExpr
*/
def postfixExpr(): Tree =
infixOps(prefixExpr(), canStartExpressionTokens, prefixExpr, maybePostfix = true)
@@ -1144,7 +1204,7 @@ object Parsers {
* | xmlLiteral
* | Path
* | `(' [ExprsInParens] `)'
- * | SimpleExpr `.' Id
+ * | SimpleExpr `.' id
* | SimpleExpr (TypeArgs | NamedTypeArgs)
* | SimpleExpr1 ArgumentExprs
*/
@@ -1194,13 +1254,13 @@ object Parsers {
in.nextToken()
simpleExprRest(selector(t), canApply = true)
case LBRACKET =>
- val tapp = atPos(t.pos.start, in.offset) { TypeApply(t, typeArgs(namedOK = true)) }
+ val tapp = atPos(startOffset(t), in.offset) { TypeApply(t, typeArgs(namedOK = true, wildOK = false)) }
simpleExprRest(tapp, canApply = true)
case LPAREN | LBRACE if canApply =>
- val app = atPos(t.pos.start, in.offset) { Apply(t, argumentExprs()) }
+ val app = atPos(startOffset(t), in.offset) { Apply(t, argumentExprs()) }
simpleExprRest(app, canApply = true)
case USCORE =>
- atPos(t.pos.start, in.skipToken()) { PostfixOp(t, nme.WILDCARD) }
+ atPos(startOffset(t), in.skipToken()) { PostfixOp(t, nme.WILDCARD) }
case _ =>
t
}
@@ -1284,7 +1344,7 @@ object Parsers {
if (in.token == IF) guard()
else {
val pat = pattern1()
- if (in.token == EQUALS) atPos(pat.pos.start, in.skipToken()) { GenAlias(pat, expr()) }
+ if (in.token == EQUALS) atPos(startOffset(pat), in.skipToken()) { GenAlias(pat, expr()) }
else generatorRest(pat)
}
@@ -1293,7 +1353,7 @@ object Parsers {
def generator(): Tree = generatorRest(pattern1())
def generatorRest(pat: Tree) =
- atPos(pat.pos.start, accept(LARROW)) { GenFrom(pat, expr()) }
+ atPos(startOffset(pat), accept(LARROW)) { GenFrom(pat, expr()) }
/** ForExpr ::= `for' (`(' Enumerators `)' | `{' Enumerators `}')
* {nl} [`yield'] Expr
@@ -1357,7 +1417,7 @@ object Parsers {
val pattern = () => {
val pat = pattern1()
if (isIdent(nme.raw.BAR))
- atPos(pat.pos.start) { Alternative(pat :: patternAlts()) }
+ atPos(startOffset(pat)) { Alternative(pat :: patternAlts()) }
else pat
}
@@ -1383,20 +1443,20 @@ object Parsers {
// compatibility for Scala2 `x @ _*` syntax
infixPattern() match {
case pt @ Ident(tpnme.WILDCARD_STAR) =>
- migrationWarningOrError("The syntax `x @ _*' is no longer supported; use `x : _*' instead", p.pos.start)
- atPos(p.pos.start, offset) { Typed(p, pt) }
+ migrationWarningOrError("The syntax `x @ _*' is no longer supported; use `x : _*' instead", startOffset(p))
+ atPos(startOffset(p), offset) { Typed(p, pt) }
case p =>
- atPos(p.pos.start, offset) { Bind(name, p) }
+ atPos(startOffset(p), offset) { Bind(name, p) }
}
case p @ Ident(tpnme.WILDCARD_STAR) =>
// compatibility for Scala2 `_*` syntax
- migrationWarningOrError("The syntax `_*' is no longer supported; use `x : _*' instead", p.pos.start)
- atPos(p.pos.start) { Typed(Ident(nme.WILDCARD), p) }
+ migrationWarningOrError("The syntax `_*' is no longer supported; use `x : _*' instead", startOffset(p))
+ atPos(startOffset(p)) { Typed(Ident(nme.WILDCARD), p) }
case p =>
p
}
- /** InfixPattern ::= SimplePattern {Id [nl] SimplePattern}
+ /** InfixPattern ::= SimplePattern {id [nl] SimplePattern}
*/
def infixPattern(): Tree =
infixOps(simplePattern(), canStartExpressionTokens, simplePattern, notAnOperator = nme.raw.BAR)
@@ -1408,14 +1468,14 @@ object Parsers {
* | SimplePattern1 [TypeArgs] [ArgumentPatterns]
* SimplePattern1 ::= Path
* | `{' Block `}'
- * | SimplePattern1 `.' Id
- * PatVar ::= Id
+ * | SimplePattern1 `.' id
+ * PatVar ::= id
* | `_'
*/
val simplePattern = () => in.token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
path(thisOK = true) match {
- case id @ Ident(nme.raw.MINUS) if isNumericLit => literal(id.pos.start)
+ case id @ Ident(nme.raw.MINUS) if isNumericLit => literal(startOffset(id))
case t => simplePatternRest(t)
}
case USCORE =>
@@ -1435,7 +1495,7 @@ object Parsers {
case XMLSTART =>
xmlLiteralPattern()
case _ =>
- if (isLiteral) literal()
+ if (isLiteral) literal(inPattern = true)
else {
syntaxErrorOrIncomplete(IllegalStartOfSimplePattern())
errorTermTree
@@ -1445,9 +1505,9 @@ object Parsers {
def simplePatternRest(t: Tree): Tree = {
var p = t
if (in.token == LBRACKET)
- p = atPos(t.pos.start, in.offset) { TypeApply(p, typeArgs()) }
+ p = atPos(startOffset(t), in.offset) { TypeApply(p, typeArgs(namedOK = false, wildOK = false)) }
if (in.token == LPAREN)
- p = atPos(t.pos.start, in.offset) { Apply(p, argumentPatterns()) }
+ p = atPos(startOffset(t), in.offset) { Apply(p, argumentPatterns()) }
p
}
@@ -1470,7 +1530,7 @@ object Parsers {
private def modOfToken(tok: Int): Mod = tok match {
case ABSTRACT => Mod.Abstract()
case FINAL => Mod.Final()
- case IMPLICIT => Mod.Implicit(ImplicitCommon)
+ case IMPLICIT => Mod.Implicit()
case INLINE => Mod.Inline()
case LAZY => Mod.Lazy()
case OVERRIDE => Mod.Override()
@@ -1526,7 +1586,7 @@ object Parsers {
def addMod(mods: Modifiers, mod: Mod): Modifiers =
addFlag(mods, mod.flags).withAddedMod(mod)
- /** AccessQualifier ::= "[" (Id | this) "]"
+ /** AccessQualifier ::= "[" (id | this) "]"
*/
def accessQualifierOpt(mods: Modifiers): Modifiers =
if (in.token == LBRACKET) {
@@ -1563,6 +1623,9 @@ object Parsers {
normalize(loop(start))
}
+ def implicitMods(): Modifiers =
+ addMod(EmptyModifiers, atPos(accept(IMPLICIT)) { Mod.Implicit() })
+
/** Wrap annotation or constructor in New(...).<init> */
def wrapNew(tpt: Tree) = Select(New(tpt), nme.CONSTRUCTOR)
@@ -1573,7 +1636,8 @@ object Parsers {
case Select(qual, name) => cpy.Select(tree)(adjustStart(start)(qual), name)
case _ => tree
}
- if (start < tree1.pos.start) tree1.withPos(tree1.pos.withStart(start))
+ if (tree1.pos.exists && start < tree1.pos.start)
+ tree1.withPos(tree1.pos.withStart(start))
else tree1
}
@@ -1601,16 +1665,16 @@ object Parsers {
/** ClsTypeParamClause::= `[' ClsTypeParam {`,' ClsTypeParam} `]'
* ClsTypeParam ::= {Annotation} [{Modifier} type] [`+' | `-']
- * Id [HkTypeParamClause] TypeParamBounds
+ * id [HkTypeParamClause] TypeParamBounds
*
* DefTypeParamClause::= `[' DefTypeParam {`,' DefTypeParam} `]'
- * DefTypeParam ::= {Annotation} Id [HkTypeParamClause] TypeParamBounds
+ * DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds
*
* TypTypeParamCaluse::= `[' TypTypeParam {`,' TypTypeParam} `]'
- * TypTypeParam ::= {Annotation} Id [HkTypePamClause] TypeBounds
+ * TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds
*
* HkTypeParamClause ::= `[' HkTypeParam {`,' HkTypeParam} `]'
- * HkTypeParam ::= {Annotation} ['+' | `-'] (Id[HkTypePamClause] | _') TypeBounds
+ * HkTypeParam ::= {Annotation} [`+' | `-'] (id [HkTypeParamClause] | `_') TypeBounds
*/
def typeParamClause(ownerKind: ParamOwner.Value): List[TypeDef] = inBrackets {
def typeParam(): TypeDef = {
@@ -1667,9 +1731,9 @@ object Parsers {
* Param ::= id `:' ParamType [`=' Expr]
*/
def paramClauses(owner: Name, ofCaseClass: Boolean = false): List[List[ValDef]] = {
- var implicitMod: Mod = null
- var firstClauseOfCaseClass = ofCaseClass
+ var imods: Modifiers = EmptyModifiers
var implicitOffset = -1 // use once
+ var firstClauseOfCaseClass = ofCaseClass
def param(): ValDef = {
val start = in.offset
var mods = annotsAsMods()
@@ -1704,7 +1768,7 @@ object Parsers {
if (in.token == ARROW) {
if (owner.isTypeName && !(mods is Local))
syntaxError(s"${if (mods is Mutable) "`var'" else "`val'"} parameters may not be call-by-name")
- else if (implicitMod != null)
+ else if (imods.hasFlags)
syntaxError("implicit parameters may not be call-by-name")
}
paramType()
@@ -1716,7 +1780,7 @@ object Parsers {
mods = mods.withPos(mods.pos.union(Position(implicitOffset, implicitOffset)))
implicitOffset = -1
}
- if (implicitMod != null) mods = addMod(mods, implicitMod)
+ for (imod <- imods.mods) mods = addMod(mods, imod)
ValDef(name, tpt, default).withMods(mods)
}
}
@@ -1725,7 +1789,7 @@ object Parsers {
else {
if (in.token == IMPLICIT) {
implicitOffset = in.offset
- implicitMod = atPos(in.skipToken()) { Mod.Implicit(Implicit) }
+ imods = implicitMods()
}
commaSeparated(param)
}
@@ -1735,7 +1799,7 @@ object Parsers {
if (in.token == LPAREN)
paramClause() :: {
firstClauseOfCaseClass = false
- if (implicitMod == null) clauses() else Nil
+ if (imods.hasFlags) Nil else clauses()
}
else Nil
}
@@ -1765,13 +1829,13 @@ object Parsers {
}
}
- /** ImportExpr ::= StableId `.' (Id | `_' | ImportSelectors)
+ /** ImportExpr ::= StableId `.' (id | `_' | ImportSelectors)
*/
val importExpr = () => path(thisOK = false, handleImport) match {
case imp: Import =>
imp
case sel @ Select(qual, name) =>
- val selector = atPos(sel.pos.point) { Ident(name) }
+ val selector = atPos(pointOffset(sel)) { Ident(name) }
cpy.Import(sel)(qual, selector :: Nil)
case t =>
accept(DOT)
@@ -1799,12 +1863,12 @@ object Parsers {
}
}
- /** ImportSelector ::= Id [`=>' Id | `=>' `_']
+ /** ImportSelector ::= id [`=>' id | `=>' `_']
*/
def importSelector(): Tree = {
val from = termIdentOrWildcard()
if (from.name != nme.WILDCARD && in.token == ARROW)
- atPos(from.pos.start, in.skipToken()) {
+ atPos(startOffset(from), in.skipToken()) {
Thicket(from, termIdentOrWildcard())
}
else from
@@ -1844,9 +1908,9 @@ object Parsers {
}
/** PatDef ::= Pattern2 {`,' Pattern2} [`:' Type] `=' Expr
- * VarDef ::= PatDef | Id {`,' Id} `:' Type `=' `_'
- * ValDcl ::= Id {`,' Id} `:' Type
- * VarDcl ::= Id {`,' Id} `:' Type
+ * VarDef ::= PatDef | id {`,' id} `:' Type `=' `_'
+ * ValDcl ::= id {`,' id} `:' Type
+ * VarDcl ::= id {`,' id} `:' Type
*/
def patDefOrDcl(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): Tree = atPos(start, nameStart) {
val lhs = commaSeparated(pattern2)
@@ -1887,6 +1951,7 @@ object Parsers {
if (in.token == THIS) {
in.nextToken()
val vparamss = paramClauses(nme.CONSTRUCTOR)
+ if (in.isScala2Mode) newLineOptWhenFollowedBy(LBRACE)
val rhs = {
if (!(in.token == LBRACE && scala2ProcedureSyntax(""))) accept(EQUALS)
atPos(in.offset) { constrExpr() }
@@ -1898,6 +1963,7 @@ object Parsers {
val tparams = typeParamClauseOpt(ParamOwner.Def)
val vparamss = paramClauses(name)
var tpt = fromWithinReturnType(typedOpt())
+ if (in.isScala2Mode) newLineOptWhenFollowedBy(LBRACE)
val rhs =
if (in.token == EQUALS) {
in.nextToken()
@@ -1946,8 +2012,8 @@ object Parsers {
Block(stats, Literal(Constant(())))
}
- /** TypeDef ::= type Id [TypeParamClause] `=' Type
- * TypeDcl ::= type Id [TypeParamClause] TypeBounds
+ /** TypeDef ::= type id [TypeParamClause] `=' Type
+ * TypeDcl ::= type id [TypeParamClause] TypeBounds
*/
def typeDefOrDcl(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): Tree = {
newLinesOpt()
@@ -1989,7 +2055,7 @@ object Parsers {
}
}
- /** ClassDef ::= Id [ClsTypeParamClause]
+ /** ClassDef ::= id [ClsTypeParamClause]
* [ConstrMods] ClsParamClauses TemplateOpt
*/
def classDef(start: Offset, mods: Modifiers, docstring: Option[Comment]): TypeDef = atPos(start, nameStart) {
@@ -2017,7 +2083,7 @@ object Parsers {
mods
}
- /** ObjectDef ::= Id TemplateOpt
+ /** ObjectDef ::= id TemplateOpt
*/
def objectDef(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): ModuleDef = atPos(start, nameStart) {
val name = ident()
@@ -2085,7 +2151,7 @@ object Parsers {
/** Create a tree representing a packaging */
def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
- case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
+ case x: RefTree => atPos(start, pointOffset(pkg))(PackageDef(x, stats))
}
/** Packaging ::= package QualId [nl] `{' TopStatSeq `}'
@@ -2198,9 +2264,9 @@ object Parsers {
stats.toList
}
- def localDef(start: Int, implicitFlag: FlagSet, implicitMod: Option[Mod] = None): Tree = {
- var mods = addFlag(defAnnotsMods(localModifierTokens), implicitFlag)
- if (implicitMod.nonEmpty) mods = mods.withAddedMod(implicitMod.get)
+ def localDef(start: Int, implicitMods: Modifiers = EmptyModifiers): Tree = {
+ var mods = defAnnotsMods(localModifierTokens)
+ for (imod <- implicitMods.mods) mods = addMod(mods, imod)
defOrDcl(start, mods)
}
@@ -2223,11 +2289,11 @@ object Parsers {
else if (isDefIntro(localModifierTokens))
if (in.token == IMPLICIT) {
val start = in.offset
- val mod = atPos(in.skipToken()) { Mod.Implicit(ImplicitCommon) }
- if (isIdent) stats += implicitClosure(start, Location.InBlock, Some(mod))
- else stats += localDef(start, ImplicitCommon, Some(mod))
+ val imods = implicitMods()
+ if (isBindingIntro) stats += implicitClosure(start, Location.InBlock, imods)
+ else stats += localDef(start, imods)
} else {
- stats += localDef(in.offset, EmptyFlags)
+ stats += localDef(in.offset)
}
else if (!isStatSep && (in.token != CASE)) {
exitOnError = mustStartStat
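For orientation, the closure parameter forms admitted by the new FunParams/Binding productions above look as follows in source code (a surface-syntax sketch, not taken from this patch; whether each form typechecks depends on the surrounding context):

    implicit (s: String) => s.length   // Bindings in parentheses
    implicit s => s.length             // single id, type inferred from the expected type
    implicit s: String => s.length     // single id with ascription (block position, Scala2 only)
    implicit _ => 0                    // wildcard, bound to a fresh synthetic parameter name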
diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
index 60003d098..101be167e 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -758,7 +758,7 @@ object Scanners {
finishStringPart()
nextRawChar()
next.token = LBRACE
- } else if (Character.isUnicodeIdentifierStart(ch)) {
+ } else if (Character.isUnicodeIdentifierStart(ch) || ch == '_') {
finishStringPart()
do {
putChar(ch)
diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
index 5324207db..280832ef3 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
@@ -209,6 +209,8 @@ object Tokens extends TokensCommon {
final val canStartTypeTokens = literalTokens | identifierTokens | BitSet(
THIS, SUPER, USCORE, LPAREN, AT)
+ final val canStartBindingTokens = identifierTokens | BitSet(USCORE, LPAREN)
+
final val templateIntroTokens = BitSet(CLASS, TRAIT, OBJECT, CASECLASS, CASEOBJECT)
final val dclIntroTokens = BitSet(DEF, VAL, VAR, TYPE)
diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala
index b321d3736..05f1af9d7 100644
--- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala
+++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala
@@ -96,7 +96,10 @@ object Formatting {
case tpe: Type =>
tpe.exists && !tpe.isErroneous
case sym: Symbol if sym.isCompleted =>
- sym.info != ErrorType && sym.info != TypeAlias(ErrorType) && sym.info.exists
+ sym.info match {
+ case _: ErrorType | TypeAlias(_: ErrorType) | NoType => false
+ case _ => true
+ }
case _ => true
}
diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
index 15c382bb0..61f23c214 100644
--- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
+++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
@@ -154,7 +154,7 @@ class PlainPrinter(_ctx: Context) extends Printer {
changePrec(AndPrec) { toText(tp1) ~ " & " ~ toText(tp2) }
case OrType(tp1, tp2) =>
changePrec(OrPrec) { toText(tp1) ~ " | " ~ toText(tp2) }
- case ErrorType =>
+ case _: ErrorType =>
"<error>"
case tp: WildcardType =>
if (tp.optBounds.exists) "(?" ~ toTextRHS(tp.bounds) ~ ")" else "?"
diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala
index 00627fc28..3085ad8fd 100644
--- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala
+++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala
@@ -113,20 +113,21 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
override def toText(tp: Type): Text = controlled {
def toTextTuple(args: List[Type]): Text =
"(" ~ Text(args.map(argText), ", ") ~ ")"
- def toTextFunction(args: List[Type]): Text =
+ def toTextFunction(args: List[Type], isImplicit: Boolean): Text =
changePrec(GlobalPrec) {
val argStr: Text =
if (args.length == 2 && !defn.isTupleType(args.head))
atPrec(InfixPrec) { argText(args.head) }
else
toTextTuple(args.init)
- argStr ~ " => " ~ argText(args.last)
+ ("implicit " provided isImplicit) ~ argStr ~ " => " ~ argText(args.last)
}
homogenize(tp) match {
case AppliedType(tycon, args) =>
val cls = tycon.typeSymbol
if (tycon.isRepeatedParam) return toTextLocal(args.head) ~ "*"
- if (defn.isFunctionClass(cls)) return toTextFunction(args)
+ if (defn.isFunctionClass(cls)) return toTextFunction(args, isImplicit = false)
+ if (defn.isImplicitFunctionClass(cls)) return toTextFunction(args, isImplicit = true)
if (defn.isTupleClass(cls)) return toTextTuple(args)
return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close
case tp: TypeRef =>
@@ -614,14 +615,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
(sym.allOverriddenSymbols exists (_ is TypeParam))
override def toText(sym: Symbol): Text = {
- if (sym.isImport) {
- def importString(tree: untpd.Tree) = s"import ${tree.show}"
+ if (sym.isImport)
sym.infoOrCompleter match {
- case info: Namer#Completer => return importString(info.original)
- case info: ImportType => return importString(info.expr)
+ case info: Namer#Completer => return info.original.show
+ case info: ImportType => return s"import ${info.expr.show}"
case _ =>
}
- }
if (sym.is(ModuleClass))
kindString(sym) ~~ (nameString(sym.name.stripModuleClassSuffix) + idString(sym))
else
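A small illustration of the printer change above, assuming the implicit function type syntax introduced by this patch (the type aliases are made up for the example):

    type Plain      = Int => String            // printed as: Int => String
    type Contextual = implicit Int => String   // printed as: implicit Int => String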
diff --git a/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala b/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala
index 5b3669d5e..65c64f708 100644
--- a/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala
+++ b/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala
@@ -6,7 +6,7 @@ import java.io.{
File, PrintWriter, PrintStream, StringWriter, Writer, OutputStream,
ByteArrayOutputStream => ByteOutputStream
}
-import java.lang.{Class, ClassLoader}
+import java.lang.{Class, ClassLoader, Thread, System, StringBuffer}
import java.net.{URL, URLClassLoader}
import scala.collection.immutable.ListSet
@@ -443,7 +443,10 @@ class CompilingInterpreter(
}
// the types are all =>T; remove the =>
- val cleanedType = rawType.widenExpr
+ val cleanedType = rawType.widenExpr match {
+ case tp: MethodType => tp.resultType
+ case tp => tp
+ }
map + (name ->
ctx.atPhase(ctx.typerPhase.next) { implicit ctx =>
@@ -680,15 +683,49 @@ class CompilingInterpreter(
code.print(resultExtractors.mkString(""))
}
+ private val ListReg = """^.*List\[(\w+)\]$""".r
+ private val MapReg = """^.*Map\[(\w+),[ ]*(\w+)\]$""".r
+ private val LitReg = """^.*\((.+)\)$""".r
+
private def resultExtractor(req: Request, varName: Name): String = {
val prettyName = varName.decode
- val varType = string2code(req.typeOf(varName))
+ // FIXME: `varType` is prettified to abbreviate common types where
+ // appropriate, and to also prettify literal types
+ //
+ // This should be rewritten to use the actual types once we have a
+ // semantic representation available to the REPL
+ val varType = string2code(req.typeOf(varName)) match {
+ // Extract List's parameter from full path
+ case ListReg(param) => s"List[$param]"
+ // Extract Map's parameters from full path
+ case MapReg(k, v) => s"Map[$k, $v]"
+ // Extract literal type from literal type representation. Example:
+ //
+ // ```
+ // scala> val x: 42 = 42
+ // val x: Int(42) = 42
+ // scala> val y: "hello" = "hello"
+ // val y: String("hello") = "hello"
+ // ```
+ case LitReg(lit) => lit
+ // When the type is a singleton value like None, don't show `None$`
+ // instead show `None.type`.
+ case x if x.lastOption == Some('$') => x.init + ".type"
+ case x => x
+ }
val fullPath = req.fullPath(varName)
- s""" + "$prettyName: $varType = " + {
+ val varOrVal = statement match {
+ case v: ValDef if v.mods is Flags.Mutable => "var"
+ case _ => "val"
+ }
+
+ s""" + "$varOrVal $prettyName: $varType = " + {
| if ($fullPath.asInstanceOf[AnyRef] != null) {
- | (if ($fullPath.toString().contains('\\n')) "\\n" else "") +
- | $fullPath.toString() + "\\n"
+ | (if ($fullPath.toString().contains('\\n')) "\\n" else "") + {
+ | import dotty.Show._
+ | $fullPath.show /*toString()*/ + "\\n"
+ | }
| } else {
| "null\\n"
| }
@@ -735,9 +772,30 @@ class CompilingInterpreter(
override def defNames = boundNames
override def resultExtractionCode(req: Request, code: PrintWriter): Unit = {
- if (!defDef.mods.is(Flags.AccessFlags))
- code.print("+\"" + string2code(defDef.name.toString) + ": " +
- string2code(req.typeOf(defDef.name)) + "\\n\"")
+ /** TODO: This is a consequence of the current state of the REPL - it would be
+ * entirely unnecessary with a better structure where we could just
+ * use the type printer
+ *
+ * @see `def findTypes` for an explanation of what should be done
+ */
+ if (!defDef.mods.is(Flags.AccessFlags)) {
+ // Take the DefDef and remove the `rhs` and ascribed type `tpt`
+ val copy = ast.untpd.cpy.DefDef(defDef)(
+ rhs = EmptyTree,
+ tpt = TypeTree
+ )
+
+ val tpt = defDef.tpt match {
+ // ascribed TypeExpr e.g: `def foo: Int = 5`
+ case Ident(tpt) if defDef.vparamss.isEmpty =>
+ ": " + tpt.show
+ case tpt =>
+ ": " + req.typeOf(defDef.name)
+ }
+ code.print {
+ "+\"" + string2code(copy.show) + tpt + "\\n\""
+ }
+ }
}
}
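The type-prettifying regexes added above can be exercised on their own; a minimal standalone sketch (not part of the interpreter, object and method names are made up):

    object PrettyTypeDemo {
      private val ListReg = """^.*List\[(\w+)\]$""".r
      private val MapReg  = """^.*Map\[(\w+),[ ]*(\w+)\]$""".r
      private val LitReg  = """^.*\((.+)\)$""".r

      // mirrors the prettifying cases used for `varType` above
      def pretty(tpe: String): String = tpe match {
        case ListReg(param)                 => s"List[$param]"
        case MapReg(k, v)                   => s"Map[$k, $v]"
        case LitReg(lit)                    => lit
        case x if x.lastOption == Some('$') => x.init + ".type"
        case x                              => x
      }

      def main(args: Array[String]): Unit = {
        println(pretty("scala.collection.immutable.List[Int]"))        // List[Int]
        println(pretty("scala.collection.immutable.Map[Int, String]")) // Map[Int, String]
        println(pretty("Int(42)"))                                     // 42
        println(pretty("scala.None$"))                                 // scala.None.type
      }
    }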
diff --git a/compiler/src/dotty/tools/dotc/repl/InterpreterLoop.scala b/compiler/src/dotty/tools/dotc/repl/InterpreterLoop.scala
index b3ac41c55..cfe8d892d 100644
--- a/compiler/src/dotty/tools/dotc/repl/InterpreterLoop.scala
+++ b/compiler/src/dotty/tools/dotc/repl/InterpreterLoop.scala
@@ -4,7 +4,7 @@ package repl
import java.io.{BufferedReader, File, FileReader, PrintWriter}
import java.io.IOException
-import java.lang.{ClassLoader, System}
+import java.lang.{ClassLoader, System, Thread}
import scala.concurrent.{Future, Await}
import scala.concurrent.duration.Duration
import reporting.Reporter
diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala
index 8477cfe28..26c1e5ebc 100644
--- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala
@@ -286,11 +286,16 @@ abstract class Reporter extends interfaces.ReporterResult {
}
/** Should this diagnostic not be reported at all? */
- def isHidden(m: MessageContainer)(implicit ctx: Context): Boolean = ctx.mode.is(Mode.Printing)
+ def isHidden(m: MessageContainer)(implicit ctx: Context): Boolean =
+ ctx.mode.is(Mode.Printing)
/** Does this reporter contain not yet reported errors or warnings? */
def hasPending: Boolean = false
+ /** If this reporter buffers messages, remove and return all buffered messages. */
+ def removeBufferedMessages(implicit ctx: Context): List[MessageContainer] = Nil
+
/** Issue all error messages in this reporter to next outer one, or make sure they are written. */
- def flush()(implicit ctx: Context): Unit = {}
+ def flush()(implicit ctx: Context): Unit =
+ removeBufferedMessages.foreach(ctx.reporter.report)
}
diff --git a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala
index 586273c2e..34b109882 100644
--- a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala
@@ -36,11 +36,9 @@ class StoreReporter(outer: Reporter) extends Reporter {
}
}
- override def flush()(implicit ctx: Context) =
- if (infos != null) {
- infos.foreach(ctx.reporter.report(_))
- infos = null
- }
+ override def removeBufferedMessages(implicit ctx: Context): List[MessageContainer] =
+ if (infos != null) try infos.toList finally infos = null
+ else Nil
override def errorsReported = hasErrors || outer.errorsReported
}
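The flush/removeBufferedMessages split above is a drain-on-flush idiom; a plain-Scala sketch independent of the compiler's Reporter types (all names illustrative):

    class BufferedSink[A](outer: A => Unit) {
      private var buf: scala.collection.mutable.ListBuffer[A] = null

      def report(a: A): Unit = {
        if (buf == null) buf = new scala.collection.mutable.ListBuffer
        buf += a
      }

      // remove and return everything buffered so far, leaving the buffer empty
      def removeBuffered(): List[A] =
        if (buf != null) try buf.toList finally buf = null else Nil

      // forward buffered entries to the outer sink, as Reporter.flush now does
      def flush(): Unit = removeBuffered().foreach(outer)
    }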
diff --git a/compiler/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala b/compiler/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala
index 7fd50bfdc..c27644ad9 100644
--- a/compiler/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala
@@ -14,7 +14,7 @@ object MessageContainer {
implicit class MessageContext(val c: Context) extends AnyVal {
def shouldExplain(cont: MessageContainer): Boolean = {
- implicit val ctx: Context = c
+ implicit val ctx = c
cont.contained.explanation match {
case "" => false
case _ => ctx.settings.explain.value
diff --git a/compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala b/compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
index 489165e56..89cd2cd8f 100644
--- a/compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
@@ -5,8 +5,7 @@ package diagnostic
import dotc.core._
import Contexts.Context, Decorators._, Symbols._, Names._, NameOps._, Types._
-import util.{SourceFile, NoSource}
-import util.{SourcePosition, NoSourcePosition}
+import util.SourcePosition
import config.Settings.Setting
import interfaces.Diagnostic.{ERROR, WARNING, INFO}
import printing.Highlighting._
@@ -224,6 +223,8 @@ object messages {
extends Message(8) {
val kind = "Member Not Found"
+ //println(i"site = $site, decls = ${site.decls}, source = ${site.widen.typeSymbol.sourceFile}") //DEBUG
+
val msg = {
import core.Flags._
val maxDist = 3
@@ -606,7 +607,7 @@ object messages {
|"""
}
- case class WrongNumberOfArgs(fntpe: Type, argKind: String, expectedArgs: List[TypeParamInfo], actual: List[untpd.Tree])(implicit ctx: Context)
+ case class WrongNumberOfTypeArgs(fntpe: Type, expectedArgs: List[TypeParamInfo], actual: List[untpd.Tree])(implicit ctx: Context)
extends Message(22) {
val kind = "Syntax"
@@ -628,7 +629,7 @@ object messages {
}
val msg =
- hl"""|${NoColor(msgPrefix)} ${argKind} arguments for $prettyName$expectedArgString
+ hl"""|${NoColor(msgPrefix)} type arguments for $prettyName$expectedArgString
|expected: $expectedArgString
|actual: $actualArgString""".stripMargin
@@ -899,4 +900,106 @@ object messages {
val msg = hl"trying to define package with same name as `$existing`"
val explanation = ""
}
+
+ case class ExistentialTypesNoLongerSupported()(implicit ctx: Context) extends Message(34) {
+ val kind = "Syntax"
+ val msg =
+ hl"""|Existential types are no longer supported -
+ |use a wildcard or dependent type instead"""
+ val explanation =
+ hl"""|The use of existential types is no longer supported.
+ |
+ |You should use a wildcard or dependent type instead.
+ |
+ |For example:
+ |
+ |Instead of using ${"forSome"} to specify a type variable
+ |
+ |${"List[T forSome { type T }]"}
+ |
+ |Try using a wildcard type variable
+ |
+ |${"List[_]"}
+ |"""
+ }
+
+ case class UnboundWildcardType()(implicit ctx: Context) extends Message(35) {
+ val kind = "Syntax"
+ val msg = "Unbound wildcard type"
+ val explanation =
+ hl"""|The wildcard type syntax (`_`) was used where it could not be bound.
+ |Replace `_` with a non-wildcard type. If the type doesn't matter,
+ |try replacing `_` with ${"Any"}.
+ |
+ |Examples:
+ |
+ |- Parameter lists
+ |
+ | Instead of:
+ | ${"def foo(x: _) = ..."}
+ |
+ | Use ${"Any"} if the type doesn't matter:
+ | ${"def foo(x: Any) = ..."}
+ |
+ |- Type arguments
+ |
+ | Instead of:
+ | ${"val foo = List[_](1, 2)"}
+ |
+ | Use:
+ | ${"val foo = List[Int](1, 2)"}
+ |
+ |- Type bounds
+ |
+ | Instead of:
+ | ${"def foo[T <: _](x: T) = ..."}
+ |
+ | Remove the bounds if the type doesn't matter:
+ | ${"def foo[T](x: T) = ..."}
+ |
+ |- ${"val"} and ${"def"} types
+ |
+ | Instead of:
+ | ${"val foo: _ = 3"}
+ |
+ | Use:
+ | ${"val foo: Int = 3"}
+ |"""
+ }
+
+ case class DanglingThisInPath()(implicit ctx: Context) extends Message(36) {
+ val kind = "Syntax"
+ val msg = hl"""Expected an additional member selection after the keyword ${"this"}"""
+
+ val contextCode =
+ """ trait Outer {
+ | val member: Int
+ | type Member
+ | trait Inner {
+ | ...
+ | }
+ | }"""
+
+ val importCode =
+ """ import Outer.this.member
+ | // ^^^^^^^"""
+
+ val typeCode =
+ """ type T = Outer.this.Member
+ | // ^^^^^^^"""
+
+ val explanation =
+ hl"""|Paths of imports and type selections must not end with the keyword ${"this"}.
+ |
+ |Maybe you forgot to select a member of ${"this"}? As an example, in the
+ |following context:
+ |${contextCode}
+ |
+ |- this is a valid import expression using a path
+ |${importCode}
+ |
+ |- this is a valid type using a path
+ |${typeCode}
+ |"""
+ }
}
diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
index bc8528c05..1fffe6841 100644
--- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
@@ -3,7 +3,7 @@ package sbt
import ast.{Trees, tpd}
import core._, core.Decorators._
-import Contexts._, Flags._, Phases._, Trees._, Types._, Symbols._
+import Annotations._, Contexts._, Flags._, Phases._, Trees._, Types._, Symbols._
import Names._, NameOps._, StdNames._
import typer.Inliner
@@ -333,7 +333,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder
// TODO: Never dealias. We currently have to dealias because
// sbt main class discovery relies on the signature of the main
// method being fully dealiased. See https://github.com/sbt/zinc/issues/102
- val tp2 = if (!tp.isHK) tp.dealias else tp
+ val tp2 = if (!tp.isHK) tp.dealiasKeepAnnots else tp
tp2 match {
case NoPrefix | NoType =>
Constants.emptyType
@@ -411,9 +411,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder
case ConstantType(constant) =>
new api.Constant(apiType(constant.tpe), constant.stringValue)
case AnnotatedType(tpe, annot) =>
- // TODO: Annotation support
- ctx.debuglog(i"sbt-api: skipped annotation in $tp2")
- apiType(tpe)
+ new api.Annotated(apiType(tpe), Array(apiAnnotation(annot)))
case tp: ThisType =>
apiThis(tp.cls)
case tp: ParamType =>
@@ -498,7 +496,6 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder
sym.is(Implicit), sym.is(Lazy), sym.is(Macro), sym.is(SuperAccessor))
}
- // TODO: Support other annotations
def apiAnnotations(s: Symbol): List[api.Annotation] = {
val annots = new mutable.ListBuffer[api.Annotation]
@@ -513,6 +510,27 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder
annots += marker(Inliner.bodyToInline(s).show(printTypesCtx).toString)
}
+ // In the Scala2 ExtractAPI phase we only extract annotations that extend
+ // StaticAnnotation, but in Dotty we currently pickle all annotations so we
+ // extract everything (except inline body annotations which are handled
+ // above).
+ s.annotations.filter(_.symbol != defn.BodyAnnot) foreach { annot =>
+ annots += apiAnnotation(annot)
+ }
+
annots.toList
}
+
+ def apiAnnotation(annot: Annotation): api.Annotation = {
+ // FIXME: To faithfully extract an API we should extract the annotation tree;
+ // sbt instead wants us to extract the annotation type and its arguments.
+ // To do this properly we would need a way to hash trees and types in dotty itself;
+ // instead we pretty-print the annotation tree.
+ // However, we still need to extract the annotation type in the way sbt expects,
+ // because sbt uses this information to find tests to run (for example
+ // junit tests are annotated @org.junit.Test).
+ new api.Annotation(
+ apiType(annot.tree.tpe), // Used by sbt to find tests to run
+ Array(new api.AnnotationArgument("FULLTREE", annot.tree.show.toString)))
+ }
}
diff --git a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
index 714255962..331fce46a 100644
--- a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
@@ -9,7 +9,6 @@ import dotty.tools.dotc.core.Symbols.NoSymbol
import scala.annotation.tailrec
import dotty.tools.dotc.core._
import Symbols._
-import scala.Some
import dotty.tools.dotc.transform.TreeTransforms.{NXTransformations, TransformerInfo, TreeTransform, TreeTransformer}
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.core.Contexts.Context
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala
index 192227261..2814baf1e 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala
@@ -71,7 +71,8 @@ class ElimByName extends MiniPhaseTransform with InfoTransformer { thisTransform
def transformArg(arg: Tree, formal: Type): Tree = formal.dealias match {
case formalExpr: ExprType =>
- val argType = arg.tpe.widen
+ var argType = arg.tpe.widenIfUnstable
+ if (defn.isBottomType(argType)) argType = formal.widenExpr
val argFun = arg match {
case Apply(Select(qual, nme.apply), Nil)
if qual.tpe.derivesFrom(defn.FunctionClass(0)) && isPureExpr(qual) =>
diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala
index 069176111..71ecb5c65 100644
--- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala
@@ -13,6 +13,7 @@ import core.StdNames._
import core.NameOps._
import core.Decorators._
import core.Constants._
+import core.Definitions._
import typer.NoChecking
import typer.ProtoTypes._
import typer.ErrorReporting._
@@ -36,9 +37,17 @@ class Erasure extends Phase with DenotTransformer { thisTransformer =>
def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref match {
case ref: SymDenotation =>
+ def isCompacted(sym: Symbol) =
+ sym.isAnonymousFunction && {
+ sym.info(ctx.withPhase(ctx.phase.next)) match {
+ case MethodType(nme.ALLARGS :: Nil, _) => true
+ case _ => false
+ }
+ }
+
assert(ctx.phase == this, s"transforming $ref at ${ctx.phase}")
if (ref.symbol eq defn.ObjectClass) {
- // Aftre erasure, all former Any members are now Object members
+ // After erasure, all former Any members are now Object members
val ClassInfo(pre, _, ps, decls, selfInfo) = ref.info
val extendedScope = decls.cloneScope
for (decl <- defn.AnyClass.classInfo.decls)
@@ -59,7 +68,10 @@ class Erasure extends Phase with DenotTransformer { thisTransformer =>
val oldInfo = ref.info
val newInfo = transformInfo(ref.symbol, oldInfo)
val oldFlags = ref.flags
- val newFlags = ref.flags &~ Flags.HasDefaultParams // HasDefaultParams needs to be dropped because overriding might become overloading
+ val newFlags =
+ if (oldSymbol.is(Flags.TermParam) && isCompacted(oldSymbol.owner)) oldFlags &~ Flags.Param
+ else oldFlags &~ Flags.HasDefaultParams // HasDefaultParams needs to be dropped because overriding might become overloading
+
// TODO: define derivedSymDenotation?
if ((oldSymbol eq newSymbol) && (oldOwner eq newOwner) && (oldInfo eq newInfo) && (oldFlags == newFlags)) ref
else {
@@ -82,7 +94,7 @@ class Erasure extends Phase with DenotTransformer { thisTransformer =>
assertErased(tree)
tree match {
case res: tpd.This =>
- assert(!ExplicitOuter.referencesOuter(ctx.owner.enclosingClass, res),
+ assert(!ExplicitOuter.referencesOuter(ctx.owner.lexicallyEnclosingClass, res),
i"Reference to $res from ${ctx.owner.showLocated}")
case ret: tpd.Return =>
// checked only after erasure, as checking before erasure is complicated
@@ -331,8 +343,25 @@ object Erasure extends TypeTestsCasts{
* e.m -> e.[]m if `m` is an array operation other than `clone`.
*/
override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = {
- val sym = tree.symbol
- assert(sym.exists, tree.show)
+
+ def mapOwner(sym: Symbol): Symbol = {
+ def recur(owner: Symbol): Symbol =
+ if ((owner eq defn.AnyClass) || (owner eq defn.AnyValClass)) {
+ assert(sym.isConstructor, s"${sym.showLocated}")
+ defn.ObjectClass
+ } else if (defn.isUnimplementedFunctionClass(owner))
+ defn.FunctionXXLClass
+ else if (defn.isImplicitFunctionClass(owner))
+ recur(defn.FunctionClass(owner.name.functionArity))
+ else
+ owner
+ recur(sym.owner)
+ }
+
+ val origSym = tree.symbol
+ val owner = mapOwner(origSym)
+ val sym = if (owner eq origSym.owner) origSym else owner.info.decl(origSym.name).symbol
+ assert(sym.exists, origSym.showLocated)
def select(qual: Tree, sym: Symbol): Tree = {
val name = tree.typeOpt match {
@@ -366,11 +395,7 @@ object Erasure extends TypeTestsCasts{
def recur(qual: Tree): Tree = {
val qualIsPrimitive = qual.tpe.widen.isPrimitiveValueType
val symIsPrimitive = sym.owner.isPrimitiveValueClass
- if ((sym.owner eq defn.AnyClass) || (sym.owner eq defn.AnyValClass)) {
- assert(sym.isConstructor, s"${sym.showLocated}")
- select(qual, defn.ObjectClass.info.decl(sym.name).symbol)
- }
- else if (qualIsPrimitive && !symIsPrimitive || qual.tpe.widenDealias.isErasedValueType)
+ if (qualIsPrimitive && !symIsPrimitive || qual.tpe.widenDealias.isErasedValueType)
recur(box(qual))
else if (!qualIsPrimitive && symIsPrimitive)
recur(unbox(qual, sym.owner.typeRef))
@@ -389,7 +414,7 @@ object Erasure extends TypeTestsCasts{
}
override def typedThis(tree: untpd.This)(implicit ctx: Context): Tree =
- if (tree.symbol == ctx.owner.enclosingClass || tree.symbol.isStaticOwner) promote(tree)
+ if (tree.symbol == ctx.owner.lexicallyEnclosingClass || tree.symbol.isStaticOwner) promote(tree)
else {
ctx.log(i"computing outer path from ${ctx.owner.ownersIterator.toList}%, % to ${tree.symbol}, encl class = ${ctx.owner.enclosingClass}")
outer.path(tree.symbol)
@@ -423,6 +448,9 @@ object Erasure extends TypeTestsCasts{
}
}
+ /** Besides normal typing, this method collects all arguments
+ * to a compacted function into a single argument of array type.
+ */
override def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context): Tree = {
val Apply(fun, args) = tree
if (fun.symbol == defn.dummyApply)
@@ -434,7 +462,13 @@ object Erasure extends TypeTestsCasts{
fun1.tpe.widen match {
case mt: MethodType =>
val outers = outer.args(fun.asInstanceOf[tpd.Tree]) // can't use fun1 here because its type is already erased
- val args1 = (outers ::: args ++ protoArgs(pt)).zipWithConserve(mt.paramTypes)(typedExpr)
+ var args0 = outers ::: args ++ protoArgs(pt)
+ if (args0.length > MaxImplementedFunctionArity && mt.paramTypes.length == 1) {
+ val bunchedArgs = untpd.JavaSeqLiteral(args0, TypeTree(defn.ObjectType))
+ .withType(defn.ArrayOf(defn.ObjectType))
+ args0 = bunchedArgs :: Nil
+ }
+ val args1 = args0.zipWithConserve(mt.paramTypes)(typedExpr)
untpd.cpy.Apply(tree)(fun1, args1) withType mt.resultType
case _ =>
throw new MatchError(i"tree $tree has unexpected type of function ${fun1.tpe.widen}, was ${fun.typeOpt.widen}")
@@ -470,18 +504,36 @@ object Erasure extends TypeTestsCasts{
super.typedValDef(untpd.cpy.ValDef(vdef)(
tpt = untpd.TypedSplice(TypeTree(sym.info).withPos(vdef.tpt.pos))), sym)
+ /** Besides normal typing, this function also compacts anonymous functions
+ * with more than `MaxImplementedFunctionArity` parameters to ise a single
+ * parameter of type `[]Object`.
+ */
override def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(implicit ctx: Context) = {
val restpe =
if (sym.isConstructor) defn.UnitType
else sym.info.resultType
+ var vparamss1 = (outer.paramDefs(sym) ::: ddef.vparamss.flatten) :: Nil
+ var rhs1 = ddef.rhs match {
+ case id @ Ident(nme.WILDCARD) => untpd.TypedSplice(id.withType(restpe))
+ case _ => ddef.rhs
+ }
+ if (sym.isAnonymousFunction && vparamss1.head.length > MaxImplementedFunctionArity) {
+ val bunchedParam = ctx.newSymbol(sym, nme.ALLARGS, Flags.TermParam, JavaArrayType(defn.ObjectType))
+ def selector(n: Int) = ref(bunchedParam)
+ .select(defn.Array_apply)
+ .appliedTo(Literal(Constant(n)))
+ val paramDefs = vparamss1.head.zipWithIndex.map {
+ case (paramDef, idx) =>
+ assignType(untpd.cpy.ValDef(paramDef)(rhs = selector(idx)), paramDef.symbol)
+ }
+ vparamss1 = (tpd.ValDef(bunchedParam) :: Nil) :: Nil
+ rhs1 = untpd.Block(paramDefs, rhs1)
+ }
val ddef1 = untpd.cpy.DefDef(ddef)(
tparams = Nil,
- vparamss = (outer.paramDefs(sym) ::: ddef.vparamss.flatten) :: Nil,
+ vparamss = vparamss1,
tpt = untpd.TypedSplice(TypeTree(restpe).withPos(ddef.tpt.pos)),
- rhs = ddef.rhs match {
- case id @ Ident(nme.WILDCARD) => untpd.TypedSplice(id.withType(restpe))
- case _ => ddef.rhs
- })
+ rhs = rhs1)
super.typedDefDef(ddef1, sym)
}
@@ -525,7 +577,7 @@ object Erasure extends TypeTestsCasts{
val bridge = ctx.newSymbol(ctx.owner, nme.ANON_FUN, Flags.Synthetic | Flags.Method, sam.info)
val bridgeCtx = ctx.withOwner(bridge)
Closure(bridge, bridgeParamss => {
- implicit val ctx: Context = bridgeCtx
+ implicit val ctx = bridgeCtx
val List(bridgeParams) = bridgeParamss
val rhs = Apply(meth, (bridgeParams, implParamTypes).zipped.map(adapt(_, _)))
@@ -641,7 +693,7 @@ object Erasure extends TypeTestsCasts{
val bridgeCtx = ctx.withOwner(bridge)
tpd.DefDef(bridge, { paramss: List[List[tpd.Tree]] =>
- implicit val ctx: Context = bridgeCtx
+ implicit val ctx = bridgeCtx
val rhs = paramss.foldLeft(sel)((fun, vparams) =>
fun.tpe.widen match {
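Conceptually, the compaction implemented in typedApply/typedDefDef above rewrites a lambda with more than MaxImplementedFunctionArity parameters so that all arguments travel in a single Object array; a hand-written sketch of the resulting shape (illustrative only, not actual compiler output):

    // callee side: one bunched parameter, original parameters unpacked by index
    val impl: Array[Object] => Any = { allargs =>
      val x0 = allargs(0).asInstanceOf[Int]
      val x1 = allargs(1).asInstanceOf[Int]
      x0 + x1
    }
    // call site: the arguments are bunched into a single array
    val result = impl(Array[Object](Int.box(1), Int.box(2)))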
diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
index 3fec47e9f..c2aacf826 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
@@ -81,21 +81,26 @@ class ExplicitOuter extends MiniPhaseTransform with InfoTransformer { thisTransf
!needsOuterAlways(cls) &&
impl.existsSubTree(referencesOuter(cls, _)))
ensureOuterAccessors(cls)
- if (hasOuter(cls)) {
+
+ val clsHasOuter = hasOuter(cls)
+ if (clsHasOuter || cls.mixins.exists(needsOuterIfReferenced)) {
val newDefs = new mutable.ListBuffer[Tree]
- if (isTrait)
- newDefs += DefDef(outerAccessor(cls).asTerm, EmptyTree)
- else {
- val outerParamAcc = outerParamAccessor(cls)
- newDefs += ValDef(outerParamAcc, EmptyTree)
- newDefs += DefDef(outerAccessor(cls).asTerm, ref(outerParamAcc))
+
+ if (clsHasOuter) {
+ if (isTrait)
+ newDefs += DefDef(outerAccessor(cls).asTerm, EmptyTree)
+ else {
+ val outerParamAcc = outerParamAccessor(cls)
+ newDefs += ValDef(outerParamAcc, EmptyTree)
+ newDefs += DefDef(outerAccessor(cls).asTerm, ref(outerParamAcc))
+ }
}
for (parentTrait <- cls.mixins) {
if (needsOuterIfReferenced(parentTrait)) {
val parentTp = cls.denot.thisType.baseTypeRef(parentTrait)
val outerAccImpl = newOuterAccessor(cls, parentTrait).enteredAfter(thisTransformer)
- newDefs += DefDef(outerAccImpl, singleton(outerPrefix(parentTp)))
+ newDefs += DefDef(outerAccImpl, singleton(fixThis(outerPrefix(parentTp))))
}
}
@@ -147,7 +152,8 @@ object ExplicitOuter {
private def newOuterSym(owner: ClassSymbol, cls: ClassSymbol, name: TermName, flags: FlagSet)(implicit ctx: Context) = {
val target = cls.owner.enclosingClass.typeRef
val info = if (flags.is(Method)) ExprType(target) else target
- ctx.newSymbol(owner, name, Synthetic | flags, info, coord = cls.coord)
+ ctx.withPhaseNoEarlier(ctx.explicitOuterPhase.next) // outer accessors are entered at explicitOuter + 1, should not be defined before.
+ .newSymbol(owner, name, Synthetic | flags, info, coord = cls.coord)
}
/** A new param accessor for the outer field in class `cls` */
@@ -180,6 +186,7 @@ object ExplicitOuter {
private def needsOuterAlways(cls: ClassSymbol)(implicit ctx: Context): Boolean =
needsOuterIfReferenced(cls) &&
(!hasLocalInstantiation(cls) || // needs outer because we might not know whether outer is referenced or not
+ cls.mixins.exists(needsOuterIfReferenced) || // needs outer for parent traits
cls.classInfo.parents.exists(parent => // needs outer to potentially pass along to parent
needsOuterIfReferenced(parent.classSymbol.asClass)))
@@ -269,12 +276,32 @@ object ExplicitOuter {
if (tpe.prefix eq NoPrefix) cls.owner.enclosingClass.thisType
else tpe.prefix
case _ =>
- outerPrefix(tpe.underlying)
+ // Need to be careful to dealias before erasure, otherwise we lose prefixes.
+ outerPrefix(tpe.underlying(ctx.withPhaseNoLater(ctx.erasurePhase)))
}
case tpe: TypeProxy =>
outerPrefix(tpe.underlying)
}
+ /** It's possible (i1755.scala gives an example) that the type
+ * given by outerPrefix contains a This-reference to a module outside
+ * the context where that module is defined. This needs to be translated
+ * to an access to the module object from the enclosing class or object.
+ *
+ * This solution is a bit of a hack; it would be better to avoid
+ * such references to the This of a module from outside the module
+ * in the first place. I was not yet able to find out how such references
+ * arise and how to avoid them.
+ */
+ private def fixThis(tpe: Type)(implicit ctx: Context): Type = tpe match {
+ case tpe: ThisType if tpe.cls.is(Module) && !ctx.owner.isContainedIn(tpe.cls) =>
+ fixThis(TermRef(tpe.cls.owner.thisType, tpe.cls.sourceModule.asTerm))
+ case tpe: TermRef =>
+ tpe.derivedSelect(fixThis(tpe.prefix))
+ case _ =>
+ tpe
+ }
+
def outer(implicit ctx: Context): OuterOps = new OuterOps(ctx)
/** The operations in this class
@@ -313,7 +340,7 @@ object ExplicitOuter {
val cls = fun.symbol.owner.asClass
def outerArg(receiver: Tree): Tree = receiver match {
case New(_) | Super(_, _) =>
- singleton(outerPrefix(receiver.tpe))
+ singleton(fixThis(outerPrefix(receiver.tpe)))
case This(_) =>
ref(outerParamAccessor(cls)) // will be rewired to outer argument of secondary constructor in phase Constructors
case TypeApply(Select(r, nme.asInstanceOf_), args) =>
@@ -330,7 +357,7 @@ object ExplicitOuter {
/** The path of outer accessors that references `toCls.this` starting from
* the context owner's this node.
*/
- def path(toCls: Symbol, start: Tree = This(ctx.owner.enclosingClass.asClass)): Tree = try {
+ def path(toCls: Symbol, start: Tree = This(ctx.owner.lexicallyEnclosingClass.asClass)): Tree = try {
def loop(tree: Tree): Tree = {
val treeCls = tree.tpe.widen.classSymbol
val outerAccessorCtx = ctx.withPhaseNoLater(ctx.lambdaLiftPhase) // lambdalift mangles local class names, which means we cannot reliably find outer acessors anymore
diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala
index 5ae4e8a54..925ec08b2 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala
@@ -135,14 +135,6 @@ class ExtensionMethods extends MiniPhaseTransform with DenotTransformer with Ful
// TODO: this is state and should be per-run
// todo: check that when transformation finished map is empty
- private def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: ClassSymbol)(implicit ctx: Context): Unit =
- if (seen contains clazz)
- ctx.error("value class may not unbox to itself", pos)
- else {
- val unboxed = underlyingOfValueClass(clazz).typeSymbol
- if (isDerivedValueClass(unboxed)) checkNonCyclic(pos, seen + clazz, unboxed.asClass)
- }
-
override def transformTemplate(tree: tpd.Template)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
if (isDerivedValueClass(ctx.owner)) {
/* This is currently redundant since value classes may not
diff --git a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala
index 7c60e8d72..21ca8dbfd 100644
--- a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala
+++ b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala
@@ -96,14 +96,24 @@ class InterceptedMethods extends MiniPhaseTransform {
s"that means the intercepted methods set doesn't match the code")
tree
}
- lazy val Select(qual, _) = tree.fun
+ lazy val qual = tree.fun match {
+ case Select(qual, _) => qual
+ case ident @ Ident(_) =>
+ ident.tpe match {
+ case TermRef(prefix: TermRef, _) =>
+ tpd.ref(prefix)
+ case TermRef(prefix: ThisType, _) =>
+ tpd.This(prefix.cls)
+ }
+
+ }
val Any_## = this.Any_##
val Any_!= = defn.Any_!=
val rewrite: Tree = tree.fun.symbol match {
case Any_## =>
- poundPoundValue(qual)
+ poundPoundValue(qual)
case Any_!= =>
- qual.select(defn.Any_==).appliedToArgs(tree.args).select(defn.Boolean_!)
+ qual.select(defn.Any_==).appliedToArgs(tree.args).select(defn.Boolean_!)
/*
/* else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
// todo: this is needed to support value classes
@@ -121,7 +131,7 @@ class InterceptedMethods extends MiniPhaseTransform {
// we get a primitive form of _getClass trying to target a boxed value
// so we need replace that method name with Object_getClass to get correct behavior.
// See SI-5568.
- qual.selectWithSig(defn.Any_getClass).appliedToNone
+ qual.selectWithSig(defn.Any_getClass).appliedToNone
case _ =>
tree
}
diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala
index e63a7c3a7..a6ac71286 100644
--- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala
+++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala
@@ -141,7 +141,7 @@ class LazyVals extends MiniPhaseTransform with IdentityDenotTransformer {
val initSymbol = ctx.newSymbol(x.symbol.owner, initName, initFlags, MethodType(Nil, tpe), coord = x.pos)
val result = ref(holderSymbol).select(lazyNme.value)
val flag = ref(holderSymbol).select(lazyNme.initialized)
- val initer = valueInitter.changeOwner(x.symbol, initSymbol)
+ val initer = valueInitter.changeOwnerAfter(x.symbol, initSymbol, this)
val initBody =
adaptToType(
ref(holderSymbol).select(defn.Object_synchronized).appliedTo(
diff --git a/compiler/src/dotty/tools/dotc/transform/LiftTry.scala b/compiler/src/dotty/tools/dotc/transform/LiftTry.scala
index 6a273b91e..d01195614 100644
--- a/compiler/src/dotty/tools/dotc/transform/LiftTry.scala
+++ b/compiler/src/dotty/tools/dotc/transform/LiftTry.scala
@@ -57,7 +57,7 @@ class LiftTry extends MiniPhase with IdentityDenotTransformer { thisTransform =>
ctx.debuglog(i"lifting tree at ${tree.pos}, current owner = ${ctx.owner}")
val fn = ctx.newSymbol(
ctx.owner, ctx.freshName("liftedTree").toTermName, Synthetic | Method,
- MethodType(Nil, tree.tpe), coord = tree.pos)
+ MethodType(Nil, tree.tpe.widenIfUnstable), coord = tree.pos)
tree.changeOwnerAfter(ctx.owner, fn, thisTransform)
Block(DefDef(fn, tree) :: Nil, ref(fn).appliedToNone)
}
diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala
index 01c240e3a..8b5ceb0aa 100644
--- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala
@@ -47,7 +47,9 @@ import Decorators._
}
tree match {
case ddef: DefDef
- if !ddef.symbol.is(Deferred) && ddef.rhs == EmptyTree =>
+ if !ddef.symbol.is(Deferred) &&
+ !ddef.symbol.isConstructor && // constructor bodies are added later at phase Constructors
+ ddef.rhs == EmptyTree =>
errorLackImplementation(ddef)
case tdef: TypeDef
if tdef.symbol.isClass && !tdef.symbol.is(Deferred) && tdef.rhs == EmptyTree =>
@@ -89,32 +91,19 @@ import Decorators._
}
lazy val field = sym.field.orElse(newField).asTerm
-
+
def adaptToField(tree: Tree) =
if (tree.isEmpty) tree else tree.ensureConforms(field.info.widen)
-
- if (sym.is(Accessor, butNot = NoFieldNeeded))
- if (sym.isGetter) {
- def skipBlocks(t: Tree): Tree = t match {
- case Block(_, t1) => skipBlocks(t1)
- case _ => t
- }
- skipBlocks(tree.rhs) match {
- case lit: Literal if sym.is(Final) && isIdempotentExpr(tree.rhs) =>
- // duplicating scalac behavior: for final vals that have rhs as constant, we do not create a field
- // and instead return the value. This seemingly minor optimization has huge effect on initialization
- // order and the values that can be observed during superconstructor call
- // see remark about idempotency in PostTyper#normalizeTree
- cpy.DefDef(tree)(rhs = lit)
- case _ =>
- var rhs = tree.rhs.changeOwnerAfter(sym, field, thisTransform)
- if (isWildcardArg(rhs)) rhs = EmptyTree
+ val NoFieldNeeded = Lazy | Deferred | JavaDefined | (if (ctx.settings.YnoInline.value) EmptyFlags else Inline)
- val fieldDef = transformFollowing(ValDef(field, adaptToField(rhs)))
- val getterDef = cpy.DefDef(tree)(rhs = transformFollowingDeep(ref(field))(ctx.withOwner(sym), info))
- Thicket(fieldDef, getterDef)
- }
+ if (sym.is(Accessor, butNot = NoFieldNeeded))
+ if (sym.isGetter) {
+ var rhs = tree.rhs.changeOwnerAfter(sym, field, thisTransform)
+ if (isWildcardArg(rhs)) rhs = EmptyTree
+ val fieldDef = transformFollowing(ValDef(field, adaptToField(rhs)))
+ val getterDef = cpy.DefDef(tree)(rhs = transformFollowingDeep(ref(field))(ctx.withOwner(sym), info))
+ Thicket(fieldDef, getterDef)
} else if (sym.isSetter) {
if (!sym.is(ParamAccessor)) { val Literal(Constant(())) = tree.rhs } // this is intended as an assertion
field.setFlag(Mutable) // necessary for vals mixed in from Scala2 traits
@@ -125,5 +114,4 @@ import Decorators._
// neither getters nor setters
else tree
}
- private val NoFieldNeeded = Lazy | Deferred | JavaDefined
}
diff --git a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala
index 9571c387b..a72e10681 100644
--- a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala
@@ -55,7 +55,9 @@ class ParamForwarding(thisTransformer: DenotTransformer) {
stat match {
case stat: ValDef =>
val sym = stat.symbol.asTerm
- if (sym is (ParamAccessor, butNot = Mutable)) {
+ if (sym.is(ParamAccessor, butNot = Mutable) && !sym.info.isInstanceOf[ExprType]) {
+ // ElimByName gets confused with methods returning an ExprType,
+ // so avoid param forwarding if parameter is by name. See i1766.scala
val idx = superArgs.indexWhere(_.symbol == sym)
if (idx >= 0 && superParamNames(idx) == stat.name) { // supercall to like-named parameter
val alias = inheritedAccessor(sym)
diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
index 3e25cf82e..181dfccd9 100644
--- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
@@ -235,14 +235,21 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {
// next: MatchMonad[U]
// returns MatchMonad[U]
def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
-
- val getTp = extractorMemberType(prev.tpe, nme.get)
- val isDefined = extractorMemberType(prev.tpe, nme.isDefined)
-
- if ((isDefined isRef defn.BooleanClass) && getTp.exists) {
- // isDefined and get may be overloaded
- val getDenot = prev.tpe.member(nme.get).suchThat(_.info.isParameterless)
- val isDefinedDenot = prev.tpe.member(nme.isDefined).suchThat(_.info.isParameterless)
+ val resultArity = defn.productArity(b.info)
+ if (isProductMatch(prev.tpe, resultArity)) {
+ val nullCheck: Tree = prev.select(defn.Object_ne).appliedTo(Literal(Constant(null)))
+ ifThenElseZero(
+ nullCheck,
+ Block(
+ List(ValDef(b.asTerm, prev)),
+ next //Substitution(b, ref(prevSym))(next)
+ )
+ )
+ }
+ else {
+ val getDenot = extractorMember(prev.tpe, nme.get)
+ val isEmptyDenot = extractorMember(prev.tpe, nme.isEmpty)
+ assert(getDenot.exists && isEmptyDenot.exists, i"${prev.tpe}")
val tmpSym = freshSym(prev.pos, prev.tpe, "o")
val prevValue = ref(tmpSym).select(getDenot.symbol).ensureApplied
@@ -251,20 +258,10 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {
List(ValDef(tmpSym, prev)),
// must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
ifThenElseZero(
- ref(tmpSym).select(isDefinedDenot.symbol),
+ ref(tmpSym).select(isEmptyDenot.symbol).select(defn.Boolean_!),
Block(List(ValDef(b.asTerm, prevValue)), next)
)
)
- } else {
- assert(defn.isProductSubType(prev.tpe))
- val nullCheck: Tree = prev.select(defn.Object_ne).appliedTo(Literal(Constant(null)))
- ifThenElseZero(
- nullCheck,
- Block(
- List(ValDef(b.asTerm, prev)),
- next //Substitution(b, ref(prevSym))(next)
- )
- )
}
}
@@ -1431,12 +1428,12 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {
case _ => or
}
- def resultInMonad = if (aligner.isBool) defn.UnitType else {
- val getTp = extractorMemberType(resultType, nme.get)
- if ((extractorMemberType(resultType, nme.isDefined) isRef defn.BooleanClass) && getTp.exists)
- getTp
+ def resultInMonad =
+ if (aligner.isBool) defn.UnitType
+ else if (isProductMatch(resultType, aligner.prodArity)) resultType
+ else if (isGetMatch(resultType)) extractorMemberType(resultType, nme.get)
else resultType
- }
+
def resultType: Type
/** Create the TreeMaker that embodies this extractor call
@@ -1632,13 +1629,12 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {
//val spr = subPatRefs(binder)
assert(go && go1)
ref(binder) :: Nil
- } else {
- lazy val getTp = extractorMemberType(binderTypeTested, nme.get)
- if ((aligner.isSingle && aligner.extractor.prodArity == 1) && ((extractorMemberType(binderTypeTested, nme.isDefined) isRef defn.BooleanClass) && getTp.exists))
- List(ref(binder))
- else
- subPatRefs(binder)
}
+ else if ((aligner.isSingle && aligner.extractor.prodArity == 1) &&
+ !isProductMatch(binderTypeTested, aligner.prodArity) && isGetMatch(binderTypeTested))
+ List(ref(binder))
+ else
+ subPatRefs(binder)
}
/*protected def spliceApply(binder: Symbol): Tree = {
@@ -1890,9 +1886,8 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {
else if (result.classSymbol is Flags.CaseClass) result.decls.filter(x => x.is(Flags.CaseAccessor) && x.is(Flags.Method)).map(_.info).toList
else result.select(nme.get) :: Nil
)*/
- if ((extractorMemberType(resultType, nme.isDefined) isRef defn.BooleanClass) && resultOfGet.exists)
- getUnapplySelectors(resultOfGet, args)
- else if (defn.isProductSubType(resultType)) productSelectorTypes(resultType)
+ if (isProductMatch(resultType, args.length)) productSelectorTypes(resultType)
+ else if (isGetMatch(resultType)) getUnapplySelectors(resultOfGet, args)
else if (resultType isRef defn.BooleanClass) Nil
else {
ctx.error(i"invalid return type in Unapply node: $resultType")
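The isProductMatch/isGetMatch distinction above corresponds to two user-level extractor shapes; a minimal sketch (hypothetical example, not from the test suite):

    // "get match": the unapply result exposes isEmpty/get (e.g. Option)
    object FullName {
      def unapply(s: String): Option[(String, String)] = s.split(" ") match {
        case Array(first, last) => Some((first, last))
        case _                  => None
      }
    }

    // "product match": the unapply result is itself a Product of the right arity
    case class Coord(x: Int, y: Int)
    object AsCoord {
      def unapply(p: (Int, Int)): Coord = Coord(p._1, p._2)
    }

    def demo(s: String, p: (Int, Int)) = (s, p) match {
      case (FullName(first, last), AsCoord(x, y)) => (first, last, x + y)
    }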
diff --git a/compiler/src/dotty/tools/dotc/transform/ShortcutImplicits.scala b/compiler/src/dotty/tools/dotc/transform/ShortcutImplicits.scala
new file mode 100644
index 000000000..b5469610f
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ShortcutImplicits.scala
@@ -0,0 +1,165 @@
+package dotty.tools.dotc
+package transform
+
+import TreeTransforms._
+import core.DenotTransformers.IdentityDenotTransformer
+import core.Symbols._
+import core.Contexts._
+import core.Types._
+import core.Flags._
+import core.Decorators._
+import core.StdNames.nme
+import core.Names._
+import core.NameOps._
+import ast.Trees._
+import ast.tpd
+import collection.mutable
+
+/** This phase optimizes code using implicit function types, by applying two rewrite rules.
+ * Let IF be the implicit function type
+ *
+ * implicit Us => R
+ *
+ * (1) A method definition
+ *
+ * def m(xs: Ts): IF = implicit (ys: Us) => E
+ *
+ * is expanded to two methods:
+ *
+ * def m(xs: Ts): IF = implicit (ys: Us) => m$direct(xs)(ys)
+ * def m$direct(xs: Ts)(ys: Us): R = E
+ *
+ * (and equivalently for methods with type parameters or a different number of value parameter lists).
+ * An abstract method definition
+ *
+ * def m(xs: Ts): IF
+ *
+ * is expanded to:
+ *
+ * def m(xs: Ts): IF
+ * def m$direct(xs: Ts)(ys: Us): R
+ *
+ * (2) A reference `qual.apply` where `qual` has an implicit function type and
+ * `qual` refers to a method `m` is rewritten to a reference to `m$direct`,
+ * keeping the same type and value arguments as they are found in `qual`.
+ */
+class ShortcutImplicits extends MiniPhase with IdentityDenotTransformer { thisTransform =>
+ import tpd._
+
+ override def phaseName: String = "shortcutImplicits"
+ val treeTransform = new Transform
+
+ /** If this option is false, we don't specialize symbols that are known to be only
+ * targets of monomorphic calls.
+ * The reason for this option is that benchmarks show that on the JVM for monomorphic dispatch
+ * scenarios inlining and escape analysis can often remove all calling overhead, so we might as
+ * well not duplicate the code. We need more experience to decide on the best setting of this option.
+ */
+ final val specializeMonoTargets = true
+
+ class Transform extends TreeTransform {
+ def phase = thisTransform
+
+ override def prepareForUnit(tree: Tree)(implicit ctx: Context) = new Transform
+
+ /** A map to cache mapping local methods to their direct counterparts.
+ * A fresh map is created for each unit.
+ */
+ private val directMeth = new mutable.HashMap[Symbol, Symbol]
+
+ /** Should `sym` get a ..$direct companion?
+ * This is the case if `sym` is a method with an implicit function type as its final result type.
+ * However if `specializeMonoTargets` is false, we exclude symbols that are known
+ * to be only targets of monomorphic calls because they are effectively
+ * final and don't override anything.
+ */
+ private def shouldBeSpecialized(sym: Symbol)(implicit ctx: Context) =
+ sym.is(Method, butNot = Accessor) &&
+ defn.isImplicitFunctionType(sym.info.finalResultType) &&
+ (specializeMonoTargets || !sym.isEffectivelyFinal || sym.allOverriddenSymbols.nonEmpty)
+
+ /** @pre The type's final result type is an implicit function type `implicit Ts => R`.
+ * @return The type of the `apply` member of `implicit Ts => R`.
+ */
+ private def directInfo(info: Type)(implicit ctx: Context): Type = info match {
+ case info: PolyType => info.derivedPolyType(resType = directInfo(info.resultType))
+ case info: MethodType => info.derivedMethodType(resType = directInfo(info.resultType))
+ case info: ExprType => directInfo(info.resultType)
+ case info => info.member(nme.apply).info
+ }
+
+ /** A new `m$direct` method to accompany the given method `m` */
+ private def newDirectMethod(sym: Symbol)(implicit ctx: Context): Symbol = {
+ val direct = sym.copy(
+ name = sym.name.directName,
+ flags = sym.flags | Synthetic,
+ info = directInfo(sym.info))
+ if (direct.allOverriddenSymbols.isEmpty) direct.resetFlag(Override)
+ direct
+ }
+
+ /** The direct method `m$direct` that accompanies the given method `m`.
+ * Create one if it does not exist already.
+ */
+ private def directMethod(sym: Symbol)(implicit ctx: Context): Symbol =
+ if (sym.owner.isClass) {
+ val direct = sym.owner.info.member(sym.name.directName)
+ .suchThat(_.info matches directInfo(sym.info)).symbol
+ if (direct.maybeOwner == sym.owner) direct
+ else newDirectMethod(sym).enteredAfter(thisTransform)
+ }
+ else directMeth.getOrElseUpdate(sym, newDirectMethod(sym))
+
+
+ /** Transform `qual.apply` occurrences according to rewrite rule (2) above */
+ override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo) =
+ if (tree.name == nme.apply &&
+ defn.isImplicitFunctionType(tree.qualifier.tpe.widen) &&
+ shouldBeSpecialized(tree.qualifier.symbol)) {
+ def directQual(tree: Tree): Tree = tree match {
+ case Apply(fn, args) => cpy.Apply(tree)(directQual(fn), args)
+ case TypeApply(fn, args) => cpy.TypeApply(tree)(directQual(fn), args)
+ case Block(stats, expr) => cpy.Block(tree)(stats, directQual(expr))
+ case tree: RefTree =>
+ cpy.Ref(tree)(tree.name.directName)
+ .withType(directMethod(tree.symbol).termRef)
+ }
+ directQual(tree.qualifier)
+ } else tree
+
+ /** Transform methods with implicit function type result according to rewrite rule (1) above */
+ override def transformDefDef(mdef: DefDef)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ val original = mdef.symbol
+ if (shouldBeSpecialized(original)) {
+ val direct = directMethod(original)
+
+ def splitClosure(tree: Tree): (List[Type] => List[List[Tree]] => Tree, Tree) = tree match {
+ case Block(Nil, expr) => splitClosure(expr)
+ case Block((meth @ DefDef(nme.ANON_FUN, Nil, clparams :: Nil, _, _)) :: Nil, cl: Closure) =>
+ val tparamSyms = mdef.tparams.map(_.symbol)
+ val vparamSymss = mdef.vparamss.map(_.map(_.symbol))
+ val clparamSyms = clparams.map(_.symbol)
+ val remappedCore = (ts: List[Type]) => (prefss: List[List[Tree]]) =>
+ meth.rhs
+ .subst(tparamSyms ::: (vparamSymss.flatten ++ clparamSyms),
+ ts.map(_.typeSymbol) ::: prefss.flatten.map(_.symbol))
+ .changeOwnerAfter(original, direct, thisTransform)
+ .changeOwnerAfter(meth.symbol, direct, thisTransform)
+ val forwarder = ref(direct)
+ .appliedToTypeTrees(tparamSyms.map(ref(_)))
+ .appliedToArgss(vparamSymss.map(_.map(ref(_))) :+ clparamSyms.map(ref(_)))
+ val fwdClosure = cpy.Block(tree)(cpy.DefDef(meth)(rhs = forwarder) :: Nil, cl)
+ (remappedCore, fwdClosure)
+ case EmptyTree =>
+ (_ => _ => EmptyTree, EmptyTree)
+ }
+
+ val (remappedCore, fwdClosure) = splitClosure(mdef.rhs)
+ val originalDef = cpy.DefDef(mdef)(rhs = fwdClosure)
+ val directDef = polyDefDef(direct.asTerm, remappedCore)
+ Thicket(originalDef, directDef)
+ }
+ else mdef
+ }
+ }
+}
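For readers unfamiliar with the phase, a minimal sketch of what the two rewrite rules do to a concrete method (not part of the patch; `Ctx` and `greet` are hypothetical, and `implicit Ctx => String` is the implicit-function-type syntax this compiler version accepts):

class Ctx(val prefix: String)

object ShortcutSketch {
  // Before the phase: every call to `greet` allocates a closure of type
  // `implicit Ctx => String`.
  def greet(name: String): implicit Ctx => String =
    implicit (ctx: Ctx) => ctx.prefix + " " + name

  // After the phase, rule (1) conceptually produces the pair
  //   def greet(name: String): implicit Ctx => String =
  //     implicit (ctx: Ctx) => greet$direct(name)(ctx)
  //   def greet$direct(name: String)(ctx: Ctx): String = ctx.prefix + " " + name
  // and rule (2) rewrites a use site such as
  //   implicit val c: Ctx = new Ctx("Hello,")
  //   greet("dotty")                 // elaborated to greet("dotty").apply(c)
  // into greet$direct("dotty")(c), avoiding the closure allocation.
}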
diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala
index fea478c9b..3c11827fc 100644
--- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala
@@ -71,21 +71,24 @@ class SuperAccessors(thisTransformer: DenotTransformer) {
val Select(qual, name) = sel
val sym = sel.symbol
val clazz = qual.symbol.asClass
- var supername = name.superName
- if (clazz is Trait) supername = supername.expandedName(clazz)
-
- val superAcc = clazz.info.decl(supername).suchThat(_.signature == sym.signature).symbol orElse {
- ctx.debuglog(s"add super acc ${sym.showLocated} to $clazz")
- val deferredOrPrivate = if (clazz is Trait) Deferred | ExpandedName else Private
- val acc = ctx.newSymbol(
- clazz, supername, SuperAccessor | Artifact | Method | deferredOrPrivate,
- sel.tpe.widenSingleton.ensureMethodic, coord = sym.coord).enteredAfter(thisTransformer)
- // Diagnostic for SI-7091
- if (!accDefs.contains(clazz))
- ctx.error(s"Internal error: unable to store accessor definition in ${clazz}. clazz.hasPackageFlag=${clazz is Package}. Accessor required for ${sel} (${sel.show})", sel.pos)
- else accDefs(clazz) += DefDef(acc, EmptyTree)
- acc
- }
+ var superName = name.superName
+ if (clazz is Trait) superName = superName.expandedName(clazz)
+ val superInfo = sel.tpe.widenSingleton.ensureMethodic
+
+ val superAcc = clazz.info.decl(superName)
+ .suchThat(_.signature == superInfo.signature).symbol
+ .orElse {
+ ctx.debuglog(s"add super acc ${sym.showLocated} to $clazz")
+ val deferredOrPrivate = if (clazz is Trait) Deferred | ExpandedName else Private
+ val acc = ctx.newSymbol(
+ clazz, superName, SuperAccessor | Artifact | Method | deferredOrPrivate,
+ superInfo, coord = sym.coord).enteredAfter(thisTransformer)
+ // Diagnostic for SI-7091
+ if (!accDefs.contains(clazz))
+ ctx.error(s"Internal error: unable to store accessor definition in ${clazz}. clazz.hasPackageFlag=${clazz is Package}. Accessor required for ${sel} (${sel.show})", sel.pos)
+ else accDefs(clazz) += DefDef(acc, EmptyTree)
+ acc
+ }
This(clazz).select(superAcc).withPos(sel.pos)
}
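A minimal sketch of the source shape that exercises this code path (hypothetical `Greeting`/`Excited`/`Cheerful` names, not part of the patch):

trait Greeting {
  def message: String = "hello"
}

trait Excited extends Greeting {
  // A `super` call from a trait cannot be bound to a fixed target here, so it
  // is routed through a generated super accessor: deferred in the trait, with
  // a name expanded by the trait's name (as in the code above), and
  // implemented in each class that mixes the trait in.
  override def message: String = super.message + "!"
}

class Cheerful extends Excited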
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
index aa4eefe43..eee429a87 100644
--- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
@@ -129,17 +129,20 @@ class TreeChecker extends Phase with SymTransformer {
try checker.typedExpr(ctx.compilationUnit.tpdTree)(checkingCtx)
catch {
case NonFatal(ex) => //TODO CHECK. Check that we are bootstrapped
- implicit val ctx: Context = checkingCtx
+ implicit val ctx = checkingCtx
println(i"*** error while checking ${ctx.compilationUnit} after phase ${checkingCtx.phase.prev} ***")
throw ex
}
}
- class Checker(phasesToCheck: Seq[Phase]) extends ReTyper {
+ class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking {
val nowDefinedSyms = new mutable.HashSet[Symbol]
val everDefinedSyms = new mutable.HashMap[Symbol, Tree]
+ // don't check value classes after typer, as the constraint about constructors doesn't hold after transform
+ override def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(implicit ctx: Context) = ()
+
def withDefinedSym[T](tree: untpd.Tree)(op: => T)(implicit ctx: Context): T = tree match {
case tree: DefTree =>
val sym = tree.symbol
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeTransform.scala b/compiler/src/dotty/tools/dotc/transform/TreeTransform.scala
index 5385ca720..b0bd40578 100644
--- a/compiler/src/dotty/tools/dotc/transform/TreeTransform.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TreeTransform.scala
@@ -3,7 +3,6 @@ package dotc
package transform
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Annotations.ConcreteAnnotation
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.core.DenotTransformers.{InfoTransformer, DenotTransformer}
import dotty.tools.dotc.core.Denotations.SingleDenotation
@@ -181,10 +180,15 @@ object TreeTransforms {
abstract override def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation =
super.transform(ref) match {
case ref1: SymDenotation if ref1.symbol.isDefinedInCurrentRun =>
- val annotTrees = ref1.annotations.map(_.tree)
+ val annots = ref1.annotations
+ val annotTrees = annots.map(_.tree)
val annotTrees1 = annotTrees.mapConserve(annotationTransformer.macroTransform)
if (annotTrees eq annotTrees1) ref1
- else ref1.copySymDenotation(annotations = annotTrees1.map(new ConcreteAnnotation(_)))
+ else {
+ val derivedAnnots = (annots, annotTrees1).zipped.map((annot, annotTree1) =>
+ annot.derivedAnnotation(annotTree1))
+ ref1.copySymDenotation(annotations = derivedAnnots)
+ }
case ref1 =>
ref1
}
diff --git a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala
index 93005c57a..b16d05644 100644
--- a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala
@@ -53,4 +53,14 @@ object ValueClasses {
def underlyingOfValueClass(d: ClassDenotation)(implicit ctx: Context): Type =
valueClassUnbox(d).info.resultType
+ /** Whether a value class wraps itself */
+ def isCyclic(cls: ClassSymbol)(implicit ctx: Context): Boolean = {
+ def recur(seen: Set[Symbol], clazz: ClassSymbol)(implicit ctx: Context): Boolean =
+ (seen contains clazz) || {
+ val unboxed = underlyingOfValueClass(clazz).typeSymbol
+ (isDerivedValueClass(unboxed)) && recur(seen + clazz, unboxed.asClass)
+ }
+
+ recur(Set[Symbol](), cls)
+ }
}
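A minimal sketch of the cycle `isCyclic` is meant to detect (hypothetical `Meters`/`Feet` names, not part of the patch); this is exactly the shape that the new checkDerivedValueClass check rejects with "value class cannot wrap itself":

class Meters(val underlying: Feet) extends AnyVal   // unboxes to Feet
class Feet(val underlying: Meters) extends AnyVal   // unboxes to Meters

// Starting from Meters, `recur` visits Meters -> Feet -> Meters, finds Meters
// already in `seen`, and returns true, so the definition is reported as an error.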
diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala
index 6c398cd72..42c24ffb7 100644
--- a/compiler/src/dotty/tools/dotc/typer/Applications.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala
@@ -32,16 +32,37 @@ import reporting.diagnostic.Message
object Applications {
import tpd._
+ def extractorMember(tp: Type, name: Name)(implicit ctx: Context) = {
+ def isPossibleExtractorType(tp: Type) = tp match {
+ case _: MethodType | _: PolyType => false
+ case _ => true
+ }
+ tp.member(name).suchThat(d => isPossibleExtractorType(d.info))
+ }
+
def extractorMemberType(tp: Type, name: Name, errorPos: Position = NoPosition)(implicit ctx: Context) = {
- val ref = tp.member(name).suchThat(_.info.isParameterless)
+ val ref = extractorMember(tp, name)
if (ref.isOverloaded)
errorType(i"Overloaded reference to $ref is not allowed in extractor", errorPos)
- else if (ref.info.isInstanceOf[PolyType])
- errorType(i"Reference to polymorphic $ref: ${ref.info} is not allowed in extractor", errorPos)
- else
- ref.info.widenExpr.dealias
+ ref.info.widenExpr.dealias
}
+ /** Does `tp` fit the "product match" conditions as an unapply result type
+ * for a pattern with `numArgs` subpatterns?
+ * This is the case if `tp` is a subtype of the Product<numArgs> class.
+ */
+ def isProductMatch(tp: Type, numArgs: Int)(implicit ctx: Context) =
+ 0 <= numArgs && numArgs <= Definitions.MaxTupleArity &&
+ tp.derivesFrom(defn.ProductNType(numArgs).typeSymbol)
+
+ /** Does `tp` fit the "get match" conditions as an unapply result type?
+ * This is the case if `tp` has a `get` member as well as a
+ * parameterless `isEmpty` member of result type `Boolean`.
+ */
+ def isGetMatch(tp: Type, errorPos: Position = NoPosition)(implicit ctx: Context) =
+ extractorMemberType(tp, nme.isEmpty, errorPos).isRef(defn.BooleanClass) &&
+ extractorMemberType(tp, nme.get, errorPos).exists
+
def productSelectorTypes(tp: Type, errorPos: Position = NoPosition)(implicit ctx: Context): List[Type] = {
val sels = for (n <- Iterator.from(0)) yield extractorMemberType(tp, nme.selectorName(n), errorPos)
sels.takeWhile(_.exists).toList
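A minimal sketch of the two result shapes classified by `isProductMatch` and `isGetMatch` (hypothetical `Half` and `Swap` extractors, not part of the patch):

object ExtractorShapes {
  object Half {
    // "get match": the result type has `isEmpty` and `get` (here via Option),
    // so the single selector type is read off the `get` member.
    def unapply(n: Int): Option[Int] = if (n % 2 == 0) Some(n / 2) else None
  }

  object Swap {
    // "product match": the result type derives from Product2, so a pattern
    // `case Swap(a, b)` reads the selectors _1 and _2 directly after a null
    // check, with no Option allocation.
    def unapply(p: (Int, Int)): (Int, Int) = (p._2, p._1)
  }

  def halved(n: Int): Int = n match {
    case Half(k) => k
    case _       => n
  }
}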
@@ -61,24 +82,37 @@ object Applications {
def unapplyArgs(unapplyResult: Type, unapplyFn: Tree, args: List[untpd.Tree], pos: Position = NoPosition)(implicit ctx: Context): List[Type] = {
+ val unapplyName = unapplyFn.symbol.name
def seqSelector = defn.RepeatedParamType.appliedTo(unapplyResult.elemType :: Nil)
def getTp = extractorMemberType(unapplyResult, nme.get, pos)
- // println(s"unapply $unapplyResult ${extractorMemberType(unapplyResult, nme.isDefined)}")
- if (extractorMemberType(unapplyResult, nme.isDefined, pos) isRef defn.BooleanClass) {
- if (getTp.exists)
- if (unapplyFn.symbol.name == nme.unapplySeq) {
- val seqArg = boundsToHi(getTp.elemType)
- if (seqArg.exists) return args map Function.const(seqArg)
- }
- else return getUnapplySelectors(getTp, args, pos)
- else if (defn.isProductSubType(unapplyResult)) return productSelectorTypes(unapplyResult, pos)
+ def fail = {
+ ctx.error(i"$unapplyResult is not a valid result type of an $unapplyName method of an extractor", pos)
+ Nil
+ }
+
+ if (unapplyName == nme.unapplySeq) {
+ if (unapplyResult derivesFrom defn.SeqClass) seqSelector :: Nil
+ else if (isGetMatch(unapplyResult, pos)) {
+ val seqArg = boundsToHi(getTp.elemType)
+ if (seqArg.exists) args.map(Function.const(seqArg))
+ else fail
+ }
+ else fail
}
- if (unapplyResult derivesFrom defn.SeqClass) seqSelector :: Nil
- else if (unapplyResult isRef defn.BooleanClass) Nil
else {
- ctx.error(i"$unapplyResult is not a valid result type of an unapply method of an extractor", pos)
- Nil
+ assert(unapplyName == nme.unapply)
+ if (isProductMatch(unapplyResult, args.length))
+ productSelectorTypes(unapplyResult)
+ else if (isGetMatch(unapplyResult, pos))
+ getUnapplySelectors(getTp, args, pos)
+ else if (unapplyResult isRef defn.BooleanClass)
+ Nil
+ else if (defn.isProductSubType(unapplyResult))
+ productSelectorTypes(unapplyResult)
+ // this will cause a "wrong number of arguments in pattern" error later on,
+ // which is better than the message in `fail`.
+ else fail
}
}
@@ -250,8 +284,37 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
/** Splice new method reference into existing application */
def spliceMeth(meth: Tree, app: Tree): Tree = app match {
- case Apply(fn, args) => Apply(spliceMeth(meth, fn), args)
- case TypeApply(fn, targs) => TypeApply(spliceMeth(meth, fn), targs)
+ case Apply(fn, args) =>
+ spliceMeth(meth, fn).appliedToArgs(args)
+ case TypeApply(fn, targs) =>
+ // Note: It is important that the type arguments `targs` are passed in new trees
+ // instead of being spliced in literally. Otherwise, a type argument to a default
+ // method could be constructed as the definition site of the type variable for
+ // that default constructor. This would interpolate type variables too early,
+ // causing lots of tests (among them tasty_unpickleScala2) to fail.
+ //
+ // The test case is in i1757.scala. Here we have a variable `s` and a method `cpy`
+ // defined like this:
+ //
+ // var s
+ // def cpy[X](b: List[Int] = b): B[X] = new B[X](b)
+ //
+ // The call `s.cpy()` then gets expanded to
+ //
+ // { val $1$: B[Int] = this.s
+ //      $1$.cpy[X']($1$.cpy$default$1[X'])
+ // }
+ //
+ // A type variable gets interpolated if it does not appear in the type
+ // of the current tree and the current tree contains the variable's "definition".
+ // Previously, the polymorphic function tree to which the variable was first added
+ // was taken as the variable's definition. But that fails here because that
+ // tree was `s.cpy` but got transformed into `$1$.cpy`. We now take the type argument
+ // [X'] of the variable as its definition tree, which is more robust. But then
+ // it's crucial that the type tree is not copied directly as argument to
+ // `cpy$default$1`. If it was, the variable `X'` would already be interpolated
+ // when typing the default argument, which is too early.
+ spliceMeth(meth, fn).appliedToTypes(targs.tpes)
case _ => meth
}
@@ -333,7 +396,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
val getter = findDefaultGetter(n + numArgs(normalizedFun))
if (getter.isEmpty) missingArg(n)
else {
- addTyped(treeToArg(spliceMeth(getter withPos appPos, normalizedFun)), formal)
+ addTyped(treeToArg(spliceMeth(getter withPos normalizedFun.pos, normalizedFun)), formal)
matchArgs(args1, formals1, n + 1)
}
}
@@ -498,7 +561,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
var typedArgs = typedArgBuf.toList
def app0 = cpy.Apply(app)(normalizedFun, typedArgs) // needs to be a `def` because typedArgs can change later
val app1 =
- if (!success) app0.withType(ErrorType)
+ if (!success) app0.withType(UnspecifiedErrorType)
else {
if (!sameSeq(args, orderedArgs)) {
// need to lift arguments to maintain evaluation order in the
@@ -591,21 +654,23 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
}
fun1.tpe match {
- case ErrorType => untpd.cpy.Apply(tree)(fun1, tree.args).withType(ErrorType)
+ case err: ErrorType => untpd.cpy.Apply(tree)(fun1, tree.args).withType(err)
case TryDynamicCallType => typedDynamicApply(tree, pt)
case _ =>
- tryEither {
- implicit ctx => simpleApply(fun1, proto)
- } {
- (failedVal, failedState) =>
- def fail = { failedState.commit(); failedVal }
- // Try once with original prototype and once (if different) with tupled one.
- // The reason we need to try both is that the decision whether to use tupled
- // or not was already taken but might have to be revised when an implicit
- // is inserted on the qualifier.
- tryWithImplicitOnQualifier(fun1, originalProto).getOrElse(
- if (proto eq originalProto) fail
- else tryWithImplicitOnQualifier(fun1, proto).getOrElse(fail))
+ if (originalProto.isDropped) fun1
+ else
+ tryEither {
+ implicit ctx => simpleApply(fun1, proto)
+ } {
+ (failedVal, failedState) =>
+ def fail = { failedState.commit(); failedVal }
+ // Try once with original prototype and once (if different) with tupled one.
+ // The reason we need to try both is that the decision whether to use tupled
+ // or not was already taken but might have to be revised when an implicit
+ // is inserted on the qualifier.
+ tryWithImplicitOnQualifier(fun1, originalProto).getOrElse(
+ if (proto eq originalProto) fail
+ else tryWithImplicitOnQualifier(fun1, proto).getOrElse(fail))
}
}
}
@@ -855,7 +920,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
case tp =>
val unapplyErr = if (tp.isError) unapplyFn else notAnExtractor(unapplyFn)
val typedArgsErr = args mapconserve (typed(_, defn.AnyType))
- cpy.UnApply(tree)(unapplyErr, Nil, typedArgsErr) withType ErrorType
+ cpy.UnApply(tree)(unapplyErr, Nil, typedArgsErr) withType unapplyErr.tpe
}
}
@@ -912,9 +977,21 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
}
/** In a set of overloaded applicable alternatives, is `alt1` at least as good as
- * `alt2`? `alt1` and `alt2` are non-overloaded references.
+ * `alt2`? Also used for implicits disambiguation.
+ *
+ * @param alt1, alt2 Non-overloaded references indicating the two choices
+ * @param nesting1, nesting2 If alternatives come from a comparison of two contextual
+ * implicit candidates, the nesting levels of the candidates.
+ * In all other cases the nesting levels are both 0.
+ *
+ * An alternative A1 is "as good as" an alternative A2 if it wins or draws in a tournament
+ * that awards one point for each of the following
+ *
+ * - A1 is nested more deeply than A2
+ * - The nesting levels of A1 and A2 are the same, and A1's owner derives from A2's owner
+ * - A1's type is more specific than A2's type.
*/
- def isAsGood(alt1: TermRef, alt2: TermRef)(implicit ctx: Context): Boolean = track("isAsGood") { ctx.traceIndented(i"isAsGood($alt1, $alt2)", overload) {
+ def isAsGood(alt1: TermRef, alt2: TermRef, nesting1: Int = 0, nesting2: Int = 0)(implicit ctx: Context): Boolean = track("isAsGood") { ctx.traceIndented(i"isAsGood($alt1, $alt2)", overload) {
assert(alt1 ne alt2)
@@ -963,7 +1040,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
val nestedCtx = ctx.fresh.setExploreTyperState
{
- implicit val ctx: Context = nestedCtx
+ implicit val ctx = nestedCtx
isAsSpecificValueType(tp1, constrained(tp2).resultType)
}
case _ => // (3b)
@@ -1029,9 +1106,9 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
val tp1 = stripImplicit(alt1.widen)
val tp2 = stripImplicit(alt2.widen)
- def winsOwner1 = isDerived(owner1, owner2)
+ def winsOwner1 = nesting1 > nesting2 || isDerived(owner1, owner2)
def winsType1 = isAsSpecific(alt1, tp1, alt2, tp2)
- def winsOwner2 = isDerived(owner2, owner1)
+ def winsOwner2 = nesting2 > nesting1 || isDerived(owner2, owner1)
def winsType2 = isAsSpecific(alt2, tp2, alt1, tp1)
overload.println(i"isAsGood($alt1, $alt2)? $tp1 $tp2 $winsOwner1 $winsType1 $winsOwner2 $winsType2")
@@ -1231,7 +1308,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
val alts1 = alts filter pt.isMatchedBy
resolveOverloaded(alts1, pt1, targs1)
- case defn.FunctionOf(args, resultType) =>
+ case defn.FunctionOf(args, resultType, _) =>
narrowByTypes(alts, args, resultType)
case pt =>
@@ -1282,7 +1359,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
// (p_1_1, ..., p_m_1) => r_1
// ...
// (p_1_n, ..., p_m_n) => r_n
- val decomposedFormalsForArg: List[Option[(List[Type], Type)]] =
+ val decomposedFormalsForArg: List[Option[(List[Type], Type, Boolean)]] =
formalsForArg.map(defn.FunctionOf.unapply)
if (decomposedFormalsForArg.forall(_.isDefined)) {
val formalParamTypessForArg: List[List[Type]] =
diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala
index 0c5a8e5db..41d9f9572 100644
--- a/compiler/src/dotty/tools/dotc/typer/Checking.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala
@@ -29,6 +29,7 @@ import ErrorReporting.{err, errorType}
import config.Printers.typr
import collection.mutable
import SymDenotations.NoCompleter
+import dotty.tools.dotc.transform.ValueClasses._
object Checking {
import tpd._
@@ -55,8 +56,8 @@ object Checking {
def checkBounds(args: List[tpd.Tree], poly: PolyType)(implicit ctx: Context): Unit =
checkBounds(args, poly.paramBounds, _.substParams(poly, _))
- /** Check applied type trees for well-formedness. This means
- * - all arguments are within their corresponding bounds
+ /** Check applied type trees for well-formedness. This means
+ * - all arguments are within their corresponding bounds
* - if type is a higher-kinded application with wildcard arguments,
* check that it or one of its supertypes can be reduced to a normal application.
* Unreducible applications correspond to general existentials, and we
@@ -88,12 +89,12 @@ object Checking {
checkWildcardHKApply(tp.superType, pos)
}
case _ =>
- }
+ }
def checkValidIfHKApply(implicit ctx: Context): Unit =
checkWildcardHKApply(tycon.tpe.appliedTo(args.map(_.tpe)), tree.pos)
checkValidIfHKApply(ctx.addMode(Mode.AllowLambdaWildcardApply))
}
-
+
/** Check that `tp` refers to a nonAbstract class
* and that the instance conforms to the self type of the created class.
*/
@@ -236,8 +237,7 @@ object Checking {
catch {
case ex: CyclicReference =>
if (reportErrors) {
- ctx.error(i"illegal cyclic reference: ${checker.where} ${checker.lastChecked} of $sym refers back to the type itself", sym.pos)
- ErrorType
+ errorType(i"illegal cyclic reference: ${checker.where} ${checker.lastChecked} of $sym refers back to the type itself", sym.pos)
}
else info
}
@@ -406,6 +406,43 @@ object Checking {
notPrivate.errors.foreach { case (msg, pos) => ctx.errorOrMigrationWarning(msg, pos) }
info
}
+
+ /** Verify classes extending AnyVal meet the requirements */
+ def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(implicit ctx: Context) = {
+ def checkValueClassMember(stat: Tree) = stat match {
+ case _: ValDef if !stat.symbol.is(ParamAccessor) =>
+ ctx.error(s"value class may not define non-parameter field", stat.pos)
+ case d: DefDef if d.symbol.isConstructor =>
+ ctx.error(s"value class may not define secondary constructor", stat.pos)
+ case _: MemberDef | _: Import | EmptyTree =>
+ // ok
+ case _ =>
+ ctx.error(s"value class may not contain initialization statements", stat.pos)
+ }
+ if (isDerivedValueClass(clazz)) {
+ if (clazz.is(Trait))
+ ctx.error("Only classes (not traits) are allowed to extend AnyVal", clazz.pos)
+ if (clazz.is(Abstract))
+ ctx.error("`abstract' modifier cannot be used with value classes", clazz.pos)
+ if (!clazz.isStatic)
+ ctx.error(s"value class may not be a ${if (clazz.owner.isTerm) "local class" else "member of another class"}", clazz.pos)
+ if (isCyclic(clazz.asClass))
+ ctx.error("value class cannot wrap itself", clazz.pos)
+ else {
+ val clParamAccessors = clazz.asClass.paramAccessors.filter(_.isTerm)
+ clParamAccessors match {
+ case List(param) =>
+ if (param.is(Mutable))
+ ctx.error("value class parameter must not be a var", param.pos)
+
+ case _ =>
+ ctx.error("value class needs to have exactly one val parameter", clazz.pos)
+ }
+ }
+ stats.foreach(checkValueClassMember)
+ }
+
+ }
}
trait Checking {
@@ -418,7 +455,7 @@ trait Checking {
/** Check that Java statics and packages can only be used in selections.
*/
def checkValue(tree: Tree, proto: Type)(implicit ctx: Context): tree.type = {
- if (!proto.isInstanceOf[SelectionProto]) {
+ if (!proto.isInstanceOf[SelectionProto] && !proto.isInstanceOf[ApplyingProto]) {
val sym = tree.tpe.termSymbol
// The check is avoided inside Java compilation units because it always fails
// on the singleton type Module.type.
@@ -553,6 +590,10 @@ trait Checking {
errorTree(tpt, ex"Singleton type ${tpt.tpe} is not allowed $where")
}
else tpt
+
+ /** Verify classes extending AnyVal meet the requirements */
+ def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(implicit ctx: Context) =
+ Checking.checkDerivedValueClass(clazz, stats)
}
trait NoChecking extends Checking {
@@ -568,4 +609,5 @@ trait NoChecking extends Checking {
override def checkParentCall(call: Tree, caller: ClassSymbol)(implicit ctx: Context) = ()
override def checkSimpleKinded(tpt: Tree)(implicit ctx: Context): Tree = tpt
override def checkNotSingleton(tpt: Tree, where: String)(implicit ctx: Context): Tree = tpt
+ override def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(implicit ctx: Context) = ()
}
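A minimal sketch of what `checkDerivedValueClass` accepts and rejects (hypothetical class names, not part of the patch):

class Celsius(val value: Double) extends AnyVal             // accepted: exactly one val parameter

// Each of the following hits one of the errors issued above:
// class BadVar(var value: Double) extends AnyVal           // value class parameter must not be a var
// class BadArity(val a: Int, val b: Int) extends AnyVal    // value class needs to have exactly one val parameter
// class BadField(val a: Int) extends AnyVal { val b = 1 }  // value class may not define non-parameter field
// abstract class BadAbstract(val a: Int) extends AnyVal    // `abstract' modifier cannot be used with value classes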
diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala
index b5ace87d3..4039c8b81 100644
--- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala
@@ -11,6 +11,7 @@ import dotty.tools.dotc.core.Names.Name
import dotty.tools.dotc.core.StdNames._
import dotty.tools.dotc.core.Types._
import dotty.tools.dotc.core.Decorators._
+import ErrorReporting._
object Dynamic {
def isDynamicMethod(name: Name): Boolean =
@@ -41,10 +42,9 @@ trait Dynamic { self: Typer with Applications =>
def isNamedArg(arg: untpd.Tree): Boolean = arg match { case NamedArg(_, _) => true; case _ => false }
val args = tree.args
val dynName = if (args.exists(isNamedArg)) nme.applyDynamicNamed else nme.applyDynamic
- if (dynName == nme.applyDynamicNamed && untpd.isWildcardStarArgList(args)) {
- ctx.error("applyDynamicNamed does not support passing a vararg parameter", tree.pos)
- tree.withType(ErrorType)
- } else {
+ if (dynName == nme.applyDynamicNamed && untpd.isWildcardStarArgList(args))
+ errorTree(tree, "applyDynamicNamed does not support passing a vararg parameter")
+ else {
def namedArgTuple(name: String, arg: untpd.Tree) = untpd.Tuple(List(Literal(Constant(name)), arg))
def namedArgs = args.map {
case NamedArg(argName, arg) => namedArgTuple(argName.toString, arg)
@@ -89,8 +89,7 @@ trait Dynamic { self: Typer with Applications =>
case TypeApply(Select(qual, name), targs) if !isDynamicMethod(name) =>
typedDynamicAssign(qual, name, targs)
case _ =>
- ctx.error("reassignment to val", tree.pos)
- tree.withType(ErrorType)
+ errorTree(tree, "reassignment to val")
}
}
diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala
index a18c83ff8..1238ad568 100644
--- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala
@@ -23,7 +23,7 @@ object ErrorReporting {
def errorType(msg: => Message, pos: Position)(implicit ctx: Context): ErrorType = {
ctx.error(msg, pos)
- ErrorType
+ new ErrorType(msg)
}
def cyclicErrorMsg(ex: CyclicReference)(implicit ctx: Context) = {
@@ -46,11 +46,17 @@ object ErrorReporting {
errorMsg(msg, cx.outer)
}
} else msg
- errorMsg(ex.show, ctx)
+
+ if (cycleSym.is(Implicit, butNot = Method) && cycleSym.owner.isTerm)
+ em"""cyclic reference involving implicit $cycleSym
+ |This happens when the right-hand side of $cycleSym's definition involves an implicit search.
+ |To avoid the error, give $cycleSym an explicit type."""
+ else
+ errorMsg(ex.show, ctx)
}
- def wrongNumberOfArgs(fntpe: Type, kind: String, expectedArgs: List[TypeParamInfo], actual: List[untpd.Tree], pos: Position)(implicit ctx: Context) =
- errorType(WrongNumberOfArgs(fntpe, kind, expectedArgs, actual)(ctx), pos)
+ def wrongNumberOfTypeArgs(fntpe: Type, expectedArgs: List[TypeParamInfo], actual: List[untpd.Tree], pos: Position)(implicit ctx: Context) =
+ errorType(WrongNumberOfTypeArgs(fntpe, expectedArgs, actual)(ctx), pos)
class Errors(implicit ctx: Context) {
diff --git a/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala b/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala
index c444631ae..6eff63e2b 100644
--- a/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala
+++ b/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala
@@ -19,6 +19,15 @@ class FrontEnd extends Phase {
override def isTyper = true
import ast.tpd
+ /** The contexts for compilation units that are parsed but not yet entered */
+ private var remaining: List[Context] = Nil
+
+ /** Does a source file ending with `<name>.scala` belong to a compilation unit
+ * that is parsed but not yet entered?
+ */
+ def stillToBeEntered(name: String): Boolean =
+ remaining.exists(_.compilationUnit.toString.endsWith(name + ".scala"))
+
def monitor(doing: String)(body: => Unit)(implicit ctx: Context) =
try body
catch {
@@ -44,6 +53,12 @@ class FrontEnd extends Phase {
typr.println("entered: " + unit.source)
}
+ def enterAnnotations(implicit ctx: Context) = monitor("annotating") {
+ val unit = ctx.compilationUnit
+ ctx.typer.annotate(unit.untpdTree :: Nil)
+ typr.println("annotated: " + unit.source)
+ }
+
def typeCheck(implicit ctx: Context) = monitor("typechecking") {
val unit = ctx.compilationUnit
unit.tpdTree = ctx.typer.typedExpr(unit.untpdTree)
@@ -69,8 +84,13 @@ class FrontEnd extends Phase {
}
unitContexts foreach (parse(_))
record("parsedTrees", ast.Trees.ntrees)
- unitContexts foreach (enterSyms(_))
- unitContexts foreach (typeCheck(_))
+ remaining = unitContexts
+ while (remaining.nonEmpty) {
+ enterSyms(remaining.head)
+ remaining = remaining.tail
+ }
+ unitContexts.foreach(enterAnnotations(_))
+ unitContexts.foreach(typeCheck(_))
record("total trees after typer", ast.Trees.ntrees)
unitContexts.map(_.compilationUnit).filterNot(discardAfterTyper)
}
diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala
index f3dceea71..303953e73 100644
--- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala
@@ -24,16 +24,20 @@ import Constants._
import Applications._
import ProtoTypes._
import ErrorReporting._
+import reporting.diagnostic.MessageContainer
import Inferencing.fullyDefinedType
import Trees._
import Hashable._
import config.Config
-import config.Printers.{implicits, implicitsDetailed}
+import config.Printers.{implicits, implicitsDetailed, typr}
import collection.mutable
/** Implicit resolution */
object Implicits {
+ /** An eligible implicit candidate, consisting of an implicit reference and a nesting level */
+ case class Candidate(ref: TermRef, level: Int)
+
/** A common base class of contextual implicits and of-type implicits which
* represents a set of implicit references.
*/
@@ -41,11 +45,14 @@ object Implicits {
implicit val ctx: Context =
if (initctx == NoContext) initctx else initctx retractMode Mode.ImplicitsEnabled
+ /** The nesting level of this context. Non-zero only in ContextualImplicits */
+ def level: Int = 0
+
/** The implicit references */
def refs: List[TermRef]
/** Return those references in `refs` that are compatible with type `pt`. */
- protected def filterMatching(pt: Type)(implicit ctx: Context): List[TermRef] = track("filterMatching") {
+ protected def filterMatching(pt: Type)(implicit ctx: Context): List[Candidate] = track("filterMatching") {
def refMatches(ref: TermRef)(implicit ctx: Context) = /*ctx.traceIndented(i"refMatches $ref $pt")*/ {
@@ -96,8 +103,9 @@ object Implicits {
}
}
- if (refs.isEmpty) refs
- else refs filter (refMatches(_)(ctx.fresh.addMode(Mode.TypevarsMissContext).setExploreTyperState)) // create a defensive copy of ctx to avoid constraint pollution
+ if (refs.isEmpty) Nil
+ else refs.filter(refMatches(_)(ctx.fresh.addMode(Mode.TypevarsMissContext).setExploreTyperState)) // create a defensive copy of ctx to avoid constraint pollution
+ .map(Candidate(_, level))
}
}
@@ -113,8 +121,8 @@ object Implicits {
buf.toList
}
- /** The implicit references that are eligible for expected type `tp` */
- lazy val eligible: List[TermRef] =
+ /** The candidates that are eligible for expected type `tp` */
+ lazy val eligible: List[Candidate] =
/*>|>*/ track("eligible in tpe") /*<|<*/ {
/*>|>*/ ctx.traceIndented(i"eligible($tp), companions = ${companionRefs.toList}%, %", implicitsDetailed, show = true) /*<|<*/ {
if (refs.nonEmpty && monitored) record(s"check eligible refs in tpe", refs.length)
@@ -134,10 +142,21 @@ object Implicits {
* @param outerCtx the next outer context that makes visible further implicits
*/
class ContextualImplicits(val refs: List[TermRef], val outerImplicits: ContextualImplicits)(initctx: Context) extends ImplicitRefs(initctx) {
- private val eligibleCache = new mutable.AnyRefMap[Type, List[TermRef]]
+ private val eligibleCache = new mutable.AnyRefMap[Type, List[Candidate]]
+
+ /** The level increases if the current context has a different owner or scope than
+ * the context of the next-outer ImplicitRefs. This is however disabled under
+ * Scala2 mode, since we do not want to change the implicit disambiguation then.
+ */
+ override val level: Int =
+ if (outerImplicits == null) 1
+ else if (ctx.scala2Mode ||
+ (ctx.owner eq outerImplicits.ctx.owner) &&
+ (ctx.scope eq outerImplicits.ctx.scope)) outerImplicits.level
+ else outerImplicits.level + 1
/** The implicit references that are eligible for type `tp`. */
- def eligible(tp: Type): List[TermRef] = /*>|>*/ track(s"eligible in ctx") /*<|<*/ {
+ def eligible(tp: Type): List[Candidate] = /*>|>*/ track(s"eligible in ctx") /*<|<*/ {
if (tp.hash == NotCached) computeEligible(tp)
else eligibleCache get tp match {
case Some(eligibles) =>
@@ -161,13 +180,13 @@ object Implicits {
}
}
- private def computeEligible(tp: Type): List[TermRef] = /*>|>*/ ctx.traceIndented(i"computeEligible $tp in $refs%, %", implicitsDetailed) /*<|<*/ {
+ private def computeEligible(tp: Type): List[Candidate] = /*>|>*/ ctx.traceIndented(i"computeEligible $tp in $refs%, %", implicitsDetailed) /*<|<*/ {
if (monitored) record(s"check eligible refs in ctx", refs.length)
val ownEligible = filterMatching(tp)
if (outerImplicits == NoContext.implicits) ownEligible
else ownEligible ::: {
- val shadowed = (ownEligible map (_.name)).toSet
- outerImplicits.eligible(tp) filterNot (ref => shadowed contains ref.name)
+ val shadowed = ownEligible.map(_.ref.name).toSet
+ outerImplicits.eligible(tp).filterNot(cand => shadowed.contains(cand.ref.name))
}
}
@@ -197,8 +216,8 @@ object Implicits {
* @param tree The typed tree that needs to be inserted
* @param ctx The context after the implicit search
*/
- case class SearchSuccess(tree: tpd.Tree, ref: TermRef, tstate: TyperState) extends SearchResult {
- override def toString = s"SearchSuccess($tree, $ref)"
+ case class SearchSuccess(tree: tpd.Tree, ref: TermRef, level: Int, tstate: TyperState) extends SearchResult {
+ override def toString = s"SearchSuccess($tree, $ref, $level)"
}
/** A failed search */
@@ -212,6 +231,8 @@ object Implicits {
/** A "no matching implicit found" failure */
case object NoImplicitMatches extends SearchFailure
+ case object DivergingImplicit extends SearchFailure
+
/** A search failure that can show information about the cause */
abstract class ExplainedSearchFailure extends SearchFailure {
protected def pt: Type
@@ -233,9 +254,35 @@ object Implicits {
"\n " + explanation
}
- class NonMatchingImplicit(ref: TermRef, val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
- def explanation(implicit ctx: Context): String =
- em"${err.refStr(ref)} does not $qualify"
+ class NonMatchingImplicit(ref: TermRef,
+ val pt: Type,
+ val argument: tpd.Tree,
+ trail: List[MessageContainer]) extends ExplainedSearchFailure {
+ private val separator = "\n**** because ****\n"
+
+ /** Replace repeated parts beginning with `separator` by ... */
+ private def elideRepeated(str: String): String = {
+ val startIdx = str.indexOfSlice(separator)
+ val nextIdx = str.indexOfSlice(separator, startIdx + separator.length)
+ if (nextIdx < 0) str
+ else {
+ val prefix = str.take(startIdx)
+ val first = str.slice(startIdx, nextIdx)
+ var rest = str.drop(nextIdx)
+ if (rest.startsWith(first)) {
+ rest = rest.drop(first.length)
+ val dots = "\n\n ...\n"
+ if (!rest.startsWith(dots)) rest = dots ++ rest
+ }
+ prefix ++ first ++ rest
+ }
+ }
+
+ def explanation(implicit ctx: Context): String = {
+ val headMsg = em"${err.refStr(ref)} does not $qualify"
+ val trailMsg = trail.map(mc => i"$separator ${mc.message}").mkString
+ elideRepeated(headMsg ++ trailMsg)
+ }
}
class ShadowedImplicit(ref: TermRef, shadowing: Type, val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
@@ -273,9 +320,10 @@ trait ImplicitRunInfo { self: RunInfo =>
* a type variable, we need the current context, the current
* runinfo context does not do.
*/
- def implicitScope(tp: Type, liftingCtx: Context): OfTypeImplicits = {
+ def implicitScope(rootTp: Type, liftingCtx: Context): OfTypeImplicits = {
val seen: mutable.Set[Type] = mutable.Set()
+ val incomplete: mutable.Set[Type] = mutable.Set()
/** Replace every typeref that does not refer to a class by a conjunction of class types
* that has the same implicit scope as the original typeref. The motivation for applying
@@ -309,16 +357,23 @@ trait ImplicitRunInfo { self: RunInfo =>
}
}
- def iscopeRefs(tp: Type): TermRefSet =
- if (seen contains tp) EmptyTermRefSet
- else {
- seen += tp
- iscope(tp).companionRefs
- }
-
// todo: compute implicits directly, without going via companionRefs?
def collectCompanions(tp: Type): TermRefSet = track("computeImplicitScope") {
ctx.traceIndented(i"collectCompanions($tp)", implicits) {
+
+ def iscopeRefs(t: Type): TermRefSet = implicitScopeCache.get(t) match {
+ case Some(is) =>
+ is.companionRefs
+ case None =>
+ if (seen contains t) {
+ incomplete += tp // the refs of `tp` are no longer accurate, so `tp` is marked incomplete.
+ EmptyTermRefSet // all references to `rootTp` are accounted for in `seen`, so we can return the empty set here.
+ } else {
+ seen += t
+ iscope(t).companionRefs
+ }
+ }
+
val comps = new TermRefSet
tp match {
case tp: NamedType =>
@@ -356,7 +411,8 @@ trait ImplicitRunInfo { self: RunInfo =>
* @param isLifted Type `tp` is the result of a `liftToClasses` application
*/
def iscope(tp: Type, isLifted: Boolean = false): OfTypeImplicits = {
- def computeIScope(cacheResult: Boolean) = {
+ val canCache = Config.cacheImplicitScopes && tp.hash != NotCached
+ def computeIScope() = {
val savedEphemeral = ctx.typerState.ephemeral
ctx.typerState.ephemeral = false
try {
@@ -367,33 +423,23 @@ trait ImplicitRunInfo { self: RunInfo =>
else
collectCompanions(tp)
val result = new OfTypeImplicits(tp, refs)(ctx)
- if (ctx.typerState.ephemeral) record("ephemeral cache miss: implicitScope")
- else if (cacheResult) implicitScopeCache(tp) = result
+ if (ctx.typerState.ephemeral)
+ record("ephemeral cache miss: implicitScope")
+ else if (canCache &&
+ ((tp eq rootTp) || // first type traversed is always cached
+ !incomplete.contains(tp) && // other types are cached if they are not incomplete
+ result.companionRefs.forall( // and all their companion refs are cached
+ implicitScopeCache.contains)))
+ implicitScopeCache(tp) = result
result
}
finally ctx.typerState.ephemeral |= savedEphemeral
}
-
- if (tp.hash == NotCached || !Config.cacheImplicitScopes)
- computeIScope(cacheResult = false)
- else implicitScopeCache get tp match {
- case Some(is) => is
- case None =>
- // Implicit scopes are tricky to cache because of loops. For example
- // in `tests/pos/implicit-scope-loop.scala`, the scope of B contains
- // the scope of A which contains the scope of B. We break the loop
- // by returning EmptyTermRefSet in `collectCompanions` for types
- // that we have already seen, but this means that we cannot cache
- // the computed scope of A, it is incomplete.
- // Keeping track of exactly where these loops happen would require a
- // lot of book-keeping, instead we choose to be conservative and only
- // cache scopes before any type has been seen. This is unfortunate
- // because loops are very common for types in scala.collection.
- computeIScope(cacheResult = seen.isEmpty)
- }
+ if (canCache) implicitScopeCache.getOrElse(tp, computeIScope())
+ else computeIScope()
}
- iscope(tp)
+ iscope(rootTp)
}
/** A map that counts the number of times an implicit ref was picked */
@@ -450,7 +496,7 @@ trait Implicits { self: Typer =>
*/
def inferImplicitArg(formal: Type, error: (String => String) => Unit, pos: Position)(implicit ctx: Context): Tree =
inferImplicit(formal, EmptyTree, pos) match {
- case SearchSuccess(arg, _, _) =>
+ case SearchSuccess(arg, _, _, _) =>
arg
case ambi: AmbiguousImplicits =>
error(where => s"ambiguous implicits: ${ambi.explanation} of $where")
@@ -496,6 +542,15 @@ trait Implicits { self: Typer =>
}
private def assumedCanEqual(ltp: Type, rtp: Type)(implicit ctx: Context) = {
+ def eqNullable: Boolean = {
+ val other =
+ if (ltp.isRef(defn.NullClass)) rtp
+ else if (rtp.isRef(defn.NullClass)) ltp
+ else NoType
+
+ (other ne NoType) && !other.derivesFrom(defn.AnyValClass)
+ }
+
val lift = new TypeMap {
def apply(t: Type) = t match {
case t: TypeRef =>
@@ -507,7 +562,7 @@ trait Implicits { self: Typer =>
if (variance > 0) mapOver(t) else t
}
}
- ltp.isError || rtp.isError || ltp <:< lift(rtp) || rtp <:< lift(ltp)
+ ltp.isError || rtp.isError || ltp <:< lift(rtp) || rtp <:< lift(ltp) || eqNullable
}
/** Check that equality tests between types `ltp` and `rtp` make sense */
@@ -587,18 +642,19 @@ trait Implicits { self: Typer =>
val wildProto = implicitProto(pt, wildApprox(_))
/** Search failures; overridden in ExplainedImplicitSearch */
- protected def nonMatchingImplicit(ref: TermRef): SearchFailure = NoImplicitMatches
+ protected def nonMatchingImplicit(ref: TermRef, trail: List[MessageContainer]): SearchFailure = NoImplicitMatches
protected def divergingImplicit(ref: TermRef): SearchFailure = NoImplicitMatches
protected def shadowedImplicit(ref: TermRef, shadowing: Type): SearchFailure = NoImplicitMatches
protected def failedSearch: SearchFailure = NoImplicitMatches
/** Search a list of eligible implicit references */
- def searchImplicits(eligible: List[TermRef], contextual: Boolean): SearchResult = {
+ def searchImplicits(eligible: List[Candidate], contextual: Boolean): SearchResult = {
val constr = ctx.typerState.constraint
/** Try to typecheck an implicit reference */
- def typedImplicit(ref: TermRef)(implicit ctx: Context): SearchResult = track("typedImplicit") { ctx.traceIndented(i"typed implicit $ref, pt = $pt, implicitsEnabled == ${ctx.mode is ImplicitsEnabled}", implicits, show = true) {
+ def typedImplicit(cand: Candidate)(implicit ctx: Context): SearchResult = track("typedImplicit") { ctx.traceIndented(i"typed implicit ${cand.ref}, pt = $pt, implicitsEnabled == ${ctx.mode is ImplicitsEnabled}", implicits, show = true) {
assert(constr eq ctx.typerState.constraint)
+ val ref = cand.ref
var generated: Tree = tpd.ref(ref).withPos(pos)
if (!argument.isEmpty)
generated = typedUnadapted(
@@ -622,13 +678,14 @@ trait Implicits { self: Typer =>
case result: AmbiguousImplicits => true
case _ => false
}
+
def validEqAnyArgs(tp1: Type, tp2: Type) = {
List(tp1, tp2).foreach(fullyDefinedType(_, "eqAny argument", pos))
assumedCanEqual(tp1, tp2) || !hasEq(tp1) && !hasEq(tp2) ||
{ implicits.println(i"invalid eqAny[$tp1, $tp2]"); false }
}
if (ctx.reporter.hasErrors)
- nonMatchingImplicit(ref)
+ nonMatchingImplicit(ref, ctx.reporter.removeBufferedMessages)
else if (contextual && !ctx.mode.is(Mode.ImplicitShadowing) &&
!shadowing.tpe.isError && !refMatches(shadowing)) {
implicits.println(i"SHADOWING $ref in ${ref.termSymbol.owner} is shadowed by $shadowing in ${shadowing.symbol.owner}")
@@ -637,9 +694,9 @@ trait Implicits { self: Typer =>
else generated1 match {
case TypeApply(fn, targs @ (arg1 :: arg2 :: Nil))
if fn.symbol == defn.Predef_eqAny && !validEqAnyArgs(arg1.tpe, arg2.tpe) =>
- nonMatchingImplicit(ref)
+ nonMatchingImplicit(ref, Nil)
case _ =>
- SearchSuccess(generated1, ref, ctx.typerState)
+ SearchSuccess(generated1, ref, cand.level, ctx.typerState)
}
}}
@@ -648,19 +705,20 @@ trait Implicits { self: Typer =>
* @param pending The list of implicit references that remain to be investigated
* @param acc An accumulator of successful matches found so far.
*/
- def rankImplicits(pending: List[TermRef], acc: List[SearchSuccess]): List[SearchSuccess] = pending match {
- case ref :: pending1 =>
+ def rankImplicits(pending: List[Candidate], acc: List[SearchSuccess]): List[SearchSuccess] = pending match {
+ case cand :: pending1 =>
val history = ctx.searchHistory nest wildProto
val result =
- if (history eq ctx.searchHistory) divergingImplicit(ref)
- else typedImplicit(ref)(nestedContext.setNewTyperState.setSearchHistory(history))
+ if (history eq ctx.searchHistory) divergingImplicit(cand.ref)
+ else typedImplicit(cand)(nestedContext.setNewTyperState.setSearchHistory(history))
result match {
case fail: SearchFailure =>
rankImplicits(pending1, acc)
case best: SearchSuccess =>
if (ctx.mode.is(Mode.ImplicitExploration)) best :: Nil
else {
- val newPending = pending1 filter (isAsGood(_, best.ref)(nestedContext.setExploreTyperState))
+ val newPending = pending1.filter(cand1 =>
+ isAsGood(cand1.ref, best.ref, cand1.level, best.level)(nestedContext.setExploreTyperState))
rankImplicits(newPending, best :: acc)
}
}
@@ -689,8 +747,9 @@ trait Implicits { self: Typer =>
/** Convert a (possibly empty) list of search successes into a single search result */
def condense(hits: List[SearchSuccess]): SearchResult = hits match {
case best :: alts =>
- alts find (alt => isAsGood(alt.ref, best.ref)(ctx.fresh.setExploreTyperState)) match {
+ alts find (alt => isAsGood(alt.ref, best.ref, alt.level, best.level)(ctx.fresh.setExploreTyperState)) match {
case Some(alt) =>
+ typr.println(i"ambiguous implicits for $pt: ${best.ref} @ ${best.level}, ${alt.ref} @ ${alt.level}")
/* !!! DEBUG
println(i"ambiguous refs: ${hits map (_.ref) map (_.show) mkString ", "}")
isAsGood(best.ref, alt.ref, explain = true)(ctx.fresh.withExploreTyperState)
@@ -707,16 +766,18 @@ trait Implicits { self: Typer =>
failedSearch
}
+ def ranking(cand: Candidate) = -ctx.runInfo.useCount(cand.ref)
+
/** Sort list of implicit references according to their popularity
* (# of times each was picked in current run).
*/
- def sort(eligible: List[TermRef]) = eligible match {
+ def sort(eligible: List[Candidate]) = eligible match {
case Nil => eligible
case e1 :: Nil => eligible
case e1 :: e2 :: Nil =>
- if (ctx.runInfo.useCount(e1) < ctx.runInfo.useCount(e2)) e2 :: e1 :: Nil
+ if (ranking(e2) < ranking(e1)) e2 :: e1 :: Nil
else eligible
- case _ => eligible.sortBy(-ctx.runInfo.useCount(_))
+ case _ => eligible.sortBy(ranking)
}
condense(rankImplicits(sort(eligible), Nil))
@@ -743,8 +804,8 @@ trait Implicits { self: Typer =>
fail
}
def failures = myFailures.toList
- override def nonMatchingImplicit(ref: TermRef) =
- record(new NonMatchingImplicit(ref, pt, argument))
+ override def nonMatchingImplicit(ref: TermRef, trail: List[MessageContainer]) =
+ record(new NonMatchingImplicit(ref, pt, argument, trail))
override def divergingImplicit(ref: TermRef) =
record(new DivergingImplicit(ref, pt, argument))
override def shadowedImplicit(ref: TermRef, shadowing: Type): SearchFailure =
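A minimal sketch of the disambiguation the new nesting levels enable (hypothetical names, not part of the patch); with the level-aware `isAsGood`, the more deeply nested candidate wins rather than the search being ambiguous, and the previous behavior is kept under Scala2 mode:

object NestingSketch {
  implicit val outer: String = "outer"

  def pick: String = {
    implicit val inner: String = "inner"
    // Both candidates have type String; `inner` sits at a higher nesting level,
    // so the level-aware isAsGood selects it instead of reporting an ambiguity.
    // Under Scala2 mode the levels are ignored and the old rules apply.
    implicitly[String]
  }
}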
diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala
index 3aa289181..e44343e70 100644
--- a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala
@@ -95,14 +95,22 @@ class ImportInfo(symf: => Symbol, val selectors: List[untpd.Tree], val isRootImp
/** The root import symbol hidden by this symbol, or NoSymbol if no such symbol is hidden.
* Note: this computation needs to work even for un-initialized import infos, and
* is not allowed to force initialization.
+ *
+ * TODO: Once we have fully bootstrapped, I would prefer if we expressed
+ * unimport with an `override` modifier, and generalized it to all imports.
+ * I believe this would be more transparent than the current set of conditions. E.g.
+ *
+ * override import Predef.{any2stringAdd => _, StringAdd => _, _} // disables String +
+ * override import java.lang.{} // disables all imports
*/
- lazy val hiddenRoot: Symbol = {
- val sym = site.termSymbol
- def hasMaskingSelector = selectors exists {
+ lazy val unimported: Symbol = {
+ lazy val sym = site.termSymbol
+ val hasMaskingSelector = selectors exists {
case Thicket(_ :: Ident(nme.WILDCARD) :: Nil) => true
case _ => false
}
- if ((defn.RootImportTypes exists (_.symbol == sym)) && hasMaskingSelector) sym else NoSymbol
+ if (hasMaskingSelector && defn.RootImportTypes.exists(_.symbol == sym)) sym
+ else NoSymbol
}
override def toString = {
diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
index aede4974a..86649d78e 100644
--- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -119,6 +119,18 @@ object Inferencing {
}
}
+ /** If `tree` has a PolyType, infer its type parameters by comparing with expected type `pt` */
+ def inferTypeParams(tree: Tree, pt: Type)(implicit ctx: Context): Tree = tree.tpe match {
+ case poly: PolyType =>
+ val (poly1, tvars) = constrained(poly, tree)
+ val tree1 = tree.withType(poly1).appliedToTypeTrees(tvars)
+ tree1.tpe <:< pt
+ fullyDefinedType(tree1.tpe, "template parent", tree.pos)
+ tree1
+ case _ =>
+ tree
+ }
+
/** The list of uninstantiated type variables bound by some prefix of type `T` which
* occur in at least one formal parameter type of a prefix application.
* Considered prefixes are:
@@ -216,10 +228,10 @@ object Inferencing {
def interpolateUndetVars(tree: Tree, ownedBy: Symbol)(implicit ctx: Context): Unit = {
val constraint = ctx.typerState.constraint
val qualifies = (tvar: TypeVar) =>
- (tree contains tvar.owningTree) || ownedBy.exists && tvar.owner == ownedBy
+ (tree contains tvar.bindingTree) || ownedBy.exists && tvar.owner == ownedBy
def interpolate() = Stats.track("interpolateUndetVars") {
val tp = tree.tpe.widen
- constr.println(s"interpolate undet vars in ${tp.show}, pos = ${tree.pos}, mode = ${ctx.mode}, undets = ${constraint.uninstVars map (tvar => s"${tvar.show}@${tvar.owningTree.pos}")}")
+ constr.println(s"interpolate undet vars in ${tp.show}, pos = ${tree.pos}, mode = ${ctx.mode}, undets = ${constraint.uninstVars map (tvar => s"${tvar.show}@${tvar.bindingTree.pos}")}")
constr.println(s"qualifying undet vars: ${constraint.uninstVars filter qualifies map (tvar => s"$tvar / ${tvar.show}")}, constraint: ${constraint.show}")
val vs = variances(tp, qualifies)
diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala
index 3931fcaf4..09487570d 100644
--- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala
@@ -189,7 +189,7 @@ object Inliner {
if (!ctx.isAfterTyper) {
val inlineCtx = ctx
sym.updateAnnotation(LazyBodyAnnotation { _ =>
- implicit val ctx: Context = inlineCtx
+ implicit val ctx = inlineCtx
ctx.withNoError(treeExpr(ctx))(makeInlineable)
})
}
diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala
index 148cf1da7..1b6e437b5 100644
--- a/compiler/src/dotty/tools/dotc/typer/Namer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala
@@ -528,18 +528,96 @@ class Namer { typer: Typer =>
}
}
- stats foreach expand
+ stats.foreach(expand)
mergeCompanionDefs()
val ctxWithStats = (ctx /: stats) ((ctx, stat) => indexExpanded(stat)(ctx))
createCompanionLinks(ctxWithStats)
ctxWithStats
}
+ /** Add all annotations of definitions in `stats` to the defined symbols */
+ def annotate(stats: List[Tree])(implicit ctx: Context): Unit = {
+ def recur(stat: Tree): Unit = stat match {
+ case pcl: PackageDef =>
+ annotate(pcl.stats)
+ case stat: untpd.MemberDef =>
+ stat.getAttachment(SymOfTree) match {
+ case Some(sym) =>
+ sym.infoOrCompleter match {
+ case info: Completer if !defn.isPredefClass(sym.owner) =>
+ // Annotate Predef methods only when they are completed;
+ // This is necessary to break a cyclic dependence between `Predef`
+ // and `deprecated` in test `compileStdLib`.
+ addAnnotations(sym, stat)(info.creationContext)
+ case _ =>
+ // Annotations were already added as part of the symbol's completion
+ }
+ case none =>
+ assert(stat.typeOpt.exists, i"no symbol for $stat")
+ }
+ case stat: untpd.Thicket =>
+ stat.trees.foreach(recur)
+ case _ =>
+ }
+
+ for (stat <- stats) recur(expanded(stat))
+ }
+
+ /** Add annotations of `stat` to `sym`.
+ * This method can be called twice on a symbol (e.g. once
+ * during the `annotate` phase and then again during completion).
+   *  Therefore, care needs to be taken not to add annotations that
+   *  have already been added to the symbol.
+ */
+ def addAnnotations(sym: Symbol, stat: MemberDef)(implicit ctx: Context) = {
+ // (1) The context in which an annotation of a top-level class or module is evaluated
+ // is the closest enclosing context which has the enclosing package as owner.
+ // (2) The context in which an annotation for any other symbol is evaluated is the
+ // closest enclosing context which has the owner of the class enclosing the symbol as owner.
+ // E.g in
+ //
+ // package p
+ // import a.b
+ // class C {
+ // import d.e
+ // @ann m() ...
+ // }
+ //
+ // `@ann` is evaluated in the context just outside `C`, where the `a.b`
+ // import is visible but the `d.e` import is forgotten. This measure is necessary
+ // in order to avoid cycles.
+ lazy val annotCtx = {
+ var target = sym.owner.lexicallyEnclosingClass
+ if (!target.is(PackageClass)) target = target.owner
+ var c = ctx
+ while (c.owner != target) c = c.outer
+ c
+ }
+ for (annotTree <- untpd.modsDeco(stat).mods.annotations) {
+ val cls = typedAheadAnnotation(annotTree)(annotCtx)
+ if (sym.unforcedAnnotation(cls).isEmpty) {
+ val ann = Annotation.deferred(cls, implicit ctx => typedAnnotation(annotTree))
+ sym.addAnnotation(ann)
+ if (cls == defn.InlineAnnot && sym.is(Method, butNot = Accessor))
+ sym.setFlag(Inline)
+ }
+ }
+ }
+
+ def indexAndAnnotate(stats: List[Tree])(implicit ctx: Context): Context = {
+ val localCtx = index(stats)
+ annotate(stats)
+ localCtx
+ }
+
/** The completer of a symbol defined by a member def or import (except ClassSymbols) */
class Completer(val original: Tree)(implicit ctx: Context) extends LazyType {
protected def localContext(owner: Symbol) = ctx.fresh.setOwner(owner).setTree(original)
+ /** The context with which this completer was created */
+ def creationContext = ctx
+
protected def typeSig(sym: Symbol): Type = original match {
case original: ValDef =>
if (sym is Module) moduleValSig(sym)
@@ -572,19 +650,6 @@ class Namer { typer: Typer =>
completeInCreationContext(denot)
}
- protected def addAnnotations(denot: SymDenotation): Unit = original match {
- case original: untpd.MemberDef =>
- var hasInlineAnnot = false
- for (annotTree <- untpd.modsDeco(original).mods.annotations) {
- val cls = typedAheadAnnotation(annotTree)
- val ann = Annotation.deferred(cls, implicit ctx => typedAnnotation(annotTree))
- denot.addAnnotation(ann)
- if (cls == defn.InlineAnnot && denot.is(Method, butNot = Accessor))
- denot.setFlag(Inline)
- }
- case _ =>
- }
-
private def addInlineInfo(denot: SymDenotation) = original match {
case original: untpd.DefDef if denot.isInlineMethod =>
Inliner.registerInlineInfo(
@@ -598,7 +663,10 @@ class Namer { typer: Typer =>
* to pick up the context at the point where the completer was created.
*/
def completeInCreationContext(denot: SymDenotation): Unit = {
- addAnnotations(denot)
+ original match {
+ case original: MemberDef => addAnnotations(denot.symbol, original)
+ case _ =>
+ }
addInlineInfo(denot)
denot.info = typeSig(denot.symbol)
Checking.checkWellFormed(denot.symbol)
@@ -615,7 +683,7 @@ class Namer { typer: Typer =>
//println(i"completing type params of $sym in ${sym.owner}")
nestedCtx = localContext(sym).setNewScope
myTypeParams = {
- implicit val ctx: Context = nestedCtx
+ implicit val ctx = nestedCtx
val tparams = original.rhs match {
case PolyTypeTree(tparams, _) => tparams
case _ => Nil
@@ -742,7 +810,7 @@ class Namer { typer: Typer =>
ok
}
- addAnnotations(denot)
+ addAnnotations(denot.symbol, original)
val selfInfo =
if (self.isEmpty) NoType
@@ -765,9 +833,10 @@ class Namer { typer: Typer =>
// accessors, that's why the constructor needs to be completed before
// the parent types are elaborated.
index(constr)
+ annotate(constr :: params)
symbolOfTree(constr).ensureCompleted()
- index(rest)(inClassContext(selfInfo))
+ indexAndAnnotate(rest)(inClassContext(selfInfo))
val tparamAccessors = decls.filter(_ is TypeParamAccessor).toList
val parentTypes = ensureFirstIsClass(parents.map(checkedParentType(_, tparamAccessors)))
@@ -783,23 +852,23 @@ class Namer { typer: Typer =>
}
}
- /** Typecheck tree during completion, and remember result in typedtree map */
- private def typedAheadImpl(tree: Tree, pt: Type)(implicit ctx: Context): tpd.Tree = {
+ /** Typecheck `tree` during completion using `typed`, and remember result in TypedAhead map */
+ def typedAheadImpl(tree: Tree, typed: untpd.Tree => tpd.Tree)(implicit ctx: Context): tpd.Tree = {
val xtree = expanded(tree)
xtree.getAttachment(TypedAhead) match {
case Some(ttree) => ttree
case none =>
- val ttree = typer.typed(tree, pt)
- xtree.pushAttachment(TypedAhead, ttree)
+ val ttree = typed(tree)
+ xtree.putAttachment(TypedAhead, ttree)
ttree
}
}
def typedAheadType(tree: Tree, pt: Type = WildcardType)(implicit ctx: Context): tpd.Tree =
- typedAheadImpl(tree, pt)(ctx retractMode Mode.PatternOrType addMode Mode.Type)
+ typedAheadImpl(tree, typer.typed(_, pt)(ctx retractMode Mode.PatternOrType addMode Mode.Type))
def typedAheadExpr(tree: Tree, pt: Type = WildcardType)(implicit ctx: Context): tpd.Tree =
- typedAheadImpl(tree, pt)(ctx retractMode Mode.PatternOrType)
+ typedAheadImpl(tree, typer.typed(_, pt)(ctx retractMode Mode.PatternOrType))
def typedAheadAnnotation(tree: Tree)(implicit ctx: Context): Symbol = tree match {
case Apply(fn, _) => typedAheadAnnotation(fn)
@@ -810,7 +879,7 @@ class Namer { typer: Typer =>
/** Enter and typecheck parameter list */
def completeParams(params: List[MemberDef])(implicit ctx: Context) = {
- index(params)
+ indexAndAnnotate(params)
for (param <- params) typedAheadExpr(param)
}
@@ -929,11 +998,16 @@ class Namer { typer: Typer =>
lhsType // keep constant types that fill in for a non-constant (to be revised when inline has landed).
else inherited
else {
- if (sym is Implicit) {
- val resStr = if (mdef.isInstanceOf[DefDef]) "result " else ""
- ctx.error(s"${resStr}type of implicit definition needs to be given explicitly", mdef.pos)
+ def missingType(modifier: String) = {
+ ctx.error(s"${modifier}type of implicit definition needs to be given explicitly", mdef.pos)
sym.resetFlag(Implicit)
}
+ if (sym is Implicit)
+ mdef match {
+          case _: DefDef => missingType("result ")
+ case _: ValDef if sym.owner.isType => missingType("")
+ case _ =>
+ }
lhsType orElse WildcardType
}
}
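
A hedged illustration of the new `missingType` handling above (the `Codecs` class and its members are invented names): implicit members of a class now require an explicit type annotation, with `def`s reporting a missing result type, while local implicit vals are left alone.

    class Codecs {
      implicit val sep = ","              // error: type of implicit definition needs to be given explicitly
      implicit def twice(x: Int) = x + x  // error: result type of implicit definition needs to be given explicitly
      implicit val ok: String = ";"       // fine: type is given
      def local() = { implicit val tag = "t"; tag }  // owner is a method, not a type: no error
    }
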
@@ -990,7 +1064,7 @@ class Namer { typer: Typer =>
// 3. Info of CP is computed (to be copied to DP).
// 4. CP is completed.
// 5. Info of CP is copied to DP and DP is completed.
- index(tparams)
+ indexAndAnnotate(tparams)
if (isConstructor) sym.owner.typeParams.foreach(_.ensureCompleted())
for (tparam <- tparams) typedAheadExpr(tparam)
diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
index 9a20a452e..eb46a131f 100644
--- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
@@ -40,7 +40,9 @@ object ProtoTypes {
/** Test compatibility after normalization in a fresh typerstate. */
def normalizedCompatible(tp: Type, pt: Type)(implicit ctx: Context) = {
val nestedCtx = ctx.fresh.setExploreTyperState
- isCompatible(normalize(tp, pt)(nestedCtx), pt)(nestedCtx)
+ val normTp = normalize(tp, pt)(nestedCtx)
+ isCompatible(normTp, pt)(nestedCtx) ||
+ pt.isRef(defn.UnitClass) && normTp.isParameterless
}
private def disregardProto(pt: Type)(implicit ctx: Context): Boolean = pt.dealias match {
@@ -250,6 +252,22 @@ object ProtoTypes {
/** Somebody called the `tupled` method of this prototype */
def isTupled: Boolean = myTupled.isInstanceOf[FunProto]
+ /** If true, the application of this prototype was canceled. */
+ private var toDrop: Boolean = false
+
+ /** Cancel the application of this prototype. This can happen for a nullary
+ * application `f()` if `f` refers to a symbol that exists both in parameterless
+   *  form `def f` and nullary method form `def f()`. A common example of such
+   *  a method is `toString`. If, in that case, the type in the denotation is
+   *  parameterless, we compensate by dropping the application.
+ */
+ def markAsDropped() = {
+ assert(args.isEmpty)
+ toDrop = true
+ }
+
+ def isDropped: Boolean = toDrop
+
override def toString = s"FunProto(${args mkString ","} => $resultType)"
def map(tm: TypeMap)(implicit ctx: Context): FunProto =
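
A hedged sketch of the scenario `markAsDropped` handles (the `Box` class is hypothetical): `toString` is declared parameterless in Scala but overrides Java's nullary `Object.toString()`, so an explicit empty argument list is compensated by dropping the application rather than by searching for an `apply` or an implicit conversion.

    class Box(val x: Int) {
      override def toString = s"Box($x)"  // parameterless, overrides the nullary java.lang.Object.toString()
    }

    val b = new Box(1)
    b.toString()   // FunProto(Nil, ...) is marked as dropped; typed as if written b.toString
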
@@ -353,20 +371,23 @@ object ProtoTypes {
* Also, if `owningTree` is non-empty, add a type variable for each parameter.
* @return The added polytype, and the list of created type variables.
*/
- def constrained(pt: PolyType, owningTree: untpd.Tree)(implicit ctx: Context): (PolyType, List[TypeVar]) = {
+ def constrained(pt: PolyType, owningTree: untpd.Tree)(implicit ctx: Context): (PolyType, List[TypeTree]) = {
val state = ctx.typerState
assert(!(ctx.typerState.isCommittable && owningTree.isEmpty),
s"inconsistent: no typevars were added to committable constraint ${state.constraint}")
- def newTypeVars(pt: PolyType): List[TypeVar] =
+ def newTypeVars(pt: PolyType): List[TypeTree] =
for (n <- (0 until pt.paramNames.length).toList)
- yield new TypeVar(PolyParam(pt, n), state, owningTree, ctx.owner)
+ yield {
+ val tt = new TypeTree().withPos(owningTree.pos)
+ tt.withType(new TypeVar(PolyParam(pt, n), state, tt, ctx.owner))
+ }
val added =
if (state.constraint contains pt) pt.newLikeThis(pt.paramNames, pt.paramBounds, pt.resultType)
else pt
val tvars = if (owningTree.isEmpty) Nil else newTypeVars(added)
- ctx.typeComparer.addToConstraint(added, tvars)
+ ctx.typeComparer.addToConstraint(added, tvars.tpes.asInstanceOf[List[TypeVar]])
(added, tvars)
}
diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala
index 2413c0c22..3252ead47 100644
--- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala
@@ -72,9 +72,10 @@ class ReTyper extends Typer {
override def localTyper(sym: Symbol) = this
override def index(trees: List[untpd.Tree])(implicit ctx: Context) = ctx
+ override def annotate(trees: List[untpd.Tree])(implicit ctx: Context) = ()
- override def tryInsertApplyOrImplicit(tree: Tree, pt: ProtoType)(fallBack: (Tree, TyperState) => Tree)(implicit ctx: Context): Tree =
- fallBack(tree, ctx.typerState)
+ override def tryInsertApplyOrImplicit(tree: Tree, pt: ProtoType)(fallBack: => Tree)(implicit ctx: Context): Tree =
+ fallBack
override def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(implicit ctx: Context): Unit = ()
diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
index 46bdbf3b3..3192546cd 100644
--- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
+++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -18,7 +18,6 @@ import config.{ScalaVersion, NoScalaVersion}
import Decorators._
import typer.ErrorReporting._
import DenotTransformers._
-import ValueClasses.isDerivedValueClass
object RefChecks {
import tpd._
@@ -300,7 +299,9 @@ object RefChecks {
!member.isAnyOverride) {
// (*) Exclusion for default getters, fixes SI-5178. We cannot assign the Override flag to
// the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
- if (autoOverride(member))
+ // Also excluded under Scala2 mode are overrides of default methods of Java traits.
+ if (autoOverride(member) ||
+ other.owner.is(JavaTrait) && ctx.testScala2Mode("`override' modifier required when a Java 8 default method is re-implemented", member.pos))
member.setFlag(Override)
else if (member.owner != clazz && other.owner != clazz && !(other.owner derivesFrom member.owner))
emitOverrideError(
@@ -327,7 +328,8 @@ object RefChecks {
overrideError("needs to be a stable, immutable value")
} else if (member.is(ModuleVal) && !other.isRealMethod && !other.is(Deferred | Lazy)) {
overrideError("may not override a concrete non-lazy value")
- } else if (member.is(Lazy, butNot = Module) && !other.isRealMethod && !other.is(Lazy)) {
+ } else if (member.is(Lazy, butNot = Module) && !other.isRealMethod && !other.is(Lazy) &&
+ !ctx.testScala2Mode("may not override a non-lazy value", member.pos)) {
overrideError("may not override a non-lazy value")
} else if (other.is(Lazy) && !other.isRealMethod && !member.is(Lazy)) {
overrideError("must be declared lazy to override a lazy value")
@@ -688,39 +690,6 @@ object RefChecks {
}
}
- /** Verify classes extending AnyVal meet the requirements */
- private def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(implicit ctx: Context) = {
- def checkValueClassMember(stat: Tree) = stat match {
- case _: ValDef if !stat.symbol.is(ParamAccessor) =>
- ctx.error(s"value class may not define non-parameter field", stat.pos)
- case _: DefDef if stat.symbol.isConstructor =>
- ctx.error(s"value class may not define secondary constructor", stat.pos)
- case _: MemberDef | _: Import | EmptyTree =>
- // ok
- case _ =>
- ctx.error(s"value class may not contain initialization statements", stat.pos)
- }
- if (isDerivedValueClass(clazz)) {
- if (clazz.is(Trait))
- ctx.error("Only classes (not traits) are allowed to extend AnyVal", clazz.pos)
- if (clazz.is(Abstract))
- ctx.error("`abstract' modifier cannot be used with value classes", clazz.pos)
- if (!clazz.isStatic)
- ctx.error(s"value class may not be a ${if (clazz.owner.isTerm) "local class" else "member of another class"}", clazz.pos)
- else {
- val clParamAccessors = clazz.asClass.paramAccessors.filter(sym => sym.isTerm && !sym.is(Method))
- clParamAccessors match {
- case List(param) =>
- if (param.is(Mutable))
- ctx.error("value class parameter must not be a var", param.pos)
- case _ =>
- ctx.error("value class needs to have exactly one val parameter", clazz.pos)
- }
- }
- stats.foreach(checkValueClassMember)
- }
- }
-
type LevelAndIndex = immutable.Map[Symbol, (LevelInfo, Int)]
class OptLevelInfo extends DotClass {
@@ -836,7 +805,6 @@ class RefChecks extends MiniPhase { thisTransformer =>
checkParents(cls)
checkCompanionNameClashes(cls)
checkAllOverrides(cls)
- checkDerivedValueClass(cls, tree.body)
tree
} catch {
case ex: MergeError =>
diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
index ee2d68278..5c07b7bcf 100644
--- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
+++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
@@ -189,9 +189,8 @@ trait TypeAssigner {
val where = if (ctx.owner.exists) s" from ${ctx.owner.enclosingClass}" else ""
val whyNot = new StringBuffer
alts foreach (_.isAccessibleFrom(pre, superAccess, whyNot))
- if (!tpe.isError)
- ctx.error(ex"$what cannot be accessed as a member of $pre$where.$whyNot", pos)
- ErrorType
+ if (tpe.isError) tpe
+ else errorType(ex"$what cannot be accessed as a member of $pre$where.$whyNot", pos)
}
}
else if (d.symbol is TypeParamAccessor)
@@ -215,17 +214,17 @@ trait TypeAssigner {
else if (site.derivesFrom(defn.DynamicClass) && !Dynamic.isDynamicMethod(name)) {
TryDynamicCallType
} else {
- if (!site.isErroneous) {
+ if (site.isErroneous) UnspecifiedErrorType
+ else {
def kind = if (name.isTypeName) "type" else "value"
def addendum =
if (site.derivesFrom(defn.DynamicClass)) "\npossible cause: maybe a wrong Dynamic method signature?"
else ""
- ctx.error(
+ errorType(
if (name == nme.CONSTRUCTOR) ex"$site does not have a constructor"
else NotAMember(site, name, kind),
pos)
}
- ErrorType
}
}
@@ -283,7 +282,9 @@ trait TypeAssigner {
def assignType(tree: untpd.This)(implicit ctx: Context) = {
val cls = qualifyingClass(tree, tree.qual.name, packageOK = false)
- tree.withType(cls.thisType)
+ tree.withType(
+ if (cls.isClass) cls.thisType
+ else errorType("not a legal qualifying class for this", tree.pos))
}
def assignType(tree: untpd.Super, qual: Tree, inConstrCall: Boolean, mixinClass: Symbol = NoSymbol)(implicit ctx: Context) = {
@@ -314,7 +315,8 @@ trait TypeAssigner {
val ownType = fn.tpe.widen match {
case fntpe @ MethodType(_, ptypes) =>
if (sameLength(ptypes, args) || ctx.phase.prev.relaxedTyping) fntpe.instantiate(args.tpes)
- else wrongNumberOfArgs(fn.tpe, "", fntpe.typeParams, args, tree.pos)
+ else
+ errorType(i"wrong number of arguments for $fntpe: ${fn.tpe}, expected: ${ptypes.length}, found: ${args.length}", tree.pos)
case t =>
errorType(i"${err.exprStr(fn)} does not take parameters", tree.pos)
}
@@ -369,7 +371,7 @@ trait TypeAssigner {
else {
val argTypes = args.tpes
if (sameLength(argTypes, paramNames) || ctx.phase.prev.relaxedTyping) pt.instantiate(argTypes)
- else wrongNumberOfArgs(fn.tpe, "type", pt.typeParams, args, tree.pos)
+ else wrongNumberOfTypeArgs(fn.tpe, pt.typeParams, args, tree.pos)
}
case _ =>
errorType(i"${err.exprStr(fn)} does not take type parameters", tree.pos)
@@ -462,7 +464,7 @@ trait TypeAssigner {
val ownType =
if (hasNamedArg(args)) (tycon.tpe /: args)(refineNamed)
else if (sameLength(tparams, args)) tycon.tpe.appliedTo(args.tpes)
- else wrongNumberOfArgs(tycon.tpe, "type", tparams, args, tree.pos)
+ else wrongNumberOfTypeArgs(tycon.tpe, tparams, args, tree.pos)
tree.withType(ownType)
}
diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala
index 9f5a942d6..a053a0b0d 100644
--- a/compiler/src/dotty/tools/dotc/typer/Typer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala
@@ -27,7 +27,7 @@ import EtaExpansion.etaExpand
import dotty.tools.dotc.transform.Erasure.Boxing
import util.Positions._
import util.common._
-import util.SourcePosition
+import util.{SourcePosition, Property}
import collection.mutable
import annotation.tailrec
import Implicits._
@@ -57,6 +57,8 @@ object Typer {
def assertPositioned(tree: untpd.Tree)(implicit ctx: Context) =
if (!tree.isEmpty && !tree.isInstanceOf[untpd.TypedSplice] && ctx.typerState.isGlobalCommittable)
assert(tree.pos.exists, s"position not set for $tree # ${tree.uniqueId}")
+
+ private val ExprOwner = new Property.Key[Symbol]
}
class Typer extends Namer with TypeAssigner with Applications with Implicits with Dynamic with Checking with Docstrings {
@@ -74,7 +76,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
* Note: It would be more proper to move importedFromRoot into typedIdent.
* We should check that this has no performance degradation, however.
*/
- private var importedFromRoot: Set[Symbol] = Set()
+ private var unimported: Set[Symbol] = Set()
/** Temporary data item for single call to typed ident:
* This symbol would be found under Scala2 mode, but is not
@@ -102,15 +104,6 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
*/
def error(msg: => Message, pos: Position) = ctx.error(msg, pos)
- /** Is this import a root import that has been shadowed by an explicit
- * import in the same program?
- */
- def isDisabled(imp: ImportInfo, site: Type): Boolean = {
- if (imp.isRootImport && (importedFromRoot contains site.termSymbol)) return true
- if (imp.hiddenRoot.exists) importedFromRoot += imp.hiddenRoot
- false
- }
-
/** Does this identifier appear as a constructor of a pattern? */
def isPatternConstr =
if (ctx.mode.isExpr && (ctx.outer.mode is Mode.Pattern))
@@ -188,32 +181,44 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
/** The type representing a named import with enclosing name when imported
* from given `site` and `selectors`.
*/
- def namedImportRef(site: Type, selectors: List[untpd.Tree])(implicit ctx: Context): Type = {
- def checkUnambiguous(found: Type) = {
- val other = namedImportRef(site, selectors.tail)
- if (other.exists && found.exists && (found != other))
- error(em"reference to `$name` is ambiguous; it is imported twice in ${ctx.tree}",
- tree.pos)
- found
- }
+ def namedImportRef(imp: ImportInfo)(implicit ctx: Context): Type = {
val Name = name.toTermName.decode
- selectors match {
+ def recur(selectors: List[untpd.Tree]): Type = selectors match {
case selector :: rest =>
+ def checkUnambiguous(found: Type) = {
+ val other = recur(selectors.tail)
+ if (other.exists && found.exists && (found != other))
+ error(em"reference to `$name` is ambiguous; it is imported twice in ${ctx.tree}",
+ tree.pos)
+ found
+ }
+
+ def selection(name: Name) =
+ if (imp.sym.isCompleting) {
+ ctx.warning(i"cyclic ${imp.sym}, ignored", tree.pos)
+ NoType
+ }
+ else if (unimported.nonEmpty && unimported.contains(imp.site.termSymbol))
+ NoType
+ else {
+ // Pass refctx so that any errors are reported in the context of the
+              // reference instead of the context of the import.
+ checkUnambiguous(selectionType(imp.site, name, tree.pos)(refctx))
+ }
+
selector match {
case Thicket(fromId :: Ident(Name) :: _) =>
val Ident(from) = fromId
- val selName = if (name.isTypeName) from.toTypeName else from
- // Pass refctx so that any errors are reported in the context of the
- // reference instead of the context of the import.
- checkUnambiguous(selectionType(site, selName, tree.pos)(refctx))
+ selection(if (name.isTypeName) from.toTypeName else from)
case Ident(Name) =>
- checkUnambiguous(selectionType(site, name, tree.pos)(refctx))
+ selection(name)
case _ =>
- namedImportRef(site, rest)
+ recur(rest)
}
case nil =>
NoType
}
+ recur(imp.selectors)
}
/** The type representing a wildcard import with enclosing name when imported
@@ -222,7 +227,9 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
def wildImportRef(imp: ImportInfo)(implicit ctx: Context): Type = {
if (imp.isWildcardImport) {
val pre = imp.site
- if (!isDisabled(imp, pre) && !(imp.excluded contains name.toTermName) && name != nme.CONSTRUCTOR) {
+ if (!unimported.contains(pre.termSymbol) &&
+ !imp.excluded.contains(name.toTermName) &&
+ name != nme.CONSTRUCTOR) {
val denot = pre.member(name).accessibleFrom(pre)(refctx)
if (reallyExists(denot)) return pre.select(name, denot)
}
@@ -279,19 +286,27 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
if (result.exists) result
else { // find import
val curImport = ctx.importInfo
+ def updateUnimported() =
+ if (curImport.unimported.exists) unimported += curImport.unimported
if (ctx.owner.is(Package) && curImport != null && curImport.isRootImport && previous.exists)
previous // no more conflicts possible in this case
- else if (isPossibleImport(namedImport) && (curImport ne outer.importInfo) && !curImport.sym.isCompleting) {
- val namedImp = namedImportRef(curImport.site, curImport.selectors)
+ else if (isPossibleImport(namedImport) && (curImport ne outer.importInfo)) {
+ val namedImp = namedImportRef(curImport)
if (namedImp.exists)
findRef(checkNewOrShadowed(namedImp, namedImport), namedImport, ctx)(outer)
- else if (isPossibleImport(wildImport)) {
+ else if (isPossibleImport(wildImport) && !curImport.sym.isCompleting) {
val wildImp = wildImportRef(curImport)
if (wildImp.exists)
findRef(checkNewOrShadowed(wildImp, wildImport), wildImport, ctx)(outer)
- else loop(outer)
+ else {
+ updateUnimported()
+ loop(outer)
+ }
+ }
+ else {
+ updateUnimported()
+ loop(outer)
}
- else loop(outer)
}
else loop(outer)
}
@@ -311,11 +326,10 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
return typed(desugar.patternVar(tree), pt)
}
-
val rawType = {
- val saved1 = importedFromRoot
+ val saved1 = unimported
val saved2 = foundUnderScala2
- importedFromRoot = Set.empty
+ unimported = Set.empty
foundUnderScala2 = NoType
try {
var found = findRef(NoType, BindingPrec.nothingBound, NoContext)
@@ -329,7 +343,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
found
}
finally {
- importedFromRoot = saved1
+ unimported = saved1
foundUnderScala2 = saved2
}
}
@@ -337,10 +351,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val ownType =
if (rawType.exists)
ensureAccessible(rawType, superAccess = false, tree.pos)
- else {
- error(new MissingIdent(tree, kind, name.show), tree.pos)
- ErrorType
- }
+ else
+ errorType(new MissingIdent(tree, kind, name.show), tree.pos)
val tree1 = ownType match {
case ownType: NamedType if !prefixIsElidable(ownType) =>
@@ -440,6 +452,11 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
tree.tpt match {
case templ: untpd.Template =>
import untpd._
+ templ.parents foreach {
+ case parent: RefTree =>
+ typedAheadImpl(parent, tree => inferTypeParams(typedType(tree), pt))
+ case _ =>
+ }
val x = tpnme.ANON_CLASS
val clsDef = TypeDef(x, templ).withFlags(Final)
typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(x), Nil)), pt)
@@ -505,7 +522,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case tref: TypeRef if !tref.symbol.isClass && !ctx.isAfterTyper =>
inferImplicit(defn.ClassTagType.appliedTo(tref),
EmptyTree, tpt1.pos)(ctx.retractMode(Mode.Pattern)) match {
- case SearchSuccess(arg, _, _) =>
+ case SearchSuccess(arg, _, _, _) =>
return typed(untpd.Apply(untpd.TypedSplice(arg), tree.expr), pt)
case _ =>
}
@@ -576,7 +593,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
def typedBlockStats(stats: List[untpd.Tree])(implicit ctx: Context): (Context, List[tpd.Tree]) =
- (index(stats), typedStats(stats, ctx.owner))
+ (indexAndAnnotate(stats), typedStats(stats, ctx.owner))
def typedBlock(tree: untpd.Block, pt: Type)(implicit ctx: Context) = track("typedBlock") {
val (exprCtx, stats1) = typedBlockStats(tree.stats)
@@ -646,9 +663,13 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
def typedFunction(tree: untpd.Function, pt: Type)(implicit ctx: Context) = track("typedFunction") {
val untpd.Function(args, body) = tree
- if (ctx.mode is Mode.Type)
+ if (ctx.mode is Mode.Type) {
+ val funCls =
+ if (tree.isInstanceOf[untpd.ImplicitFunction]) defn.ImplicitFunctionClass(args.length)
+ else defn.FunctionClass(args.length)
typed(cpy.AppliedTypeTree(tree)(
- untpd.TypeTree(defn.FunctionClass(args.length).typeRef), args :+ body), pt)
+ untpd.TypeTree(funCls.typeRef), args :+ body), pt)
+ }
else {
val params = args.asInstanceOf[List[untpd.ValDef]]
@@ -1009,7 +1030,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(implicit ctx: Context): RefinedTypeTree = track("typedRefinedTypeTree") {
val tpt1 = if (tree.tpt.isEmpty) TypeTree(defn.ObjectType) else typedAheadType(tree.tpt)
- val refineClsDef = desugar.refinedTypeToClass(tpt1, tree.refinements)
+ val refineClsDef = desugar.refinedTypeToClass(tpt1, tree.refinements).withPos(tree.pos)
val refineCls = createSymbol(refineClsDef).asClass
val TypeDef(_, impl: Template) = typed(refineClsDef)
val refinements1 = impl.body
@@ -1038,7 +1059,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
if (hasNamedArg(args)) typedNamedArgs(args)
else {
if (args.length != tparams.length) {
- wrongNumberOfArgs(tpt1.tpe, "type", tparams, args, tree.pos)
+ wrongNumberOfTypeArgs(tpt1.tpe, tparams, args, tree.pos)
args = args.take(tparams.length)
}
def typedArg(arg: untpd.Tree, tparam: TypeParamInfo) = {
@@ -1058,7 +1079,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
def typedPolyTypeTree(tree: untpd.PolyTypeTree)(implicit ctx: Context): Tree = track("typedPolyTypeTree") {
val PolyTypeTree(tparams, body) = tree
- index(tparams)
+ indexAndAnnotate(tparams)
val tparams1 = tparams.mapconserve(typed(_).asInstanceOf[TypeDef])
val body1 = typedType(tree.body)
assignType(cpy.PolyTypeTree(tree)(tparams1, body1), tparams1, body1)
@@ -1121,7 +1142,17 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(implicit ctx: Context): Unit = {
// necessary to force annotation trees to be computed.
sym.annotations.foreach(_.ensureCompleted)
- val annotCtx = ctx.outersIterator.dropWhile(_.owner == sym).next
+ lazy val annotCtx = {
+ val c = ctx.outersIterator.dropWhile(_.owner == sym).next
+ c.property(ExprOwner) match {
+ case Some(exprOwner) if c.owner.isClass =>
+ // We need to evaluate annotation arguments in an expression context, since
+ // classes defined in a such arguments should not be entered into the
+ // enclosing class.
+ c.exprContext(mdef, exprOwner)
+ case _ => c
+ }
+ }
// necessary in order to mark the typed ahead annotations as definitely typed:
untpd.modsDeco(mdef).mods.annotations.foreach(typedAnnotation(_)(annotCtx))
}
@@ -1142,6 +1173,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
if (sym.is(Inline, butNot = DeferredOrParamAccessor))
checkInlineConformant(rhs1, em"right-hand side of inline $sym")
patchIfLazy(vdef1)
+ patchFinalVals(vdef1)
vdef1
}
@@ -1154,6 +1186,27 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
patch(Position(toUntyped(vdef).pos.start), "@volatile ")
}
+  /** Adds the Inline flag to final vals with an idempotent right-hand side.
+   *
+   *  This duplicates scalac behavior: for final vals whose right-hand side is a constant,
+   *  we do not create a field and instead return the value directly. This seemingly minor
+   *  optimization has a huge effect on initialization order and on the values that can be
+   *  observed during the superconstructor call.
+   *
+   *  See the remark about idempotency in PostTyper#normalizeTree.
+ */
+ private def patchFinalVals(vdef: ValDef)(implicit ctx: Context): Unit = {
+ def isFinalInlinableVal(sym: Symbol): Boolean = {
+ sym.is(Final, butNot = Mutable) &&
+ isIdempotentExpr(vdef.rhs) /* &&
+ ctx.scala2Mode (stay compatible with Scala2 for now) */
+ }
+ val sym = vdef.symbol
+ sym.info match {
+ case info: ConstantType if isFinalInlinableVal(sym) && !ctx.settings.YnoInline.value => sym.setFlag(Inline)
+ case _ =>
+ }
+ }
+
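
A small sketch of what `patchFinalVals` does (class and member names are made up): only members whose info is a `ConstantType` and whose right-hand side is idempotent receive the `Inline` flag.

    class Flags(val raw: Int) {
      final val Mask = 0xff        // ConstantType(255), idempotent rhs: flagged Inline, no field is emitted
      final val Bits = 8 * 4       // constant-folded to 32: same treatment
      final val Low  = raw & Mask  // type is Int, not a ConstantType: remains an ordinary field
    }
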
def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(implicit ctx: Context) = track("typedDefDef") {
val DefDef(name, tparams, vparamss, tpt, _) = ddef
completeAnnotations(ddef, sym)
@@ -1269,6 +1322,10 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
ctx.featureWarning(nme.dynamics.toString, "extension of type scala.Dynamic", isScala2Feature = true,
cls, isRequired, cdef.pos)
}
+
+ // check value class constraints
+ checkDerivedValueClass(cls, body1)
+
cdef1
// todo later: check that
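
For reference, a hedged example of the value-class constraints that `checkDerivedValueClass` (moved out of RefChecks in this change) enforces at this point in class typing; `Meters` is a hypothetical class.

    class Meters(val value: Double) extends AnyVal {
      def +(that: Meters): Meters = new Meters(value + that.value)  // ok: ordinary method
      // val doubled = value * 2              // error: may not define non-parameter field
      // def this(v: Int) = this(v.toDouble)  // error: may not define secondary constructor
    }
    // class Pair(val a: Int, val b: Int) extends AnyVal  // error: needs exactly one val parameter
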
@@ -1462,6 +1519,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case tree: untpd.If => typedIf(tree, pt)
case tree: untpd.Function => typedFunction(tree, pt)
case tree: untpd.Closure => typedClosure(tree, pt)
+ case tree: untpd.Import => typedImport(tree, retrieveSym(tree))
case tree: untpd.Match => typedMatch(tree, pt)
case tree: untpd.Return => typedReturn(tree)
case tree: untpd.Try => typedTry(tree, pt)
@@ -1489,9 +1547,13 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case _ => typedUnadapted(desugar(tree), pt)
}
- xtree match {
+ if (defn.isImplicitFunctionType(pt) &&
+ xtree.isTerm &&
+ !untpd.isImplicitClosure(xtree) &&
+ !ctx.isAfterTyper)
+ makeImplicitFunction(xtree, pt)
+ else xtree match {
case xtree: untpd.NameTree => typedNamed(encodeName(xtree), pt)
- case xtree: untpd.Import => typedImport(xtree, retrieveSym(xtree))
case xtree => typedUnnamed(xtree)
}
}
@@ -1500,6 +1562,14 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
protected def encodeName(tree: untpd.NameTree)(implicit ctx: Context): untpd.NameTree =
untpd.rename(tree, tree.name.encode)
+ protected def makeImplicitFunction(tree: untpd.Tree, pt: Type)(implicit ctx: Context): Tree = {
+ val defn.FunctionOf(formals, resType, true) = pt.dealias
+ val paramTypes = formals.map(fullyDefinedType(_, "implicit function parameter", tree.pos))
+ val ifun = desugar.makeImplicitFunction(paramTypes, tree)
+ typr.println(i"make implicit function $tree / $pt ---> $ifun")
+ typedUnadapted(ifun)
+ }
+
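
A hedged sketch of the implicit-function-type support these hooks add, assuming the `implicit A => B` surface syntax; `Ctx`, `currentUser`, `greet` and `page` are invented names.

    class Ctx(val user: String)
    def currentUser(implicit ctx: Ctx): String = ctx.user

    val greet: implicit Ctx => String = "hello, " + currentUser
    // the right-hand side is not an implicit closure, so makeImplicitFunction wraps it
    // into one; at use sites adaptation inserts `.apply` and resolves the Ctx implicitly:
    def page(implicit ctx: Ctx): String = greet
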
def typed(tree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): Tree = /*>|>*/ ctx.traceIndented (i"typing $tree", typr, show = true) /*<|<*/ {
assertPositioned(tree)
try adapt(typedUnadapted(tree, pt), pt, tree)
@@ -1540,7 +1610,11 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case nil =>
buf.toList
}
- traverse(stats)
+ val localCtx = {
+ val exprOwnerOpt = if (exprOwner == ctx.owner) None else Some(exprOwner)
+ ctx.withProperty(ExprOwner, exprOwnerOpt)
+ }
+ traverse(stats)(localCtx)
}
/** Given an inline method `mdef`, the method rewritten so that its body
@@ -1580,23 +1654,52 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
}
- /** Add apply node or implicit conversions. Two strategies are tried, and the first
- * that is successful is picked. If neither of the strategies are successful, continues with
- * `fallBack`.
+ /** Is `pt` a prototype of an `apply` selection, or a parameterless function yielding one? */
+ def isApplyProto(pt: Type)(implicit ctx: Context): Boolean = pt match {
+ case pt: SelectionProto => pt.name == nme.apply
+ case pt: FunProto => pt.args.isEmpty && isApplyProto(pt.resultType)
+ case pt: IgnoredProto => isApplyProto(pt.ignored)
+ case _ => false
+ }
+
+  /** Potentially add an apply node or implicit conversions. Before trying either,
+   *  if the function is applied to an empty argument list `()`, we try a
+   *
+   *  0th strategy: If `tree` overrides a nullary method, mark the prototype
+   *                so that the argument list is dropped and return `tree` itself.
+   *
+   *  After that, two strategies are tried, and the first that is successful is picked.
+   *  If neither strategy is successful, we continue with `fallBack`.
*
* 1st strategy: Try to insert `.apply` so that the result conforms to prototype `pt`.
+   *                This strategy is not tried if the prototype already represents
+   *                another `.apply` or `.apply()` selection.
+ *
* 2nd strategy: If tree is a select `qual.name`, try to insert an implicit conversion
* around the qualifier part `qual` so that the result conforms to the expected type
* with wildcard result type.
*/
- def tryInsertApplyOrImplicit(tree: Tree, pt: ProtoType)(fallBack: (Tree, TyperState) => Tree)(implicit ctx: Context): Tree =
- tryEither { implicit ctx =>
+ def tryInsertApplyOrImplicit(tree: Tree, pt: ProtoType)(fallBack: => Tree)(implicit ctx: Context): Tree = {
+
+ def tryApply(implicit ctx: Context) = {
val sel = typedSelect(untpd.Select(untpd.TypedSplice(tree), nme.apply), pt)
if (sel.tpe.isError) sel else adapt(sel, pt)
- } { (failedTree, failedState) =>
- tryInsertImplicitOnQualifier(tree, pt).getOrElse(fallBack(failedTree, failedState))
}
+ def tryImplicit =
+ tryInsertImplicitOnQualifier(tree, pt).getOrElse(fallBack)
+
+ pt match {
+ case pt @ FunProto(Nil, _, _)
+ if tree.symbol.allOverriddenSymbols.exists(_.info.isNullaryMethod) =>
+ pt.markAsDropped()
+ tree
+ case _ =>
+ if (isApplyProto(pt)) tryImplicit
+ else tryEither(tryApply(_))((_, _) => tryImplicit)
+ }
+ }
+
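
A hedged illustration of the two remaining strategies (all names invented): the 1st inserts `.apply` on the tree itself, the 2nd inserts an implicit conversion around the qualifier of a selection.

    class Adder { def apply(x: Int): Int = x + 1 }
    val add = new Adder
    add(3)          // 1st strategy: rewritten to add.apply(3)

    class Loud(val s: String) { def shout(): String = s.toUpperCase }
    implicit def loud(s: String): Loud = new Loud(s)
    "hi".shout()    // 2nd strategy: conversion inserted on the qualifier, i.e. loud("hi").shout()
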
/** If this tree is a select node `qual.name`, try to insert an implicit conversion
* `c` around `qual` so that `c(qual).name` conforms to `pt`.
*/
@@ -1688,7 +1791,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
def hasEmptyParams(denot: SingleDenotation) = denot.info.paramTypess == ListOfNil
pt match {
case pt: FunProto =>
- tryInsertApplyOrImplicit(tree, pt)((_, _) => noMatches)
+ tryInsertApplyOrImplicit(tree, pt)(noMatches)
case _ =>
if (altDenots exists (_.info.paramTypess == ListOfNil))
typed(untpd.Apply(untpd.TypedSplice(tree), Nil), pt)
@@ -1727,7 +1830,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case Apply(_, _) => " more"
case _ => ""
}
- (_, _) => errorTree(tree, em"$methodStr does not take$more parameters")
+ errorTree(tree, em"$methodStr does not take$more parameters")
}
}
@@ -1828,9 +1931,18 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
missingArgs
case _ =>
ctx.typeComparer.GADTused = false
- if (ctx.mode is Mode.Pattern) {
+ if (defn.isImplicitFunctionClass(wtp.underlyingClassRef(refinementOK = false).classSymbol) &&
+ !untpd.isImplicitClosure(tree) &&
+ !isApplyProto(pt) &&
+ !ctx.isAfterTyper) {
+        typr.println(i"insert apply on implicit $tree")
+ typed(untpd.Select(untpd.TypedSplice(tree), nme.apply), pt)
+ }
+ else if (ctx.mode is Mode.Pattern) {
tree match {
- case _: RefTree | _: Literal if !isVarPattern(tree) =>
+ case _: RefTree | _: Literal
+ if !isVarPattern(tree) &&
+ !(tree.tpe <:< pt)(ctx.addMode(Mode.GADTflexible)) =>
checkCanEqual(pt, wtp, tree.pos)(ctx.retractMode(Mode.Pattern))
case _ =>
}
@@ -1899,7 +2011,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
// try an implicit conversion
inferView(tree, pt) match {
- case SearchSuccess(inferred, _, _) =>
+ case SearchSuccess(inferred, _, _, _) =>
adapt(inferred, pt)
case failure: SearchFailure =>
if (pt.isInstanceOf[ProtoType] && !failure.isInstanceOf[AmbiguousImplicits]) tree
@@ -1915,10 +2027,25 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
else err.typeMismatch(tree1, pt)
}
+ /** If tree has an error type but no errors are reported yet, issue
+ * the error message stored in the type.
+ * One way this can happen is if implicit search causes symbols and types
+ * to be completed. The types are stored by `typedAhead` so that they can be
+ * retrieved later and thus avoid duplication of typechecking work.
+   *  If, however, the implicit search that caused the `typedAhead` fails locally while
+   *  another alternative succeeds, we can be left with an unreported ErrorType in the
+   *  tree. A scenario where this happens is i1802.scala.
+ */
+ def ensureReported(tp: Type) = tp match {
+ case err: ErrorType if !ctx.reporter.hasErrors => ctx.error(err.msg, tree.pos)
+ case _ =>
+ }
+
tree match {
case _: MemberDef | _: PackageDef | _: Import | _: WithoutTypeOrPos[_] => tree
case _ => tree.tpe.widen match {
- case _: ErrorType =>
+ case tp: FlexType =>
+ ensureReported(tp)
tree
case ref: TermRef =>
pt match {
@@ -1932,21 +2059,19 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
if (pt.isInstanceOf[PolyProto]) tree
else {
var typeArgs = tree match {
- case Select(qual, nme.CONSTRUCTOR) => qual.tpe.widenDealias.argTypesLo
+ case Select(qual, nme.CONSTRUCTOR) => qual.tpe.widenDealias.argTypesLo.map(TypeTree)
case _ => Nil
}
if (typeArgs.isEmpty) typeArgs = constrained(poly, tree)._2
convertNewGenericArray(
- adaptInterpolated(tree.appliedToTypes(typeArgs), pt, original))
+ adaptInterpolated(tree.appliedToTypeTrees(typeArgs), pt, original))
}
case wtp =>
pt match {
case pt: FunProto =>
adaptToArgs(wtp, pt)
case pt: PolyProto =>
- tryInsertApplyOrImplicit(tree, pt) {
- (_, _) => tree // error will be reported in typedTypeApply
- }
+ tryInsertApplyOrImplicit(tree, pt)(tree) // error will be reported in typedTypeApply
case _ =>
if (ctx.mode is Mode.Type) adaptType(tree.tpe)
else adaptNoArgs(wtp)
diff --git a/compiler/src/dotty/tools/dotc/util/Chars.scala b/compiler/src/dotty/tools/dotc/util/Chars.scala
index bae3b4732..6f95b87c4 100644
--- a/compiler/src/dotty/tools/dotc/util/Chars.scala
+++ b/compiler/src/dotty/tools/dotc/util/Chars.scala
@@ -6,7 +6,6 @@ package dotty.tools.dotc
package util
import scala.annotation.switch
-import java.lang.{ Character => JCharacter }
import java.lang.{Character => JCharacter}
import java.lang.Character.LETTER_NUMBER
import java.lang.Character.LOWERCASE_LETTER
@@ -66,16 +65,16 @@ object Chars {
/** Can character start an alphanumeric Scala identifier? */
def isIdentifierStart(c: Char): Boolean =
- (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c)
+ (c == '_') || (c == '$') || JCharacter.isUnicodeIdentifierStart(c)
/** Can character form part of an alphanumeric Scala identifier? */
def isIdentifierPart(c: Char) =
- (c == '$') || Character.isUnicodeIdentifierPart(c)
+ (c == '$') || JCharacter.isUnicodeIdentifierPart(c)
/** Is character a math or other symbol in Unicode? */
def isSpecial(c: Char) = {
- val chtp = Character.getType(c)
- chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
+ val chtp = JCharacter.getType(c)
+ chtp == JCharacter.MATH_SYMBOL.toInt || chtp == JCharacter.OTHER_SYMBOL.toInt
}
private final val otherLetters = Set[Char]('\u0024', '\u005F') // '$' and '_'
diff --git a/compiler/test/dotc/scala-collections.blacklist b/compiler/test/dotc/scala-collections.blacklist
new file mode 100644
index 000000000..321814ab0
--- /dev/null
+++ b/compiler/test/dotc/scala-collections.blacklist
@@ -0,0 +1,83 @@
+## Errors having to do with bootstrap
+
+../scala-scala/src/library/scala/Function1.scala
+../scala-scala/src/library/scala/Function2.scala
+# Cyclic reference because of @specialized annotation
+
+
+## Errors having to do with deep subtypes
+
+../scala-scala/src/library/scala/collection/generic/ParSetFactory.scala
+# This gives a deep subtype violation when run with the rest of the whitelist.
+# Works without -Yno-deep-subtypes, though.
+
+../scala-scala/src/library/scala/collection/parallel/mutable/ParMap.scala
+# -Yno-deep-subtypes fails
+
+../scala-scala/src/library/scala/collection/parallel/ParMap.scala
+# -Yno-deep-subtypes fails
+
+../scala-scala/src/library/scala/collection/parallel/ParMapLike.scala
+# -Yno-deep-subtypes fails
+
+
+
+## Ycheck failures, presumably linked to TailCalls
+
+../scala-scala/src/library/scala/collection/parallel/ParIterableLike.scala
+# -Ycheck:classOf fails
+
+../scala-scala/src/library/scala/collection/parallel/ParSeqLike.scala
+# -Ycheck:classOf fails
+
+../scala-scala/src/library/scala/util/control/TailCalls.scala
+# -Ycheck:classOf fails
+
+
+
+## Errors having to do with unavailable APIs or language features:
+
+../scala-scala/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
+# 51 | import Manifest._
+# | ^^^^^^^^
+# | not found: Manifest
+
+../scala-scala/src/library/scala/reflect/ClassTag.scala
+# 124 | val Short : ClassTag[scala.Short] = Manifest.Short
+# | ^^^^^^^^
+# | not found: Manifest
+
+../scala-scala/src/library/scala/reflect/Manifest.scala
+# 104 | private def readResolve(): Any = Manifest.Short
+# | ^^^^^^^^
+# | not found: Manifest
+
+../scala-scala/src/library/scala/text/Document.scala
+# Lots of type errors for pattern matches, having to do with the fact
+# that Document contains a :: method without corresponding extractor,
+# but still wants to extract lists using ::. We won't support that.
+# Since Document should have been removed already, let's ignore it.
+
+../scala-scala/src/library/scala/AnyVal.scala
+# 55 |abstract class AnyVal extends Any {
+# |^
+# |illegal redefinition of standard class AnyVal
+# (This is intended)
+
+../scala-scala/src/library/scala/collection/parallel/Tasks.scala
+# java.lang.StackOverflowError
+
+../scala-scala/src/library/scala/reflect/package.scala
+# 63 | private[scala] def materializeClassTag[T](): ClassTag[T] = macro ???
+# | ^^^^^
+# | not found: macro
+
+../scala-scala/src/library/scala/StringContext.scala
+# 168 | def f[A >: Any](args: A*): String = macro ???
+# | ^^^^^
+# | not found: macro
+
+../scala-scala/src/library/scala/util/control/Exception.scala
+# 51 | implicit def throwableSubtypeToCatcher[Ex <: Throwable: ClassTag, T](pf: PartialFunction[Ex, T]) =
+# | ^
+# | cyclic reference involving method mkCatcher
diff --git a/compiler/test/dotc/scala-collections.whitelist b/compiler/test/dotc/scala-collections.whitelist
index bb62b260a..57c71d647 100644
--- a/compiler/test/dotc/scala-collections.whitelist
+++ b/compiler/test/dotc/scala-collections.whitelist
@@ -7,6 +7,8 @@
../scala-scala/src/library/scala/runtime/RichInt.scala
../scala-scala/src/library/scala/runtime/RichLong.scala
../scala-scala/src/library/scala/runtime/RichShort.scala
+../scala-scala/src/library/scala/runtime/Tuple2Zipped.scala
+../scala-scala/src/library/scala/runtime/Tuple3Zipped.scala
../scala-scala/src/library/scala/Array.scala
../scala-scala/src/library/scala/NotImplementedError.scala
../scala-scala/src/library/scala/AnyValCompanion.scala
@@ -62,6 +64,7 @@
../scala-scala/src/library/scala/Serializable.scala
../scala-scala/src/library/scala/Specializable.scala
../scala-scala/src/library/scala/Symbol.scala
+#../scala-scala/src/library/scala/StringContext.scala
../scala-scala/src/library/scala/UninitializedError.scala
../scala-scala/src/library/scala/UninitializedFieldError.scala
../scala-scala/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -86,6 +89,14 @@
../scala-scala/src/library/scala/collection/mutable/BufferLike.scala
../scala-scala/src/library/scala/collection/mutable/ArrayBuilder.scala
+../scala-scala/src/library/scala/collection/mutable/ObservableBuffer.scala
+../scala-scala/src/library/scala/collection/mutable/DefaultEntry.scala
+../scala-scala/src/library/scala/collection/mutable/LinkedHashMap.scala
+../scala-scala/src/library/scala/collection/mutable/ObservableMap.scala
+../scala-scala/src/library/scala/collection/mutable/ObservableSet.scala
+../scala-scala/src/library/scala/collection/mutable/SynchronizedQueue.scala
+../scala-scala/src/library/scala/collection/mutable/UnrolledBuffer.scala
+../scala-scala/src/library/scala/collection/mutable/SynchronizedBuffer.scala
../scala-scala/src/library/scala/collection/immutable/Stack.scala
../scala-scala/src/library/scala/collection/immutable/StringLike.scala
@@ -264,10 +275,9 @@
../scala-scala/src/library/scala/collection/generic/SetFactory.scala
../scala-scala/src/library/scala/collection/generic/ParFactory.scala
-# https://github.com/lampepfl/dotty/issues/974 -> @smarter
../scala-scala/src/library/scala/collection/generic/MutableSortedSetFactory.scala
-# cyclic reference, maybe related to #974 -> @smarter
+# deep subtype
#../scala-scala/src/library/scala/collection/generic/ParSetFactory.scala
../scala-scala/src/library/scala/collection/generic/OrderedTraversableFactory.scala
@@ -280,3 +290,339 @@
../scala-scala/src/library/scala/collection/generic/Subtractable.scala
../scala-scala/src/library/scala/collection/generic/TraversableFactory.scala
../scala-scala/src/library/scala/collection/generic/package.scala
+
+../scala-scala/src/library/scala/util/Try.scala
+
+#../scala-scala/src/library/scala/util/control/Exception.scala
+../scala-scala/src/library/scala/util/control/Breaks.scala
+../scala-scala/src/library/scala/util/control/ControlThrowable.scala
+../scala-scala/src/library/scala/util/control/NonFatal.scala
+../scala-scala/src/library/scala/util/control/NoStackTrace.scala
+../scala-scala/src/library/scala/util/DynamicVariable.scala
+../scala-scala/src/library/scala/util/Either.scala
+../scala-scala/src/library/scala/util/hashing/Hashing.scala
+../scala-scala/src/library/scala/util/hashing/ByteswapHashing.scala
+../scala-scala/src/library/scala/util/hashing/MurmurHash3.scala
+../scala-scala/src/library/scala/util/hashing/package.scala
+../scala-scala/src/library/scala/util/matching/Regex.scala
+../scala-scala/src/library/scala/util/MurmurHash.scala
+../scala-scala/src/library/scala/util/Properties.scala
+../scala-scala/src/library/scala/util/Random.scala
+../scala-scala/src/library/scala/util/Sorting.scala
+
+../scala-scala/src/library/scala/collection/mutable/AnyRefMap.scala
+../scala-scala/src/library/scala/collection/mutable/ArrayBuffer.scala
+../scala-scala/src/library/scala/collection/mutable/ArrayLike.scala
+../scala-scala/src/library/scala/collection/mutable/ArrayOps.scala
+../scala-scala/src/library/scala/collection/mutable/ArraySeq.scala
+../scala-scala/src/library/scala/collection/mutable/ArrayStack.scala
+../scala-scala/src/library/scala/collection/mutable/AVLTree.scala
+../scala-scala/src/library/scala/collection/mutable/BitSet.scala
+../scala-scala/src/library/scala/collection/mutable/Buffer.scala
+../scala-scala/src/library/scala/collection/mutable/BufferProxy.scala
+../scala-scala/src/library/scala/collection/mutable/Cloneable.scala
+../scala-scala/src/library/scala/collection/mutable/DefaultMapModel.scala
+../scala-scala/src/library/scala/collection/mutable/DoubleLinkedList.scala
+../scala-scala/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+../scala-scala/src/library/scala/collection/mutable/FlatHashTable.scala
+../scala-scala/src/library/scala/collection/mutable/HashEntry.scala
+../scala-scala/src/library/scala/collection/mutable/HashMap.scala
+../scala-scala/src/library/scala/collection/mutable/HashSet.scala
+../scala-scala/src/library/scala/collection/mutable/HashTable.scala
+../scala-scala/src/library/scala/collection/mutable/History.scala
+../scala-scala/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
+../scala-scala/src/library/scala/collection/mutable/IndexedSeqLike.scala
+../scala-scala/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
+../scala-scala/src/library/scala/collection/mutable/Iterable.scala
+../scala-scala/src/library/scala/collection/mutable/LazyBuilder.scala
+../scala-scala/src/library/scala/collection/mutable/LinearSeq.scala
+../scala-scala/src/library/scala/collection/mutable/LinkedEntry.scala
+../scala-scala/src/library/scala/collection/mutable/LinkedHashSet.scala
+../scala-scala/src/library/scala/collection/mutable/LinkedList.scala
+../scala-scala/src/library/scala/collection/mutable/LinkedListLike.scala
+../scala-scala/src/library/scala/collection/mutable/ListMap.scala
+../scala-scala/src/library/scala/collection/mutable/LongMap.scala
+../scala-scala/src/library/scala/collection/mutable/Map.scala
+../scala-scala/src/library/scala/collection/mutable/MapBuilder.scala
+../scala-scala/src/library/scala/collection/mutable/MapLike.scala
+../scala-scala/src/library/scala/collection/mutable/MapProxy.scala
+../scala-scala/src/library/scala/collection/mutable/MultiMap.scala
+../scala-scala/src/library/scala/collection/mutable/MutableList.scala
+../scala-scala/src/library/scala/collection/mutable/OpenHashMap.scala
+../scala-scala/src/library/scala/collection/mutable/PriorityQueue.scala
+../scala-scala/src/library/scala/collection/mutable/PriorityQueueProxy.scala
+../scala-scala/src/library/scala/collection/mutable/Publisher.scala
+../scala-scala/src/library/scala/collection/mutable/Queue.scala
+../scala-scala/src/library/scala/collection/mutable/QueueProxy.scala
+../scala-scala/src/library/scala/collection/mutable/ResizableArray.scala
+../scala-scala/src/library/scala/collection/mutable/RevertibleHistory.scala
+../scala-scala/src/library/scala/collection/mutable/Seq.scala
+../scala-scala/src/library/scala/collection/mutable/SeqLike.scala
+../scala-scala/src/library/scala/collection/mutable/Set.scala
+../scala-scala/src/library/scala/collection/mutable/SetBuilder.scala
+../scala-scala/src/library/scala/collection/mutable/SetLike.scala
+../scala-scala/src/library/scala/collection/mutable/SetProxy.scala
+../scala-scala/src/library/scala/collection/mutable/SortedSet.scala
+../scala-scala/src/library/scala/collection/mutable/Stack.scala
+../scala-scala/src/library/scala/collection/mutable/StackProxy.scala
+../scala-scala/src/library/scala/collection/mutable/StringBuilder.scala
+../scala-scala/src/library/scala/collection/mutable/Subscriber.scala
+../scala-scala/src/library/scala/collection/mutable/SynchronizedMap.scala
+../scala-scala/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
+../scala-scala/src/library/scala/collection/mutable/SynchronizedSet.scala
+../scala-scala/src/library/scala/collection/mutable/SynchronizedStack.scala
+../scala-scala/src/library/scala/collection/mutable/Traversable.scala
+../scala-scala/src/library/scala/collection/mutable/TreeSet.scala
+../scala-scala/src/library/scala/collection/mutable/Undoable.scala
+../scala-scala/src/library/scala/collection/mutable/WeakHashMap.scala
+../scala-scala/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+
+../scala-scala/src/library/scala/collection/convert/DecorateAsJava.scala
+../scala-scala/src/library/scala/collection/convert/DecorateAsScala.scala
+../scala-scala/src/library/scala/collection/convert/Decorators.scala
+../scala-scala/src/library/scala/collection/convert/package.scala
+../scala-scala/src/library/scala/collection/convert/WrapAsJava.scala
+../scala-scala/src/library/scala/collection/convert/WrapAsScala.scala
+../scala-scala/src/library/scala/collection/convert/Wrappers.scala
+
+../scala-scala/src/library/scala/collection/concurrent/Map.scala
+../scala-scala/src/library/scala/collection/concurrent/TrieMap.scala
+
+../scala-scala/src/library/scala/collection/parallel/immutable/package.scala
+../scala-scala/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+../scala-scala/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+../scala-scala/src/library/scala/collection/parallel/immutable/ParIterable.scala
+../scala-scala/src/library/scala/collection/parallel/immutable/ParMap.scala
+../scala-scala/src/library/scala/collection/parallel/immutable/ParRange.scala
+../scala-scala/src/library/scala/collection/parallel/immutable/ParSeq.scala
+../scala-scala/src/library/scala/collection/parallel/immutable/ParSet.scala
+../scala-scala/src/library/scala/collection/parallel/immutable/ParVector.scala
+
+../scala-scala/src/library/scala/compat/Platform.scala
+
+../scala-scala/src/library/scala/sys/package.scala
+../scala-scala/src/library/scala/sys/SystemProperties.scala
+../scala-scala/src/library/scala/sys/Prop.scala
+../scala-scala/src/library/scala/sys/PropImpl.scala
+../scala-scala/src/library/scala/sys/BooleanProp.scala
+../scala-scala/src/library/scala/sys/ShutdownHookThread.scala
+../scala-scala/src/library/scala/sys/process/BasicIO.scala
+../scala-scala/src/library/scala/sys/process/package.scala
+../scala-scala/src/library/scala/sys/process/Process.scala
+../scala-scala/src/library/scala/sys/process/ProcessBuilder.scala
+../scala-scala/src/library/scala/sys/process/ProcessBuilderImpl.scala
+../scala-scala/src/library/scala/sys/process/ProcessImpl.scala
+../scala-scala/src/library/scala/sys/process/ProcessIO.scala
+../scala-scala/src/library/scala/sys/process/ProcessLogger.scala
+
+../scala-scala/src/library/scala/beans/BeanDescription.scala
+../scala-scala/src/library/scala/beans/BeanDisplayName.scala
+../scala-scala/src/library/scala/beans/BeanInfo.scala
+../scala-scala/src/library/scala/beans/BeanInfoSkip.scala
+../scala-scala/src/library/scala/beans/BeanProperty.scala
+../scala-scala/src/library/scala/beans/BooleanBeanProperty.scala
+../scala-scala/src/library/scala/beans/ScalaBeanInfo.scala
+
+../scala-scala/src/library/scala/io/AnsiColor.scala
+../scala-scala/src/library/scala/io/Codec.scala
+../scala-scala/src/library/scala/io/Position.scala
+../scala-scala/src/library/scala/io/StdIn.scala
+../scala-scala/src/library/scala/io/BufferedSource.scala
+../scala-scala/src/library/scala/io/Source.scala
+
+../scala-scala/src/library/scala/math/BigDecimal.scala
+../scala-scala/src/library/scala/math/BigInt.scala
+../scala-scala/src/library/scala/math/PartiallyOrdered.scala
+
+../scala-scala/src/library/scala/ref/PhantomReference.scala
+../scala-scala/src/library/scala/ref/Reference.scala
+../scala-scala/src/library/scala/ref/ReferenceQueue.scala
+../scala-scala/src/library/scala/ref/ReferenceWrapper.scala
+../scala-scala/src/library/scala/ref/SoftReference.scala
+../scala-scala/src/library/scala/ref/WeakReference.scala
+
+../scala-scala/src/library/scala/reflect/macros/internal/macroImpl.scala
+../scala-scala/src/library/scala/reflect/NoManifest.scala
+../scala-scala/src/library/scala/reflect/OptManifest.scala
+../scala-scala/src/library/scala/reflect/NameTransformer.scala
+#../scala-scala/src/library/scala/reflect/package.scala
+
+../scala-scala/src/library/scala/Responder.scala
+
+../scala-scala/src/library/scala/collection/script/Location.scala
+../scala-scala/src/library/scala/collection/script/Message.scala
+../scala-scala/src/library/scala/collection/script/Scriptable.scala
+
+../scala-scala/src/library/scala/concurrent/package.scala
+../scala-scala/src/library/scala/concurrent/Future.scala
+../scala-scala/src/library/scala/concurrent/Awaitable.scala
+../scala-scala/src/library/scala/concurrent/BatchingExecutor.scala
+../scala-scala/src/library/scala/concurrent/BlockContext.scala
+../scala-scala/src/library/scala/concurrent/Channel.scala
+../scala-scala/src/library/scala/concurrent/DelayedLazyVal.scala
+../scala-scala/src/library/scala/concurrent/duration/Deadline.scala
+../scala-scala/src/library/scala/concurrent/duration/Duration.scala
+../scala-scala/src/library/scala/concurrent/duration/DurationConversions.scala
+../scala-scala/src/library/scala/concurrent/duration/package.scala
+../scala-scala/src/library/scala/concurrent/ExecutionContext.scala
+../scala-scala/src/library/scala/concurrent/FutureTaskRunner.scala
+../scala-scala/src/library/scala/concurrent/impl/Future.scala
+../scala-scala/src/library/scala/concurrent/impl/Promise.scala
+../scala-scala/src/library/scala/concurrent/JavaConversions.scala
+../scala-scala/src/library/scala/concurrent/Lock.scala
+../scala-scala/src/library/scala/concurrent/ManagedBlocker.scala
+../scala-scala/src/library/scala/concurrent/Promise.scala
+../scala-scala/src/library/scala/concurrent/SyncVar.scala
+../scala-scala/src/library/scala/concurrent/TaskRunner.scala
+../scala-scala/src/library/scala/concurrent/ThreadPoolRunner.scala
+../scala-scala/src/library/scala/concurrent/SyncChannel.scala
+../scala-scala/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+
+../scala-scala/src/library/scala/collection/parallel/package.scala
+../scala-scala/src/library/scala/collection/parallel/ParIterable.scala
+#../scala-scala/src/library/scala/collection/parallel/ParMap.scala
+#../scala-scala/src/library/scala/collection/parallel/ParMapLike.scala
+#../scala-scala/src/library/scala/collection/parallel/ParIterableLike.scala
+#../scala-scala/src/library/scala/collection/parallel/ParSeqLike.scala
+../scala-scala/src/library/scala/collection/parallel/Combiner.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+../scala-scala/src/library/scala/collection/parallel/RemainsIterator.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/package.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ParIterable.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ParSeq.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ParSet.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+../scala-scala/src/library/scala/collection/parallel/mutable/ParArray.scala
+#../scala-scala/src/library/scala/collection/parallel/mutable/ParMap.scala
+../scala-scala/src/library/scala/collection/parallel/ParSeq.scala
+../scala-scala/src/library/scala/collection/parallel/ParSet.scala
+../scala-scala/src/library/scala/collection/parallel/ParSetLike.scala
+../scala-scala/src/library/scala/collection/parallel/PreciseSplitter.scala
+../scala-scala/src/library/scala/collection/parallel/Splitter.scala
+../scala-scala/src/library/scala/collection/parallel/TaskSupport.scala
+#../scala-scala/src/library/scala/collection/parallel/Tasks.scala
+
+../scala-scala/src/library/scala/Console.scala
+../scala-scala/src/library/scala/Enumeration.scala
+
+../scala-scala/src/library/scala/annotation/Annotation.scala
+../scala-scala/src/library/scala/annotation/bridge.scala
+../scala-scala/src/library/scala/annotation/ClassfileAnnotation.scala
+../scala-scala/src/library/scala/annotation/compileTimeOnly.scala
+../scala-scala/src/library/scala/annotation/elidable.scala
+../scala-scala/src/library/scala/annotation/implicitNotFound.scala
+../scala-scala/src/library/scala/annotation/meta/beanGetter.scala
+../scala-scala/src/library/scala/annotation/meta/beanSetter.scala
+../scala-scala/src/library/scala/annotation/meta/companionClass.scala
+../scala-scala/src/library/scala/annotation/meta/companionMethod.scala
+../scala-scala/src/library/scala/annotation/meta/companionObject.scala
+../scala-scala/src/library/scala/annotation/meta/field.scala
+../scala-scala/src/library/scala/annotation/meta/getter.scala
+../scala-scala/src/library/scala/annotation/meta/languageFeature.scala
+../scala-scala/src/library/scala/annotation/meta/package.scala
+../scala-scala/src/library/scala/annotation/meta/param.scala
+../scala-scala/src/library/scala/annotation/meta/setter.scala
+../scala-scala/src/library/scala/annotation/migration.scala
+../scala-scala/src/library/scala/annotation/StaticAnnotation.scala
+../scala-scala/src/library/scala/annotation/strictfp.scala
+../scala-scala/src/library/scala/annotation/switch.scala
+../scala-scala/src/library/scala/annotation/tailrec.scala
+../scala-scala/src/library/scala/annotation/TypeConstraint.scala
+../scala-scala/src/library/scala/annotation/unchecked/uncheckedStable.scala
+../scala-scala/src/library/scala/annotation/unchecked/uncheckedVariance.scala
+../scala-scala/src/library/scala/annotation/unspecialized.scala
+../scala-scala/src/library/scala/annotation/varargs.scala
+
+../scala-scala/src/library/scala/runtime/AbstractFunction0.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction1.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction10.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction11.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction12.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction13.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction14.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction15.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction16.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction17.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction18.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction19.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction2.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction20.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction21.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction22.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction3.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction4.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction5.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction6.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction7.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction8.scala
+../scala-scala/src/library/scala/runtime/AbstractFunction9.scala
+../scala-scala/src/library/scala/runtime/AbstractPartialFunction.scala
+../scala-scala/src/library/scala/runtime/Boxed.scala
+../scala-scala/src/library/scala/runtime/MethodCache.scala
+../scala-scala/src/library/scala/runtime/NonLocalReturnControl.scala
+../scala-scala/src/library/scala/runtime/Nothing$.scala
+../scala-scala/src/library/scala/runtime/Null$.scala
+../scala-scala/src/library/scala/runtime/package.scala
+../scala-scala/src/library/scala/runtime/ScalaNumberProxy.scala
+../scala-scala/src/library/scala/runtime/ScalaRunTime.scala
+../scala-scala/src/library/scala/runtime/SeqCharSequence.scala
+../scala-scala/src/library/scala/runtime/StringAdd.scala
+../scala-scala/src/library/scala/runtime/StringFormat.scala
+
+../scala-scala/src/library/scala/App.scala
+
+../scala-scala/src/library/scala/Function.scala
+../scala-scala/src/library/scala/Function0.scala
+#../scala-scala/src/library/scala/Function1.scala
+#../scala-scala/src/library/scala/Function2.scala
+../scala-scala/src/library/scala/Function3.scala
+../scala-scala/src/library/scala/Function4.scala
+../scala-scala/src/library/scala/Function5.scala
+../scala-scala/src/library/scala/Function6.scala
+../scala-scala/src/library/scala/Function7.scala
+../scala-scala/src/library/scala/Function8.scala
+../scala-scala/src/library/scala/Function9.scala
+../scala-scala/src/library/scala/Function10.scala
+../scala-scala/src/library/scala/Function11.scala
+../scala-scala/src/library/scala/Function12.scala
+../scala-scala/src/library/scala/Function13.scala
+../scala-scala/src/library/scala/Function14.scala
+../scala-scala/src/library/scala/Function15.scala
+../scala-scala/src/library/scala/Function16.scala
+../scala-scala/src/library/scala/Function17.scala
+../scala-scala/src/library/scala/Function18.scala
+../scala-scala/src/library/scala/Function19.scala
+../scala-scala/src/library/scala/Function20.scala
+../scala-scala/src/library/scala/Function21.scala
+../scala-scala/src/library/scala/Function22.scala
+
+../scala-scala/src/library/scala/Tuple1.scala
+../scala-scala/src/library/scala/Tuple2.scala
+../scala-scala/src/library/scala/Tuple3.scala
+../scala-scala/src/library/scala/Tuple4.scala
+../scala-scala/src/library/scala/Tuple5.scala
+../scala-scala/src/library/scala/Tuple6.scala
+../scala-scala/src/library/scala/Tuple7.scala
+../scala-scala/src/library/scala/Tuple8.scala
+../scala-scala/src/library/scala/Tuple9.scala
+../scala-scala/src/library/scala/Tuple10.scala
+../scala-scala/src/library/scala/Tuple11.scala
+../scala-scala/src/library/scala/Tuple12.scala
+../scala-scala/src/library/scala/Tuple13.scala
+../scala-scala/src/library/scala/Tuple14.scala
+../scala-scala/src/library/scala/Tuple15.scala
+../scala-scala/src/library/scala/Tuple16.scala
+../scala-scala/src/library/scala/Tuple17.scala
+../scala-scala/src/library/scala/Tuple18.scala
+../scala-scala/src/library/scala/Tuple19.scala
+../scala-scala/src/library/scala/Tuple20.scala
+../scala-scala/src/library/scala/Tuple21.scala
+../scala-scala/src/library/scala/Tuple22.scala
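The listing above is the new tail of the stdlib whitelist: blank lines group packages and a leading '#' disables an entry. A minimal, self-contained sketch of how such a list can be parsed (hypothetical object name; it simply mirrors the loadList helper introduced in tests.scala below):

import scala.io.Source

object WhitelistReader {
  // Read a whitelist: skip blank lines, drop lines starting with '#',
  // and strip trailing '#' comments from the remaining entries.
  def loadList(path: String): List[String] =
    Source.fromFile(path, "UTF8").getLines()
      .map(_.trim)
      .filter(!_.startsWith("#"))
      .map(_.takeWhile(_ != '#').trim)
      .filter(_.nonEmpty)
      .toList

  def main(args: Array[String]): Unit =
    loadList("./test/dotc/scala-collections.whitelist").foreach(println)
}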
diff --git a/compiler/test/dotc/tests.scala b/compiler/test/dotc/tests.scala
index b389c6ae9..a7961d937 100644
--- a/compiler/test/dotc/tests.scala
+++ b/compiler/test/dotc/tests.scala
@@ -3,6 +3,7 @@ package dotc
import dotty.Jars
import dotty.tools.dotc.CompilerTest
import org.junit.{Before, Test}
+import org.junit.Assert._
import java.io.{ File => JFile }
import scala.reflect.io.Directory
@@ -84,6 +85,7 @@ class tests extends CompilerTest {
val runDir = testsDir + "run/"
val newDir = testsDir + "new/"
val replDir = testsDir + "repl/"
+ val javaDir = testsDir + "pos-java-interop/"
val sourceDir = "./src/"
val dottyDir = sourceDir + "dotty/"
@@ -96,6 +98,9 @@ class tests extends CompilerTest {
val typerDir = dotcDir + "typer/"
val libDir = "../library/src/"
+ def dottyBootedLib = compileDir(libDir, ".", List("-deep", "-Ycheck-reentrant", "-strict") ::: defaultOptions)(allowDeepSubtypes) // note the -deep argument
+ def dottyDependsOnBootedLib = compileDir(dottyDir, ".", List("-deep", "-Ycheck-reentrant", "-strict") ::: defaultOptions)(allowDeepSubtypes) // note the -deep argument
+
@Before def cleanup(): Unit = {
// remove class files from stdlib and tests compilation
Directory(defaultOutputDir + "scala").deleteRecursively()
@@ -196,12 +201,49 @@ class tests extends CompilerTest {
@Test def run_all = runFiles(runDir)
- val stdlibFiles = Source.fromFile("./test/dotc/scala-collections.whitelist", "UTF8").getLines()
- .map(_.trim) // allow identation
- .filter(!_.startsWith("#")) // allow comment lines prefixed by #
- .map(_.takeWhile(_ != '#').trim) // allow comments in the end of line
- .filter(_.nonEmpty)
- .toList
+ def loadList(path: String) = Source.fromFile(path, "UTF8").getLines()
+    .map(_.trim) // allow indentation
+ .filter(!_.startsWith("#")) // allow comment lines prefixed by #
+    .map(_.takeWhile(_ != '#').trim) // allow comments at the end of a line
+ .filter(_.nonEmpty)
+ .toList
+
+ private def stdlibWhitelistFile = "./test/dotc/scala-collections.whitelist"
+ private def stdlibBlackFile = "./test/dotc/scala-collections.blacklist"
+
+ private val stdlibFiles: List[String] = loadList(stdlibWhitelistFile)
+
+ @Test def checkWBLists = {
+ val stdlibFilesBlackListed = loadList(stdlibBlackFile)
+
+ def checkForRepeated(list: List[String], listFile: String) = {
+      val duplicates = list.groupBy(x => x).filter(_._2.size > 1)
+ val msg = duplicates.map(x => s"'${x._1}' appears ${x._2.size} times").mkString(s"Duplicate entries in $listFile:\n", "\n", "\n")
+ assertTrue(msg, duplicates.isEmpty)
+ }
+ checkForRepeated(stdlibFiles, stdlibWhitelistFile)
+ checkForRepeated(stdlibFilesBlackListed, stdlibBlackFile)
+
+ val whitelistSet = stdlibFiles.toSet
+ val blacklistSet = stdlibFilesBlackListed.toSet
+
+ val intersection = whitelistSet.intersect(blacklistSet)
+ val msgIntersection =
+      intersection.map(x => s"'$x'").mkString(s"Entries were found in both $stdlibWhitelistFile and $stdlibBlackFile:\n", "\n", "\n")
+ assertTrue(msgIntersection, intersection.isEmpty)
+
+ def collectAllFilesInDir(dir: JFile, acc: List[String]): List[String] = {
+ val files = dir.listFiles()
+ val acc2 = files.foldLeft(acc)((acc1, file) => if (file.isFile && file.getPath.endsWith(".scala")) file.getPath :: acc1 else acc1)
+ files.foldLeft(acc2)((acc3, file) => if (file.isDirectory) collectAllFilesInDir(file, acc3) else acc3)
+ }
+ val filesInStdLib = collectAllFilesInDir(new JFile("../scala-scala/src/library/"), Nil)
+ val missingFiles = filesInStdLib.toSet -- whitelistSet -- blacklistSet
+ val msgMissing =
+      missingFiles.map(x => s"'$x'").mkString(s"Entries are missing from both $stdlibWhitelistFile and $stdlibBlackFile:\n", "\n", "\n")
+ assertTrue(msgMissing, missingFiles.isEmpty)
+ }
+
@Test def compileStdLib = compileList("compileStdLib", stdlibFiles, "-migration" :: "-Yno-inline" :: scala2mode)
@Test def compileMixed = compileLine(
@@ -214,11 +256,10 @@ class tests extends CompilerTest {
|../scala-scala/src/library/scala/collection/generic/GenSeqFactory.scala""".stripMargin)
@Test def compileIndexedSeq = compileLine("../scala-scala/src/library/scala/collection/immutable/IndexedSeq.scala")
- // Not a junit test anymore since it is order dependent
- def dottyBootedLib = compileDir(libDir, ".")(allowDeepSubtypes) // note the -deep argument
-
- // Not a junit test anymore since it is order dependent
- def dottyDependsOnBootedLib = compileDir(dottyDir, ".")(allowDeepSubtypes) // note the -deep argument
+ @Test def dotty = {
+ dottyBootedLib
+ dottyDependsOnBootedLib
+ }
@Test def dotc_ast = compileDir(dotcDir, "ast")
@Test def dotc_config = compileDir(dotcDir, "config")
@@ -238,7 +279,7 @@ class tests extends CompilerTest {
@Test def dotc_typer = compileDir(dotcDir, "typer")// twice omitted to make tests run faster
// error: error while loading Checking$$anon$2$,
- // class file 'target/scala-2.11/dotty_2.11-0.1-SNAPSHOT.jar(dotty/tools/dotc/typer/Checking$$anon$2.class)'
+ // class file 'target/scala-2.11/dotty_2.11-0.1.1-SNAPSHOT.jar(dotty/tools/dotc/typer/Checking$$anon$2.class)'
// has location not matching its contents: contains class $anon
@Test def dotc_util = compileDir(dotcDir, "util") // twice omitted to make tests run faster
@@ -260,7 +301,6 @@ class tests extends CompilerTest {
dotcDir + "config/PathResolver.scala"
), List(/* "-Ylog:frontend", */ "-Xprompt") ++ staleSymbolError ++ twice)
- val javaDir = "./tests/pos-java-interop/"
@Test def java_all = compileFiles(javaDir, twice)
//@Test def dotc_compilercommand = compileFile(dotcDir + "config/", "CompilerCommand")
@@ -349,9 +389,10 @@ class tests extends CompilerTest {
@Test def tasty_tests = compileDir(testsDir, "tasty", testPickling)
@Test def tasty_bootstrap = {
- val opt = List("-priorityclasspath", defaultOutputDir, "-Ylog-classpath")
+    val logging = if (false) List("-Ylog-classpath", "-verbose") else Nil // flip to true to debug classpath issues
+ val opt = List("-priorityclasspath", defaultOutputDir) ++ logging
// first compile dotty
- compileDir(dottyDir, ".", List("-deep", "-Ycheck-reentrant", "-strict"))(allowDeepSubtypes)
+ compileDir(dottyDir, ".", List("-deep", "-Ycheck-reentrant", "-strict") ++ logging)(allowDeepSubtypes)
compileDir(libDir, "dotty", "-deep" :: opt)
compileDir(libDir, "scala", "-deep" :: opt)
diff --git a/compiler/test/dotty/Jars.scala b/compiler/test/dotty/Jars.scala
index 42c707069..6fc9b0fde 100644
--- a/compiler/test/dotty/Jars.scala
+++ b/compiler/test/dotty/Jars.scala
@@ -3,15 +3,15 @@ package dotty
/** Jars used when compiling test, defaults to sbt locations */
object Jars {
val dottyLib: String = sys.env.get("DOTTY_LIB") getOrElse {
- "../library/target/scala-2.11/dotty-library_2.11-0.1-SNAPSHOT.jar"
+ "../library/target/scala-2.11/dotty-library_2.11-0.1.1-SNAPSHOT.jar"
}
val dottyCompiler: String = sys.env.get("DOTTY_COMPILER") getOrElse {
- "./target/scala-2.11/dotty-compiler_2.11-0.1-SNAPSHOT.jar"
+ "./target/scala-2.11/dotty-compiler_2.11-0.1.1-SNAPSHOT.jar"
}
val dottyInterfaces: String = sys.env.get("DOTTY_INTERFACE") getOrElse {
- "../interfaces/target/dotty-interfaces-0.1-SNAPSHOT.jar"
+ "../interfaces/target/dotty-interfaces-0.1.1-SNAPSHOT.jar"
}
val dottyExtras: List[String] = sys.env.get("DOTTY_EXTRAS")
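Each jar location follows the same pattern: an environment-variable override falling back to the sbt output path. A minimal sketch of that pattern in isolation (the path literal is just the default shown above, the object name is hypothetical):

object JarLocation {
  // Environment variable wins; otherwise use the default sbt artifact path.
  val dottyLib: String = sys.env.getOrElse(
    "DOTTY_LIB",
    "../library/target/scala-2.11/dotty-library_2.11-0.1.1-SNAPSHOT.jar")

  def main(args: Array[String]): Unit = println(dottyLib)
}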
diff --git a/compiler/test/dotty/tools/dotc/CompilerTest.scala b/compiler/test/dotty/tools/dotc/CompilerTest.scala
index 5192ec84c..eaa0bea84 100644
--- a/compiler/test/dotty/tools/dotc/CompilerTest.scala
+++ b/compiler/test/dotty/tools/dotc/CompilerTest.scala
@@ -228,8 +228,9 @@ abstract class CompilerTest {
} else {
val destDir = Directory(DPConfig.testRoot + JFile.separator + testName)
files.foreach({ file =>
- val jfile = new JFile(file)
- recCopyFiles(jfile, destDir / jfile.getName)
+ val sourceFile = new JFile(file)
+ val destFile = destDir / (if (file.startsWith("../")) file.substring(3) else file)
+ recCopyFiles(sourceFile, destFile)
})
compileDir(DPConfig.testRoot + JFile.separator, testName, args)
destDir.deleteRecursively
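The copy step now keeps the relative directory structure of sources referenced outside the test root: a path starting with "../" has that prefix stripped and the rest is recreated under the destination directory, instead of flattening everything to its bare file name. A small sketch of the mapping, using a hypothetical destination directory:

object DestPathDemo {
  // Strip a leading "../" and re-root the remaining relative path under destDir.
  def destPath(destDir: String, file: String): String = {
    val relative = if (file.startsWith("../")) file.substring(3) else file
    destDir + "/" + relative
  }

  def main(args: Array[String]): Unit = {
    println(destPath("testRoot/stdLib", "../scala-scala/src/library/scala/App.scala"))
    // testRoot/stdLib/scala-scala/src/library/scala/App.scala
    println(destPath("testRoot/stdLib", "tests/pos/Foo.scala"))
    // testRoot/stdLib/tests/pos/Foo.scala
  }
}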
diff --git a/compiler/test/dotty/tools/dotc/parsing/ModifiersParsingTest.scala b/compiler/test/dotty/tools/dotc/parsing/ModifiersParsingTest.scala
index e31ef2160..32f842e92 100644
--- a/compiler/test/dotty/tools/dotc/parsing/ModifiersParsingTest.scala
+++ b/compiler/test/dotty/tools/dotc/parsing/ModifiersParsingTest.scala
@@ -140,12 +140,12 @@ class ModifiersParsingTest {
source = "def f(implicit a: Int, b: Int) = ???"
println(source.defParam(0).modifiers)
- assert(source.defParam(0).modifiers == List(Mod.Implicit(Flags.Implicit)))
- assert(source.defParam(1).modifiers == List(Mod.Implicit(Flags.Implicit)))
+ assert(source.defParam(0).modifiers == List(Mod.Implicit()))
+ assert(source.defParam(1).modifiers == List(Mod.Implicit()))
source = "def f(x: Int, y: Int)(implicit a: Int, b: Int) = ???"
assert(source.defParam(0, 0).modifiers == List())
- assert(source.defParam(1, 0).modifiers == List(Mod.Implicit(Flags.Implicit)))
+ assert(source.defParam(1, 0).modifiers == List(Mod.Implicit()))
}
@Test def blockDef = {
diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala
index c77ba501f..d9a5d8a38 100644
--- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala
+++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala
@@ -9,9 +9,9 @@ import dotty.tools.dotc.Main
import dotty.tools.dotc.reporting.TestReporter
class PatmatExhaustivityTest {
- val testsDir = "./tests/patmat"
+ val testsDir = "../tests/patmat"
// stop-after: patmatexhaust-huge.scala crash compiler
- val options = List("-color:never", "-Ystop-after:splitter", "-Ycheck-all-patmat")
+ val options = List("-color:never", "-Ystop-after:splitter", "-Ycheck-all-patmat") ++ (new dotc.tests).classPath
private def compileFile(file: File) = {
val stringBuffer = new StringWriter()