Diffstat (limited to 'src/compiler')
-rw-r--r--  src/compiler/scala/reflect/internal/Definitions.scala | 7
-rw-r--r--  src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala | 50
-rw-r--r--  src/compiler/scala/reflect/internal/Importers.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/NameManglers.scala | 14
-rw-r--r--  src/compiler/scala/reflect/internal/StdNames.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/SymbolTable.scala | 1
-rw-r--r--  src/compiler/scala/reflect/internal/Symbols.scala | 31
-rw-r--r--  src/compiler/scala/reflect/internal/TreeGen.scala | 27
-rw-r--r--  src/compiler/scala/reflect/internal/Trees.scala | 9
-rw-r--r--  src/compiler/scala/reflect/internal/Types.scala | 15
-rw-r--r--  src/compiler/scala/reflect/runtime/ToolBoxes.scala | 40
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Reifiers.scala | 151
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 94
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 37
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 639
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 313
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 196
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 147
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 77
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala | 324
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 70
38 files changed, 1523 insertions, 882 deletions
diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala
index b4b0a7335d..d043230e48 100644
--- a/src/compiler/scala/reflect/internal/Definitions.scala
+++ b/src/compiler/scala/reflect/internal/Definitions.scala
@@ -70,8 +70,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
tpnme.Float -> FLOAT_TAG,
tpnme.Double -> DOUBLE_TAG,
tpnme.Boolean -> BOOL_TAG,
- tpnme.Unit -> VOID_TAG,
- tpnme.Object -> OBJECT_TAG
+ tpnme.Unit -> VOID_TAG
)
private def classesMap[T](f: Name => T) = symbolsMap(ScalaValueClassesNoUnit, f)
@@ -80,7 +79,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
private def boxedName(name: Name) = sn.Boxed(name.toTypeName)
- lazy val abbrvTag = symbolsMap(ObjectClass :: ScalaValueClasses, nameToTag)
+ lazy val abbrvTag = symbolsMap(ScalaValueClasses, nameToTag) withDefaultValue OBJECT_TAG
lazy val numericWeight = symbolsMapFilt(ScalaValueClasses, nameToWeight.keySet, nameToWeight)
lazy val boxedModule = classesMap(x => getModule(boxedName(x)))
lazy val boxedClass = classesMap(x => getClass(boxedName(x)))
@@ -213,7 +212,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
// Note: this is not the type alias AnyRef, it's a companion-like
// object used by the @specialize annotation.
- def AnyRefModule = getMember(ScalaPackageClass, nme.AnyRef)
+ lazy val AnyRefModule = getMember(ScalaPackageClass, nme.AnyRef)
@deprecated("Use AnyRefModule", "2.10.0")
def Predef_AnyRef = AnyRefModule
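
The abbrvTag change above leans on Map#withDefaultValue: instead of carrying an explicit Object entry, any class without an entry now falls back to OBJECT_TAG. A minimal standalone sketch of the idiom (the sample keys and the 'L' fallback are only illustrative stand-ins):

    val tag = Map("Int" -> 'I', "Boolean" -> 'Z', "Unit" -> 'V').withDefaultValue('L')
    tag("Int")              // 'I'   -- explicit entry
    tag("String")           // 'L'   -- any missing key yields the default
    tag.contains("String")  // false -- withDefaultValue adds no entries, it only changes apply
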
diff --git a/src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala
new file mode 100644
index 0000000000..47f794681c
--- /dev/null
+++ b/src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -0,0 +1,50 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.collection.{ mutable, immutable }
+import util._
+
+/** The name of this trait defines the eventual intent better than
+ * it does the initial contents.
+ */
+trait ExistentialsAndSkolems {
+ self: SymbolTable =>
+
+ /** Map a list of type parameter symbols to skolemized symbols, which
+ * can be deskolemized to the original type parameter. (A skolem is a
+ * representation of a bound variable when viewed inside its scope.)
+ * !!!Adriaan: this does not work for hk types.
+ */
+ def deriveFreshSkolems(tparams: List[Symbol]): List[Symbol] = {
+ class Deskolemizer extends LazyType {
+ override val typeParams = tparams
+ val typeSkolems = typeParams map (_.newTypeSkolem setInfo this)
+ override def complete(sym: Symbol) {
+ // The info of a skolem is the skolemized info of the
+ // actual type parameter of the skolem
+ sym setInfo sym.deSkolemize.info.substSym(typeParams, typeSkolems)
+ }
+ }
+ (new Deskolemizer).typeSkolems
+ }
+
+ /** Convert to corresponding type parameters all skolems of method
+ * parameters which appear in `tparams`.
+ */
+ def deskolemizeTypeParams(tparams: List[Symbol])(tp: Type): Type = {
+ class DeSkolemizeMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) if sym.isTypeSkolem && (tparams contains sym.deSkolemize) =>
+ mapOver(typeRef(NoPrefix, sym.deSkolemize, args))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ new DeSkolemizeMap mapOver tp
+ }
+}
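
deriveFreshSkolems creates all the skolem symbols up front and defers rewriting their infos to the LazyType's complete, so the substitution can refer to the full skolem list. A rough standalone analogue on plain strings, performed eagerly in two phases instead of via lazy completion (Param, deriveFresh and the "$sk" suffix are illustrative names, not compiler API):

    case class Param(name: String, bound: String)

    // phase 1: allocate all fresh ("skolem") names;
    // phase 2: rewrite each bound to refer to the fresh names instead of the originals
    def deriveFresh(params: List[Param]): List[Param] = {
      val fresh = params.map(_.name + "$sk")
      val subst = params.map(_.name) zip fresh
      params.zip(fresh).map { case (p, n) =>
        Param(n, subst.foldLeft(p.bound) { case (b, (from, to)) => b.replace(from, to) })
      }
    }

    deriveFresh(List(Param("A", "List[B]"), Param("B", "A")))
    // List(Param(A$sk,List[B$sk]), Param(B$sk,A$sk))
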
diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala
index c232e3b7c1..1ae4f755ed 100644
--- a/src/compiler/scala/reflect/internal/Importers.scala
+++ b/src/compiler/scala/reflect/internal/Importers.scala
@@ -286,7 +286,7 @@ trait Importers { self: SymbolTable =>
new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree)
def importImportSelector(sel: from.ImportSelector): ImportSelector =
- new ImportSelector(importName(sel.name), sel.namePos, importName(sel.rename), sel.renamePos)
+ new ImportSelector(importName(sel.name), sel.namePos, if (sel.rename != null) importName(sel.rename) else null, sel.renamePos)
def importTree(tree: from.Tree): Tree = {
val mytree = tree match {
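
The added guard protects against selectors whose rename is null (wildcard selectors, for instance, carry no rename). The same null handling can be expressed with Option; a self-contained illustration with strings standing in for names:

    def importName(n: String): String = n.toUpperCase      // stand-in for the importer
    Option(null: String).map(importName).orNull            // null      -- nothing to rename
    Option("renamed").map(importName).orNull               // "RENAMED"
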
diff --git a/src/compiler/scala/reflect/internal/NameManglers.scala b/src/compiler/scala/reflect/internal/NameManglers.scala
index 97a74c2383..c4ee7254dc 100644
--- a/src/compiler/scala/reflect/internal/NameManglers.scala
+++ b/src/compiler/scala/reflect/internal/NameManglers.scala
@@ -76,12 +76,14 @@ trait NameManglers {
val PROTECTED_PREFIX = "protected$"
val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
val SINGLETON_SUFFIX = ".type"
- val SPECIALIZED_SUFFIX_STRING = "$sp"
val SUPER_PREFIX_STRING = "super$"
val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
+ val SETTER_SUFFIX: TermName = encode("_=")
- val SETTER_SUFFIX: TermName = encode("_=")
- val SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX_STRING
+ @deprecated("2.10.0", "Use SPECIALIZED_SUFFIX")
+ def SPECIALIZED_SUFFIX_STRING = SPECIALIZED_SUFFIX.toString
+ @deprecated("2.10.0", "Use SPECIALIZED_SUFFIX")
+ def SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX.toTermName
def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
def isExceptionResultName(name: Name) = name startsWith EXCEPTION_RESULT_PREFIX
@@ -120,7 +122,7 @@ trait NameManglers {
}
def unspecializedName(name: Name): Name = (
- if (name endsWith SPECIALIZED_SUFFIX_NAME)
+ if (name endsWith SPECIALIZED_SUFFIX)
name.subName(0, name.lastIndexOf('m') - 1)
else name
)
@@ -140,8 +142,8 @@ trait NameManglers {
* and another one belonging to the enclosing class, on Double.
*/
def splitSpecializedName(name: Name): (Name, String, String) =
- if (name endsWith SPECIALIZED_SUFFIX_NAME) {
- val name1 = name dropRight SPECIALIZED_SUFFIX_NAME.length
+ if (name endsWith SPECIALIZED_SUFFIX) {
+ val name1 = name dropRight SPECIALIZED_SUFFIX.length
val idxC = name1 lastIndexOf 'c'
val idxM = name1 lastIndexOf 'm'
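
For reference, the same splitting steps on a plain String; the name below follows the specialization scheme (a "$sp" suffix with 'm' and 'c' sections), and splitSpecialized is just an illustrative standalone copy of the logic above:

    def splitSpecialized(name: String): (String, String, String) =
      if (name endsWith "$sp") {
        val base = name dropRight "$sp".length
        val idxC = base lastIndexOf 'c'
        val idxM = base lastIndexOf 'm'
        (base.substring(0, idxM - 1), base.substring(idxC + 1), base.substring(idxM + 1, idxC))
      } else (name, "", "")

    splitSpecialized("foo$mIcD$sp")   // ("foo", "D", "I"): the base name plus the two specialization strings
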
diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala
index 1f67bbc0ac..f61fe7a457 100644
--- a/src/compiler/scala/reflect/internal/StdNames.scala
+++ b/src/compiler/scala/reflect/internal/StdNames.scala
@@ -99,6 +99,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val MODULE_VAR_SUFFIX: NameType = "$module"
val ROOT: NameType = "<root>"
val PACKAGE: NameType = "package"
+ val SPECIALIZED_SUFFIX: NameType = "$sp"
// value types (and AnyRef) are all used as terms as well
// as (at least) arguments to the @specialize annotation.
@@ -330,6 +331,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val freeValue : NameType = "freeValue"
val genericArrayOps: NameType = "genericArrayOps"
val get: NameType = "get"
+ val getOrElse: NameType = "getOrElse"
val hasNext: NameType = "hasNext"
val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode"
val hash_ : NameType = "hash"
diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala
index 1973a97279..b3c62bffbf 100644
--- a/src/compiler/scala/reflect/internal/SymbolTable.scala
+++ b/src/compiler/scala/reflect/internal/SymbolTable.scala
@@ -15,6 +15,7 @@ abstract class SymbolTable extends api.Universe
with Symbols
with Types
with Kinds
+ with ExistentialsAndSkolems
with Scopes
with Definitions
with Constants
diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala
index 77ed2f6a1b..ce85d65050 100644
--- a/src/compiler/scala/reflect/internal/Symbols.scala
+++ b/src/compiler/scala/reflect/internal/Symbols.scala
@@ -1023,8 +1023,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Modifies this symbol's info in place. */
def modifyInfo(f: Type => Type): this.type = setInfo(f(info))
/** Substitute second list of symbols for first in current info. */
- def substInfo(syms0: List[Symbol], syms1: List[Symbol]) = modifyInfo(_.substSym(syms0, syms1))
- def setInfoOwnerAdjusted(info: Type): this.type = setInfo(info atOwner this)
+ def substInfo(syms0: List[Symbol], syms1: List[Symbol]): this.type =
+ if (syms0.isEmpty) this
+ else modifyInfo(_.substSym(syms0, syms1))
+
+ def setInfoOwnerAdjusted(info: Type): this.type = setInfo(info atOwner this)
/** Set the info and enter this symbol into the owner's scope. */
def setInfoAndEnter(info: Type): this.type = {
@@ -1380,15 +1383,25 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
cloneSymbol(owner)
/** A clone of this symbol, but with given owner. */
- final def cloneSymbol(owner: Symbol): Symbol = cloneSymbol(owner, this.rawflags)
- final def cloneSymbol(owner: Symbol, newFlags: Long): Symbol = {
- val newSym = cloneSymbolImpl(owner, newFlags)
- ( newSym
+ final def cloneSymbol(newOwner: Symbol): Symbol =
+ cloneSymbol(newOwner, this.rawflags)
+ final def cloneSymbol(newOwner: Symbol, newFlags: Long): Symbol =
+ cloneSymbol(newOwner, newFlags, nme.NO_NAME)
+ final def cloneSymbol(newOwner: Symbol, newFlags: Long, newName: Name): Symbol = {
+ val clone = cloneSymbolImpl(newOwner, newFlags)
+ ( clone
setPrivateWithin privateWithin
- setInfo (info cloneInfo newSym)
+ setInfo (this.info cloneInfo clone)
setAnnotations this.annotations
)
+ if (clone.thisSym != clone)
+ clone.typeOfThis = (clone.typeOfThis cloneInfo clone)
+ if (newName != nme.NO_NAME)
+ clone.name = newName
+
+ clone
}
+
/** Internal method to clone a symbol's implementation with the given flags and no info. */
def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol
def cloneSymbolImpl(owner: Symbol): Symbol = cloneSymbolImpl(owner, 0L)
@@ -2324,7 +2337,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def existentialBound: Type = abort("unexpected type: "+this.getClass+ " "+debugLocationString)
- override def name: TypeName = super.name.asInstanceOf[TypeName]
+ override def name: TypeName = super.name.toTypeName
final override def isType = true
override def isNonClassType = true
override def isAbstractType = {
@@ -2701,6 +2714,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def cloneSymbolsAndModify(syms: List[Symbol], infoFn: Type => Type): List[Symbol] =
cloneSymbols(syms) map (_ modifyInfo infoFn)
+ def cloneSymbolsAtOwnerAndModify(syms: List[Symbol], owner: Symbol, infoFn: Type => Type): List[Symbol] =
+ cloneSymbolsAtOwner(syms, owner) map (_ modifyInfo infoFn)
/** Functions which perform the standard clone/substituting on the given symbols and type,
* then call the creator function with the new symbols and type as arguments.
diff --git a/src/compiler/scala/reflect/internal/TreeGen.scala b/src/compiler/scala/reflect/internal/TreeGen.scala
index e537c6b83f..cc882ad5ed 100644
--- a/src/compiler/scala/reflect/internal/TreeGen.scala
+++ b/src/compiler/scala/reflect/internal/TreeGen.scala
@@ -250,20 +250,19 @@ abstract class TreeGen {
* var x: T = _
* which is appropriate to the given Type.
*/
- def mkZero(tp: Type): Tree = {
- val tree = tp.typeSymbol match {
- case UnitClass => Literal(Constant())
- case BooleanClass => Literal(Constant(false))
- case FloatClass => Literal(Constant(0.0f))
- case DoubleClass => Literal(Constant(0.0d))
- case ByteClass => Literal(Constant(0.toByte))
- case ShortClass => Literal(Constant(0.toShort))
- case IntClass => Literal(Constant(0))
- case LongClass => Literal(Constant(0L))
- case CharClass => Literal(Constant(0.toChar))
- case _ => Literal(Constant(null))
- }
- tree setType tp
+ def mkZero(tp: Type): Tree = Literal(mkConstantZero(tp)) setType tp
+
+ def mkConstantZero(tp: Type): Constant = tp.typeSymbol match {
+ case UnitClass => Constant(())
+ case BooleanClass => Constant(false)
+ case FloatClass => Constant(0.0f)
+ case DoubleClass => Constant(0.0d)
+ case ByteClass => Constant(0.toByte)
+ case ShortClass => Constant(0.toShort)
+ case IntClass => Constant(0)
+ case LongClass => Constant(0L)
+ case CharClass => Constant(0.toChar)
+ case _ => Constant(null)
}
def mkZeroContravariantAfterTyper(tp: Type): Tree = {
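
mkConstantZero maps a type to its default ("zero") value. A standalone analogue keyed on runtime classes rather than compiler Types (zeroOf is an illustrative name, not part of the compiler):

    def zeroOf(cls: Class[_]): Any =
      if      (cls == classOf[Unit])    ()
      else if (cls == classOf[Boolean]) false
      else if (cls == classOf[Float])   0.0f
      else if (cls == classOf[Double])  0.0d
      else if (cls == classOf[Byte])    0.toByte
      else if (cls == classOf[Short])   0.toShort
      else if (cls == classOf[Int])     0
      else if (cls == classOf[Long])    0L
      else if (cls == classOf[Char])    0.toChar
      else null                                   // reference types default to null

    zeroOf(classOf[Int])      // 0
    zeroOf(classOf[String])   // null
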
diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala
index 958d04732b..b6b7e3cbda 100644
--- a/src/compiler/scala/reflect/internal/Trees.scala
+++ b/src/compiler/scala/reflect/internal/Trees.scala
@@ -224,18 +224,19 @@ trait Trees extends api.Trees { self: SymbolTable =>
LabelDef(sym.name.toTermName, params map Ident, rhs) setSymbol sym
}
-
/** casedef shorthand */
def CaseDef(pat: Tree, body: Tree): CaseDef = CaseDef(pat, EmptyTree, body)
def Bind(sym: Symbol, body: Tree): Bind =
Bind(sym.name, body) setSymbol sym
- /** 0-1 argument list new, based on a symbol.
+ /** 0-1 argument list new, based on a symbol or type.
*/
def New(sym: Symbol, args: Tree*): Tree =
- if (args.isEmpty) New(TypeTree(sym.tpe))
- else New(TypeTree(sym.tpe), List(args.toList))
+ New(sym.tpe, args: _*)
+
+ def New(tpe: Type, args: Tree*): Tree =
+ New(TypeTree(tpe), List(args.toList))
def Apply(sym: Symbol, args: Tree*): Tree =
Apply(Ident(sym), args.toList)
diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala
index adf9df185a..04efe04636 100644
--- a/src/compiler/scala/reflect/internal/Types.scala
+++ b/src/compiler/scala/reflect/internal/Types.scala
@@ -680,7 +680,7 @@ trait Types extends api.Types { self: SymbolTable =>
* symbol.
*/
def substSym(from: List[Symbol], to: List[Symbol]): Type =
- if (from eq to) this
+ if ((from eq to) || from.isEmpty) this
else new SubstSymMap(from, to) apply this
/** Substitute all occurrences of `ThisType(from)` in this type by `to`.
@@ -5381,9 +5381,9 @@ trait Types extends api.Types { self: SymbolTable =>
val params2 = mt2.params
val res2 = mt2.resultType
(sameLength(params1, params2) &&
+ mt1.isImplicit == mt2.isImplicit &&
matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- (res1 <:< res2.substSym(params2, params1)) &&
- mt1.isImplicit == mt2.isImplicit)
+ (res1 <:< res2.substSym(params2, params1)))
// TODO: if mt1.params.isEmpty, consider NullaryMethodType?
case _ =>
false
@@ -5503,9 +5503,9 @@ trait Types extends api.Types { self: SymbolTable =>
tp2 match {
case mt2 @ MethodType(params2, res2) =>
// sameLength(params1, params2) was used directly as pre-screening optimization (now done by matchesQuantified -- is that ok, performancewise?)
- matchesQuantified(params1, params2, res1, res2) &&
+ mt1.isImplicit == mt2.isImplicit &&
matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- mt1.isImplicit == mt2.isImplicit
+ matchesQuantified(params1, params2, res1, res2)
case NullaryMethodType(res2) =>
if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple)
else matchesType(tp1, res2, alwaysMatchSimple)
@@ -5532,7 +5532,10 @@ trait Types extends api.Types { self: SymbolTable =>
case PolyType(tparams1, res1) =>
tp2 match {
case PolyType(tparams2, res2) =>
- matchesQuantified(tparams1, tparams2, res1, res2)
+ if ((tparams1 corresponds tparams2)(_ eq _))
+ matchesType(res1, res2, alwaysMatchSimple)
+ else
+ matchesQuantified(tparams1, tparams2, res1, res2)
case ExistentialType(_, res2) =>
alwaysMatchSimple && matchesType(tp1, res2, true)
case _ =>
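
The new PolyType case uses corresponds, which checks equal length and a pairwise predicate in one pass, so `(tparams1 corresponds tparams2)(_ eq _)` means "same arity and reference-identical type parameters". For reference:

    (List(1, 2, 3) corresponds List(2, 4, 6))(_ * 2 == _)   // true: same length, predicate holds pairwise
    (List(1, 2) corresponds List(1, 2, 3))(_ == _)          // false: lengths differ
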
diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
index 880c68eaa0..f52662ce6f 100644
--- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala
+++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
@@ -64,7 +64,7 @@ trait ToolBoxes extends { self: Universe =>
obj setInfo obj.moduleClass.tpe
val meth = obj.moduleClass.newMethod(newTermName(wrapperMethodName))
def makeParam(fv: Symbol) = meth.newValueParameter(fv.name.toTermName) setInfo fv.tpe
- meth setInfo MethodType(fvs map makeParam, expr.tpe)
+ meth setInfo MethodType(fvs map makeParam, AnyClass.tpe)
minfo.decls enter meth
trace("wrapping ")(defOwner(expr) -> meth)
val methdef = DefDef(meth, expr changeOwner (defOwner(expr) -> meth))
@@ -94,6 +94,20 @@ trait ToolBoxes extends { self: Universe =>
}
def compileExpr(expr: Tree, fvs: List[Symbol]): String = {
+ // Previously toolboxes used to typecheck their inputs before compiling.
+ // Actually, the initial demo by Martin first typechecked the reified tree,
+ // then ran it, which typechecked it again, and only then launched the
+ // reflective compiler.
+ //
+ // However, as observed in https://issues.scala-lang.org/browse/SI-5464,
+ // typechecking in the current implementation is not always idempotent.
+ // That's why we cannot allow inputs of toolboxes to be typechecked,
+ // at least not until the aforementioned issue is closed.
+ val typed = expr filter (t => t.tpe != null && t.tpe != NoType && !t.isInstanceOf[TypeTree])
+ if (!typed.isEmpty) {
+ throw new Error("cannot compile trees that are already typed")
+ }
+
val mdef = wrapInObject(expr, fvs)
val pdef = wrapInPackage(mdef)
val unit = wrapInCompilationUnit(pdef)
@@ -106,7 +120,6 @@ trait ToolBoxes extends { self: Universe =>
jclazz.getDeclaredMethods.find(_.getName == name).get
def runExpr(expr: Tree): Any = {
- val etpe = expr.tpe
val fvs = (expr filter isFree map (_.symbol)).distinct
reporter.reset()
@@ -181,19 +194,13 @@ trait ToolBoxes extends { self: Universe =>
lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, defaultReflectiveClassLoader)
- private def importAndTypeCheck(tree: rm.Tree, expectedType: rm.Type): compiler.Tree = {
+ def typeCheck(tree: rm.Tree, expectedType: rm.Type): rm.Tree = {
+ if (compiler.settings.verbose.value) println("typing "+tree+", pt = "+expectedType)
val ctree: compiler.Tree = importer.importTree(tree.asInstanceOf[Tree])
val pt: compiler.Type = importer.importType(expectedType.asInstanceOf[Type])
-// val typer = compiler.typer.atOwner(ctree, if (owner.isModule) cowner.moduleClass else cowner)
val ttree: compiler.Tree = compiler.typedTopLevelExpr(ctree, pt)
- ttree
- }
-
- def typeCheck(tree: rm.Tree, expectedType: rm.Type): rm.Tree = {
- if (compiler.settings.verbose.value) println("typing "+tree+", pt = "+expectedType)
- val ttree = importAndTypeCheck(tree, expectedType)
- val ettree = exporter.importTree(ttree).asInstanceOf[rm.Tree]
- ettree
+ val rmttree = exporter.importTree(ttree).asInstanceOf[rm.Tree]
+ rmttree
}
def typeCheck(tree: rm.Tree): rm.Tree =
@@ -202,11 +209,10 @@ trait ToolBoxes extends { self: Universe =>
def showAttributed(tree: rm.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
compiler.showAttributed(importer.importTree(tree.asInstanceOf[Tree]), printTypes, printIds, printKinds)
- def runExpr(tree: rm.Tree, expectedType: rm.Type): Any = {
- val ttree = importAndTypeCheck(tree, expectedType)
- compiler.runExpr(ttree)
+ def runExpr(tree: rm.Tree): Any = {
+ if (compiler.settings.verbose.value) println("running "+tree)
+ val ctree: compiler.Tree = importer.importTree(tree.asInstanceOf[Tree])
+ compiler.runExpr(ctree)
}
-
- def runExpr(tree: rm.Tree): Any = runExpr(tree, WildcardType.asInstanceOf[rm.Type])
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/Reifiers.scala b/src/compiler/scala/tools/nsc/ast/Reifiers.scala
index 91d5d2bf4a..7ece8bbd31 100644
--- a/src/compiler/scala/tools/nsc/ast/Reifiers.scala
+++ b/src/compiler/scala/tools/nsc/ast/Reifiers.scala
@@ -8,6 +8,7 @@ package ast
import symtab._
import Flags._
+import scala.reflect.api.Modifier._
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
import scala.tools.nsc.util.FreshNameCreator
@@ -289,10 +290,102 @@ trait Reifiers { self: Global =>
var reifySymbols = false
var reifyTypes = false
+ /** Preprocess a tree before reification */
+ private def trimTree(tree: Tree): Tree = {
+ def trimSyntheticCaseClassMembers(deff: Tree, stats: List[Tree]) = {
+ var stats1 = stats filterNot (stat => stat.isDef && {
+ if (stat.symbol.isCaseAccessorMethod && reifyDebug) println("discarding case accessor method: " + stat)
+ stat.symbol.isCaseAccessorMethod
+ })
+ stats1 = stats1 filterNot (memberDef => memberDef.isDef && {
+ val isSynthetic = memberDef.symbol.isSynthetic
+ // @xeno.by: this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass)
+ // that's why I replace the check with an assumption that all synthetic members are, in fact, generated of case classes
+// val isCaseMember = deff.symbol.isCaseClass || deff.symbol.companionClass.isCaseClass
+ val isCaseMember = true
+ if (isSynthetic && isCaseMember && reifyDebug) println("discarding case class synthetic def: " + memberDef)
+ isSynthetic && isCaseMember
+ })
+ stats1 = stats1 map {
+ case valdef @ ValDef(mods, name, tpt, rhs) if valdef.symbol.isCaseAccessor =>
+ if (reifyDebug) println("resetting visibility of case accessor field: " + valdef)
+ val Modifiers(flags, privateWithin, annotations) = mods
+ val flags1 = flags & ~Flags.LOCAL & ~Flags.PRIVATE
+ val mods1 = Modifiers(flags1, privateWithin, annotations)
+ ValDef(mods1, name, tpt, rhs).copyAttrs(valdef)
+ case stat =>
+ stat
+ }
+ stats1
+ }
+
+ def trimSyntheticCaseClassCompanions(stats: List[Tree]) =
+ stats diff (stats collect { case moddef: ModuleDef => moddef } filter (moddef => {
+ val isSynthetic = moddef.symbol.isSynthetic
+ // @xeno.by: this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass)
+ // that's why I replace the check with an assumption that all synthetic modules are, in fact, companions of case classes
+// val isCaseCompanion = moddef.symbol.companionClass.isCaseClass
+ val isCaseCompanion = true
+ // @xeno.by: we also have to do this ugly hack for the very same reason described above
+ // normally this sort of stuff is performed in reifyTree, which binds related symbols, however, local companions will be out of its reach
+ if (reifyDebug) println("boundSym: "+ moddef.symbol)
+ boundSyms += moddef.symbol
+ if (isSynthetic && isCaseCompanion && reifyDebug) println("discarding synthetic case class companion: " + moddef)
+ isSynthetic && isCaseCompanion
+ }))
+
+ tree match {
+ case tree if tree.isErroneous =>
+ tree
+ case ta @ TypeApply(hk, ts) =>
+ def isErased(tt: TypeTree) = tt.tpe != null && definedInLiftedCode(tt.tpe) && tt.original == null
+ val discard = ts collect { case tt: TypeTree => tt } exists isErased
+ if (reifyDebug && discard) println("discarding TypeApply: " + tree)
+ if (discard) hk else ta
+ case classDef @ ClassDef(mods, name, params, impl) =>
+ val Template(parents, self, body) = impl
+ val body1 = trimSyntheticCaseClassMembers(classDef, body)
+ var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ ClassDef(mods, name, params, impl1).copyAttrs(classDef)
+ case moduledef @ ModuleDef(mods, name, impl) =>
+ val Template(parents, self, body) = impl
+ val body1 = trimSyntheticCaseClassMembers(moduledef, body)
+ var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ ModuleDef(mods, name, impl1).copyAttrs(moduledef)
+ case template @ Template(parents, self, body) =>
+ val body1 = trimSyntheticCaseClassCompanions(body)
+ Template(parents, self, body1).copyAttrs(template)
+ case block @ Block(stats, expr) =>
+ val stats1 = trimSyntheticCaseClassCompanions(stats)
+ Block(stats1, expr).copyAttrs(block)
+ case valdef @ ValDef(mods, name, tpt, rhs) if valdef.symbol.isLazy =>
+ if (reifyDebug) println("dropping $lzy in lazy val's name: " + tree)
+ val name1 = if (name endsWith nme.LAZY_LOCAL) name dropRight nme.LAZY_LOCAL.length else name
+ ValDef(mods, name1, tpt, rhs).copyAttrs(valdef)
+ case unapply @ UnApply(fun, args) =>
+ def extractExtractor(tree: Tree): Tree = {
+ val Apply(fun, args) = tree
+ args match {
+ case List(Ident(special)) if special == nme.SELECTOR_DUMMY =>
+ val Select(extractor, flavor) = fun
+ assert(flavor == nme.unapply || flavor == nme.unapplySeq)
+ extractor
+ case _ =>
+ extractExtractor(fun)
+ }
+ }
+
+ if (reifyDebug) println("unapplying unapply: " + tree)
+ val fun1 = extractExtractor(fun)
+ Apply(fun1, args).copyAttrs(unapply)
+ case _ =>
+ tree
+ }
+ }
+
/** Reify a tree */
- private def reifyTree(tree: Tree): Tree = {
- def reifyDefault(tree: Tree) =
- reifyProduct(tree)
+ private def reifyTree(tree0: Tree): Tree = {
+ val tree = trimTree(tree0)
var rtree = tree match {
case tree if tree.isErroneous =>
@@ -311,29 +404,24 @@ trait Reifiers { self: Global =>
} else reifyFree(tree)
case tt: TypeTree if (tt.tpe != null) =>
reifyTypeTree(tt)
- case ta @ TypeApply(hk, ts) =>
- def isErased(tt: TypeTree) = tt.tpe != null && definedInLiftedCode(tt.tpe) && tt.original == null
- val discard = ts collect { case tt: TypeTree => tt } exists isErased
- if (reifyDebug && discard) println("discarding TypeApply: " + tree)
- if (discard) reifyTree(hk) else reifyDefault(ta)
case Literal(constant @ Constant(tpe: Type)) if boundSyms exists (tpe contains _) =>
CannotReifyClassOfBoundType(tree, tpe)
case Literal(constant @ Constant(sym: Symbol)) if boundSyms contains sym =>
CannotReifyClassOfBoundEnum(tree, constant.tpe)
case tree if tree.isDef =>
if (reifyDebug) println("boundSym: %s of type %s".format(tree.symbol, (tree.productIterator.toList collect { case tt: TypeTree => tt } headOption).getOrElse(TypeTree(tree.tpe))))
- // registerReifiableSymbol(tree.symbol)
boundSyms += tree.symbol
- if (tree.symbol.sourceModule != NoSymbol) {
- if (reifyDebug) println("boundSym (sourceModule): " + tree.symbol.sourceModule)
- boundSyms += tree.symbol.sourceModule
- }
-
- if (tree.symbol.moduleClass != NoSymbol) {
- if (reifyDebug) println("boundSym (moduleClass): " + tree.symbol.moduleClass)
- boundSyms += tree.symbol.moduleClass
- }
+ bindRelatedSymbol(tree.symbol.sourceModule, "sourceModule")
+ bindRelatedSymbol(tree.symbol.moduleClass, "moduleClass")
+ bindRelatedSymbol(tree.symbol.companionClass, "companionClass")
+ bindRelatedSymbol(tree.symbol.companionModule, "companionModule")
+ Some(tree.symbol) collect { case termSymbol: TermSymbol => bindRelatedSymbol(termSymbol.referenced, "referenced") }
+ def bindRelatedSymbol(related: Symbol, name: String): Unit =
+ if (related != null && related != NoSymbol) {
+ if (reifyDebug) println("boundSym (" + name + "): " + related)
+ boundSyms += related
+ }
val prefix = tree.productPrefix
val elements = (tree.productIterator map {
@@ -354,7 +442,7 @@ trait Reifiers { self: Global =>
}).toList
reifyProduct(prefix, elements)
case _ =>
- reifyDefault(tree)
+ reifyProduct(tree)
}
// usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation
@@ -396,10 +484,8 @@ trait Reifiers { self: Global =>
*
* This workaround worked surprisingly well and allowed me to fix several important reification bugs, until the abstraction has leaked.
* Suddenly I found out that in certain contexts original trees do not contain symbols, but are just parser trees.
- * To the moment I know two such situations:
- * 1) Unapplies: https://issues.scala-lang.org/browse/SI-5273?focusedCommentId=56057#comment-56057
- * 2) Annotations: typedAnnotations does not typecheck the annotation in-place, but rather creates new trees and typechecks them, so the original remains symless
- * 3) <sigh, what will I discover next?>
+ * At the moment I know of only one such situation: typedAnnotations does not typecheck the annotation in-place, but rather creates new trees and typechecks them, so the original remains symless.
+ * This is laboriously worked around in the code below. I hope this will be the only workaround in this department.
*/
private def reifyTypeTree(tt: TypeTree): Tree = {
if (definedInLiftedCode(tt.tpe)) {
@@ -441,14 +527,15 @@ trait Reifiers { self: Global =>
}
} else {
var rtt = mirrorCall(nme.TypeTree, reifyType(tt.tpe))
- // @xeno.by: originals get typechecked during subsequent reflective compilation, which leads to subtle bugs
- // https://issues.scala-lang.org/browse/SI-5273?focusedCommentId=56057#comment-56057
- // until this is somehow sorted out, I disable reification of originals
- // if (tt.original != null) {
- // val setOriginal = Select(rtt, newTermName("setOriginal"))
- // val reifiedOriginal = reify(tt.original)
- // rtt = Apply(setOriginal, List(reifiedOriginal))
- // }
+ // @xeno.by: temporarily disabling reification of originals
+ // subsequent reflective compilation will try to typecheck them
+ // and this means that the reifier has to do additional efforts to ensure that this will succeed
+ // additional efforts + no clear benefit = will be implemented later
+// if (tt.original != null) {
+// val setOriginal = Select(rtt, newTermName("setOriginal"))
+// val reifiedOriginal = reify(tt.original)
+// rtt = Apply(setOriginal, List(reifiedOriginal))
+// }
rtt
}
}
@@ -493,7 +580,7 @@ trait Reifiers { self: Global =>
ann.assocs map { case (nme, arg) => AssignOrNamedArg(Ident(nme), toScalaAnnotation(arg)) }
}
- New(TypeTree(ann.atp), List(args))
+ New(ann.atp, args: _*)
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 2cfd21ecc8..f361d45018 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -253,10 +253,10 @@ trait TreeDSL {
}
/** Top level accessible. */
- def MATCHERROR(arg: Tree) = Throw(New(TypeTree(MatchErrorClass.tpe), List(List(arg))))
+ def MATCHERROR(arg: Tree) = Throw(New(MatchErrorClass, arg))
/** !!! should generalize null guard from match error here. */
- def THROW(sym: Symbol): Throw = Throw(New(TypeTree(sym.tpe), List(Nil)))
- def THROW(sym: Symbol, msg: Tree): Throw = Throw(New(TypeTree(sym.tpe), List(List(msg.TOSTRING()))))
+ def THROW(sym: Symbol): Throw = Throw(New(sym))
+ def THROW(sym: Symbol, msg: Tree): Throw = Throw(New(sym, msg.TOSTRING()))
def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList))
def NEW(sym: Symbol, args: Tree*): Tree = New(sym, args: _*)
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 265d017653..c7414bf34b 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -13,7 +13,7 @@ import symtab.SymbolTable
/** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
* TreeDSL at the moment expects a Global. Can we get by with SymbolTable?
*/
-abstract class TreeGen extends reflect.internal.TreeGen {
+abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
val global: Global
import global._
@@ -57,7 +57,7 @@ abstract class TreeGen extends reflect.internal.TreeGen {
// annotate the expression with @unchecked
def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
// This can't be "Annotated(New(UncheckedClass), expr)" because annotations
- // are very pick about things and it crashes the compiler with "unexpected new".
+ // are very picky about things and it crashes the compiler with "unexpected new".
Annotated(New(scalaDot(UncheckedClass.name), List(Nil)), expr)
}
// if it's a Match, mark the selector unchecked; otherwise nothing.
@@ -66,18 +66,81 @@ abstract class TreeGen extends reflect.internal.TreeGen {
case _ => tree
}
- def withDefaultCase(matchExpr: Tree, defaultAction: Tree/*scrutinee*/ => Tree): Tree = matchExpr match {
- case Match(scrutinee, cases) =>
- if (cases exists treeInfo.isDefaultCase) matchExpr
- else {
- val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(scrutinee))
- Match(scrutinee, cases :+ defaultCase)
+ // must be kept in synch with the codegen in PatMatVirtualiser
+ object VirtualCaseDef {
+ def unapply(b: Block): Option[(Assign, Tree, Tree)] = b match {
+ case Block(List(assign@Assign(keepGoingLhs, falseLit), matchRes), zero) => Some((assign, matchRes, zero)) // TODO: check tree annotation
+ case _ => None
+ }
+ }
+
+ // TODO: would be so much nicer if we would know during match-translation (i.e., type checking)
+ // whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum
+ class MatchMatcher {
+ def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig)
+ def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig)
+ def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree): Tree = unknownTree(orig)
+
+ def apply(matchExpr: Tree): Tree = (matchExpr: @unchecked) match {
+ // old-style match or virtpatmat switch
+ case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr )
+ caseMatch(matchExpr, selector, cases, identity)
+ // old-style match or virtpatmat switch
+ case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr )
+ caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m))
+ // virtpatmat
+ case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if opt.virtPatmat => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr )
+ caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher)
+ // optimized version of virtpatmat
+ case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, epilogue) if opt.virtPatmat => // TODO: check tree annotation // println("virtopt match: "+ (zero, x, matchRes, keepGoing, stats) + "for:\n"+ matchExpr )
+ caseVirtualizedMatchOpt(matchExpr, zero, x, matchRes, keepGoing, stats, epilogue, identity)
+ // optimized version of virtpatmat
+ case Block(outerStats, orig@Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, epilogue)) if opt.virtPatmat => // TODO: check tree annotation // println("virt opt block match: "+ (zero, x, matchRes, keepGoing, stats, outerStats) + "for:\n"+ matchExpr )
+ caseVirtualizedMatchOpt(matchExpr, zero, x, matchRes, keepGoing, stats, epilogue, m => copyBlock(matchExpr, outerStats, m))
+ case other =>
+ unknownTree(other)
+ }
+
+ def unknownTree(t: Tree): Tree = throw new MatchError(t)
+ def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr)
+
+ def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] =
+ if (!opt.virtPatmat) cases
+ else cases filter {
+ case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false
+ case CaseDef(pat, guard, body) => true
+ }
+ }
+
+ def withDefaultCase(matchExpr: Tree, defaultAction: Tree/*scrutinee*/ => Tree): Tree = {
+ object withDefaultTransformer extends MatchMatcher {
+ override def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = {
+ val casesNoSynthCatchAll = dropSyntheticCatchAll(cases)
+ if (casesNoSynthCatchAll exists treeInfo.isDefaultCase) orig
+ else {
+ val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(selector.duplicate))
+ wrap(Match(selector, casesNoSynthCatchAll :+ defaultCase))
+ }
+ }
+ override def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = { import CODE._
+ ((matcher APPLY (scrut)) DOT nme.getOrElse) APPLY (defaultAction(scrut.duplicate)) // TODO: pass targs
+ }
+ override def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree): Tree = { import CODE._
+ wrap(Block(
+ zero ::
+ x ::
+ matchRes ::
+ keepGoing ::
+ stats,
+ // replace `if (keepGoing) throw new MatchError(...) else matchRes` by `if (keepGoing) ${defaultAction(`x`)} else matchRes`
+ (IF (REF(keepGoing.symbol)) THEN defaultAction(x.rhs.duplicate) ELSE REF(matchRes.symbol))
+ ))
}
- case _ =>
- matchExpr
- // [Martin] Adriaan: please fill in virtpatmat transformation here
+ }
+ withDefaultTransformer(matchExpr)
}
+
def mkCached(cvar: Symbol, expr: Tree): Tree = {
val cvarRef = mkUnattributedRef(cvar)
Block(
@@ -118,10 +181,11 @@ abstract class TreeGen extends reflect.internal.TreeGen {
def mkModuleAccessDef(accessor: Symbol, msym: Symbol) =
DefDef(accessor, Select(This(msym.owner), msym))
- def newModule(accessor: Symbol, tpe: Type) =
- New(TypeTree(tpe),
- List(for (pt <- tpe.typeSymbol.primaryConstructor.info.paramTypes)
- yield This(accessor.owner.enclClass)))
+ def newModule(accessor: Symbol, tpe: Type) = {
+ val ps = tpe.typeSymbol.primaryConstructor.info.paramTypes
+ if (ps.isEmpty) New(tpe)
+ else New(tpe, This(accessor.owner.enclClass))
+ }
// def m: T;
def mkModuleAccessDcl(accessor: Symbol) =
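
caseVirtualizedMatch above rewrites `matcher(scrut)` into `matcher(scrut).getOrElse(default)`. At the value level the idea is simply completing a partial, Option-returning matcher with a default; a small self-contained sketch (withDefault is an illustrative name):

    def withDefault[A, B](matcher: A => Option[B])(default: A => B): A => B =
      a => matcher(a) getOrElse default(a)

    val positive = withDefault((n: Int) => if (n > 0) Some(n) else None)(_ => 0)
    positive(5)    // 5
    positive(-3)   // 0 -- the default kicks in, as the appended wildcard case does for Match trees
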
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index fe6dcc9138..e3a59058a3 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1205,7 +1205,7 @@ self =>
*/
def wildcardType(start: Int) = {
val pname = freshTypeName("_$")
- val t = atPos(start) { Ident(pname) }
+ val t = atPos(start)(Ident(pname))
val bounds = typeBounds()
val param = atPos(t.pos union bounds.pos) { makeSyntheticTypeParam(pname, bounds) }
placeholderTypes = param :: placeholderTypes
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index dae264fffe..f712c7411f 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -180,7 +180,7 @@ trait Scanners extends ScannersCommon {
* @pre: inStringInterpolation
*/
@inline private def inMultiLineInterpolation =
- sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART
+ inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART
/** read next token and return last offset
*/
@@ -217,7 +217,9 @@ trait Scanners extends ScannersCommon {
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
case STRINGLIT =>
- if (inStringInterpolation)
+ if (inMultiLineInterpolation)
+ sepRegions = sepRegions.tail.tail
+ else if (inStringInterpolation)
sepRegions = sepRegions.tail
case _ =>
}
@@ -386,7 +388,7 @@ trait Scanners extends ScannersCommon {
if (ch == '\"') {
nextRawChar()
getStringPart(multiLine = true)
- sepRegions = STRINGLIT :: sepRegions // indicate string part
+ sepRegions = STRINGPART :: sepRegions // indicate string part
sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
} else {
token = STRINGLIT
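
The STRINGPART/STRINGLIT bookkeeping fixed above is what lets the scanner track nesting inside triple-quoted interpolated strings, i.e. literals of the form:

    val who = "world"
    val greeting = s"""hello,
      |${who.toUpperCase}!""".stripMargin
    // greeting == "hello,\nWORLD!"
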
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 13f608ed4e..ad93b4753f 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -470,15 +470,11 @@ abstract class TreeBuilder {
def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean): Tree =
makeVisitor(cases, checkExhaustive, "x$")
- private def makeUnchecked(expr: Tree): Tree = atPos(expr.pos) {
- Annotated(New(scalaDot(definitions.UncheckedClass.name), List(Nil)), expr)
- }
-
/** Create visitor <x => x match cases> */
def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean, prefix: String): Tree = {
- val x = freshTermName(prefix)
- val id = Ident(x)
- val sel = if (checkExhaustive) id else makeUnchecked(id)
+ val x = freshTermName(prefix)
+ val id = Ident(x)
+ val sel = if (checkExhaustive) id else gen.mkUnchecked(id)
Function(List(makeSyntheticParam(x)), Match(sel, cases))
}
@@ -563,7 +559,7 @@ abstract class TreeBuilder {
val vars = getVariables(pat1)
val matchExpr = atPos((pat1.pos union rhs.pos).makeTransparent) {
Match(
- makeUnchecked(rhs),
+ gen.mkUnchecked(rhs),
List(
atPos(pat1.pos) {
CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident, true))
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 6aee52a354..b9675b8270 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -94,7 +94,7 @@ abstract class GenICode extends SubComponent {
// !! modules should be eliminated by refcheck... or not?
case ModuleDef(mods, name, impl) =>
- abort("Modules should not reach backend!")
+ abort("Modules should not reach backend! " + tree)
case ValDef(mods, name, tpt, rhs) =>
ctx // we use the symbol to add fields
@@ -393,15 +393,15 @@ abstract class GenICode extends SubComponent {
for (CaseDef(pat, _, body) <- catches.reverse) yield {
def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
(sym, kind, ctx => {
- ctx.bb.emit(DROP(REFERENCE(sym)))
+ ctx.bb.emit(DROP(REFERENCE(sym))) // drop the loaded exception
genLoad(body, ctx, kind)
})
pat match {
case Typed(Ident(nme.WILDCARD), tpt) => genWildcardHandler(tpt.tpe.typeSymbol)
case Ident(nme.WILDCARD) => genWildcardHandler(ThrowableClass)
- case Bind(name, _) =>
- val exception = ctx.method addLocal new Local(pat.symbol, toTypeKind(pat.symbol.tpe), false)
+ case Bind(_, _) =>
+ val exception = ctx.method addLocal new Local(pat.symbol, toTypeKind(pat.symbol.tpe), false) // the exception will be loaded and stored into this local
(pat.symbol.tpe.typeSymbol, kind, {
ctx: Context =>
@@ -704,7 +704,8 @@ abstract class GenICode extends SubComponent {
ctx1
case New(tpt) =>
- abort("Unexpected New")
+ abort("Unexpected New(" + tpt.summaryString + "/" + tpt + ") received in icode.\n" +
+ " Call was genLoad" + ((tree, ctx, expectedType)))
case Apply(TypeApply(fun, targs), _) =>
val sym = fun.symbol
@@ -875,7 +876,23 @@ abstract class GenICode extends SubComponent {
val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
generatedType = resKind
newCtx
- } else { // normal method call
+ } else if (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions.value)) {
+ // XXX settings.noassertions.value temporarily retained to avoid
+ // breakage until a reasonable interface is settled upon.
+ debuglog("Eliding call from " + tree.symbol.owner + " to " + sym +
+ " based on its elision threshold of " + sym.elisionLevel.get)
+ if (expectedType.isValueType) {
+ ctx.bb.emit(CONSTANT(global.gen.mkConstantZero(expectedType.toType)), tree.pos)
+ generatedType = expectedType
+ }
+ else if (expectedType.isNothingType) unit.error(tree.pos, "Cannot elide where Nothing is required.")
+ else {
+ ctx.bb.emit(CONSTANT(Constant(null)), tree.pos)
+ generatedType = NullReference
+ }
+ ctx
+ }
+ else { // normal method call
debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
val invokeStyle =
if (sym.isStaticMember)
@@ -1054,7 +1071,7 @@ abstract class GenICode extends SubComponent {
case Match(selector, cases) =>
debuglog("Generating SWITCH statement.");
- var ctx1 = genLoad(selector, ctx, INT)
+ var ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
val afterCtx = ctx1.newBlock
var caseCtx: Context = null
generatedType = toTypeKind(tree.tpe)
@@ -2086,12 +2103,12 @@ abstract class GenICode extends SubComponent {
exh
}) else None
- val exhs = handlers.map { handler =>
- val exh = this.newExceptionHandler(handler._1, handler._2, tree.pos)
+ val exhs = handlers.map { case (sym, kind, handler) => // def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
+ val exh = this.newExceptionHandler(sym, kind, tree.pos)
var ctx1 = outerCtx.enterExceptionHandler(exh)
ctx1.addFinalizer(finalizer, finalizerCtx)
loadException(ctx1, exh, tree.pos)
- ctx1 = handler._3(ctx1)
+ ctx1 = handler(ctx1)
// emit finalizer
val ctx2 = emitFinalizer(ctx1)
ctx2.bb.closeWith(JUMP(afterCtx.bb))
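
The new branch in genLoad elides calls whose elision level is below the configured threshold, pushing the expected type's zero value (via mkConstantZero) instead of performing the call. At the source level this is driven by @elidable; a small usage sketch (the object names and the threshold shown are illustrative):

    import scala.annotation.elidable
    import scala.annotation.elidable._

    object Log {
      // level FINE = 500: calls to this method are elided when compiling
      // with -Xelide-below 501 or higher
      @elidable(FINE) def debug(msg: => String): Unit = println(msg)
    }

    object Demo extends App {
      Log.debug("expensive " + "message")   // elided call: replaced by the unit value
    }
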
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index 60cb679782..9f43e1b84c 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -60,20 +60,17 @@ trait DataFlowAnalysis[L <: SemiLattice] {
val output = f(point, in(point))
if ((lattice.bottom == out(point)) || output != out(point)) {
-// Console.println("Output changed at " + point
-// + " from: " + out(point) + " to: " + output
-// + " for input: " + in(point) + " and they are different: " + (output != out(point)))
+ // Console.println("Output changed at " + point
+ // + " from: " + out(point) + " to: " + output
+ // + " for input: " + in(point) + " and they are different: " + (output != out(point)))
out(point) = output
val succs = point.successors
succs foreach { p =>
- if (!worklist(p))
- worklist += p;
- if (!in.isDefinedAt(p))
- assert(false, "Invalid successor for: " + point + " successor " + p + " does not exist")
-// if (!p.exceptionHandlerHeader) {
-// println("lubbing " + p.predecessors + " outs: " + p.predecessors.map(out.apply).mkString("\n", "\n", ""))
- in(p) = lattice.lub(in(p) :: (p.predecessors map out.apply), p.exceptionHandlerStart)
-// }
+ val updated = lattice.lub(in(p) :: (p.predecessors map out.apply), p.exceptionHandlerStart)
+ if(updated != in(p)) {
+ in(p) = updated
+ if (!worklist(p)) { worklist += p; }
+ }
}
}
}
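
The change above re-enqueues a successor only when its in-flow actually changed, instead of whenever the predecessor's out-flow changed. A compact standalone sketch of that worklist discipline over a generic lattice (fixpoint and all parameter names are illustrative):

    import scala.collection.mutable

    def fixpoint[N, L](nodes: Seq[N], succs: N => Seq[N], preds: N => Seq[N],
                       bottom: L, lub: (L, L) => L, transfer: (N, L) => L): Map[N, L] = {
      val in  = mutable.Map(nodes.map(_ -> bottom): _*)
      val out = mutable.Map(nodes.map(_ -> bottom): _*)
      val worklist = mutable.Queue(nodes: _*)
      while (worklist.nonEmpty) {
        val n = worklist.dequeue()
        val o = transfer(n, in(n))
        if (o != out(n)) {                        // out-flow changed: propagate to successors
          out(n) = o
          for (s <- succs(n)) {
            val updated = preds(s).map(out).foldLeft(in(s))(lub)
            if (updated != in(s)) {               // the fix: only re-enqueue on a real change
              in(s) = updated
              if (!worklist.contains(s)) worklist += s
            }
          }
        }
      }
      in.toMap
    }

Without the `updated != in(s)` test, every not-yet-queued successor would be pushed back onto the worklist even when its input is unchanged, which is exactly the redundant work the patch removes.
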
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 6421d6c8ef..877c51ebc1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -127,34 +127,6 @@ abstract class TypeFlowAnalysis {
}
}
- /** reinitialize the analysis, keeping around solutions from a previous run. */
- def reinit(m: icodes.IMethod) {
- if (this.method == null || this.method.symbol != m.symbol)
- init(m)
- else reinit {
- m foreachBlock { b =>
- if (!in.contains(b)) {
- for (p <- b.predecessors) {
- if (out.isDefinedAt(p)) {
- in(b) = out(p)
- worklist += p
- }
- /* else
- in(b) = typeFlowLattice.bottom
- */ }
- out(b) = typeFlowLattice.bottom
- }
- }
- for (handler <- m.exh) {
- val start = handler.startBlock
- if (!in.contains(start)) {
- worklist += start
- in(start) = lattice.IState(in(start).vars, typeStackLattice.exceptionHandlerStack)
- }
- }
- }
- }
-
def this(m: icodes.IMethod) {
this()
init(m)
@@ -162,7 +134,7 @@ abstract class TypeFlowAnalysis {
def run = {
timer.start
-// icodes.lubs0 = 0
+ // icodes.lubs0 = 0
forwardAnalysis(blockTransfer)
val t = timer.stop
if (settings.debug.value) {
@@ -170,216 +142,35 @@ abstract class TypeFlowAnalysis {
assert(visited.contains(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
}
-// log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
-// + "\n\t" + iterations + " iterations: " + t + " ms."
-// + "\n\tlubs: " + typeFlowLattice.lubs + " out of which " + icodes.lubs0 + " typer lubs")
+ // log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
+ // + "\n\t" + iterations + " iterations: " + t + " ms."
+ // + "\n\tlubs: " + typeFlowLattice.lubs + " out of which " + icodes.lubs0 + " typer lubs")
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
- b.iterator.foldLeft(in)(interpret)
- }
- /** The flow function of a given basic block. */
- /* var flowFun: immutable.Map[BasicBlock, TransferFunction] = new immutable.HashMap */
-
- /** Fill flowFun with a transfer function per basic block. */
-/*
- private def buildFlowFunctions(blocks: List[BasicBlock]) {
- def transfer(b: BasicBlock): TransferFunction = {
- var gens: List[Gen] = Nil
- var consumed: Int = 0
- val stack = new SimulatedStack
-
- for (instr <- b) instr match {
- case THIS(clasz) =>
- stack push toTypeKind(clasz.tpe)
-
- case CONSTANT(const) =>
- stack push toTypeKind(const.tpe)
-
- case LOAD_ARRAY_ITEM(kind) =>
- stack.pop2
- stack.push(kind)
-
- case LOAD_LOCAL(local) =>
- val t = bindings(local)
- stack push (if (t == typeLattice.bottom) local.kind else t)
-
- case LOAD_FIELD(field, isStatic) =>
- if (!isStatic)
- stack.pop
- stack push toTypeKind(field.tpe)
-
- case LOAD_MODULE(module) =>
- stack push toTypeKind(module.tpe)
-
- case STORE_ARRAY_ITEM(kind) =>
- stack.pop3
-
- case STORE_LOCAL(local) =>
- val t = stack.pop
- bindings += (local -> t)
-
- case STORE_THIS(_) =>
- stack.pop
-
- case STORE_FIELD(field, isStatic) =>
- if (isStatic)
- stack.pop
- else
- stack.pop2
-
- case CALL_PRIMITIVE(primitive) =>
- primitive match {
- case Negation(kind) =>
- stack.pop; stack.push(kind)
- case Test(_, kind, zero) =>
- stack.pop
- if (!zero) stack.pop
- stack push BOOL;
- case Comparison(_, _) =>
- stack.pop2
- stack push INT
-
- case Arithmetic(op, kind) =>
- stack.pop
- if (op != NOT)
- stack.pop
- val k = kind match {
- case BYTE | SHORT | CHAR => INT
- case _ => kind
- }
- stack push k
-
- case Logical(op, kind) =>
- stack.pop2
- stack push kind
-
- case Shift(op, kind) =>
- stack.pop2
- stack push kind
-
- case Conversion(src, dst) =>
- stack.pop
- stack push dst
-
- case ArrayLength(kind) =>
- stack.pop
- stack push INT
-
- case StartConcat =>
- stack.push(ConcatClass)
-
- case EndConcat =>
- stack.pop
- stack.push(STRING)
-
- case StringConcat(el) =>
- stack.pop2
- stack push ConcatClass
- }
-
- case CALL_METHOD(method, style) => style match {
- case Dynamic =>
- stack.pop(1 + method.info.paramTypes.length)
- stack.push(toTypeKind(method.info.resultType))
-
- case Static(onInstance) =>
- if (onInstance) {
- stack.pop(1 + method.info.paramTypes.length)
- if (!method.isConstructor)
- stack.push(toTypeKind(method.info.resultType));
- } else {
- stack.pop(method.info.paramTypes.length)
- stack.push(toTypeKind(method.info.resultType))
- }
-
- case SuperCall(mix) =>
- stack.pop(1 + method.info.paramTypes.length)
- stack.push(toTypeKind(method.info.resultType))
- }
-
- case BOX(kind) =>
- stack.pop
- stack.push(BOXED(kind))
-
- case UNBOX(kind) =>
- stack.pop
- stack.push(kind)
-
- case NEW(kind) =>
- stack.push(kind)
-
- case CREATE_ARRAY(elem, dims) =>
- stack.pop(dims)
- stack.push(ARRAY(elem))
-
- case IS_INSTANCE(tpe) =>
- stack.pop
- stack.push(BOOL)
-
- case CHECK_CAST(tpe) =>
- stack.pop
- stack.push(tpe)
-
- case SWITCH(tags, labels) =>
- stack.pop
-
- case JUMP(whereto) =>
- ()
-
- case CJUMP(success, failure, cond, kind) =>
- stack.pop2
-
- case CZJUMP(success, failure, cond, kind) =>
- stack.pop
-
- case RETURN(kind) =>
- if (kind != UNIT)
- stack.pop;
-
- case THROW() =>
- stack.pop
-
- case DROP(kind) =>
- stack.pop
-
- case DUP(kind) =>
- stack.push(stack.head)
-
- case MONITOR_ENTER() =>
- stack.pop
-
- case MONITOR_EXIT() =>
- stack.pop
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
- ()
-
- case LOAD_EXCEPTION(_) =>
- stack.pop(stack.length)
- stack.push(typeLattice.Object)
-
- case _ =>
- dumpClassesAndAbort("Unknown instruction: " + i)
- }
-
- new TransferFunction(consumed, gens)
- }
-
- for (b <- blocks) {
- flowFun = flowFun + (b -> transfer(b))
+ var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
+ var instrs = b.toList
+ while(!instrs.isEmpty) {
+ val i = instrs.head
+ result = mutatingInterpret(result, i)
+ instrs = instrs.tail
}
+ result
}
-*/
+
/** Abstract interpretation for one instruction. */
def interpret(in: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
val out = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
+ mutatingInterpret(out, i)
+ }
+
+ def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
val bindings = out.vars
val stack = out.stack
if (settings.debug.value) {
-// Console.println("[before] Stack: " + stack);
-// Console.println(i);
+ // Console.println("[before] Stack: " + stack);
+ // Console.println(i);
}
i match {
@@ -619,11 +410,292 @@ abstract class TypeFlowAnalysis {
}
}
+ case class CallsiteInfo(bb: icodes.BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol)
+
+ /**
+
+ A full type-flow analysis on a method computes in- and out-flows for each basic block (that's what MethodTFA does).
+
+ For the purposes of Inliner, doing so guarantees that an abstract typestack-slot is available by the time an inlining candidate (a CALL_METHOD instruction) is visited.
+ This subclass (MTFAGrowable) of MethodTFA also aims at performing such analysis on CALL_METHOD instructions, with some differences:
+
+ (a) early screening is performed while the type-flow is being computed (in an override of `blockTransfer`) by testing a subset of the conditions that Inliner checks later.
+ The reasoning here is: if the early check fails at some iteration, there's no chance a follow-up iteration (with a yet more lub-ed typestack-slot) will succeed.
+ Failure is sufficient to remove that particular CALL_METHOD from the typeflow's `remainingCALLs`.
+ A forward note: in case inlining occurs at some basic block B, all blocks reachable from B get their CALL_METHOD instructions considered again as candidates
+ (because of the more precise types that -- perhaps -- can be computed).
+
+ (b) in case the early check does not fail, no conclusive decision can be made, thus the CALL_METHOD stays on the watchlist (`isOnWatchlist`).
+
+ In other words, `remainingCALLs` tracks those callsites that still remain as candidates for inlining, so that Inliner can focus on those.
+ `remainingCALLs` also caches info about the typestack just before the callsite, so as to spare computing them again at inlining time.
+
+ Besides caching, a further optimization involves skipping those basic blocks whose in-flow and out-flow aren't needed anyway (as explained next).
+ A basic block lacking a callsite in `remainingCALLs`, when visited by the standard algorithm, won't cause any inlining.
+ But as we know from the way type-flows are computed, computing the in- and out-flow for a basic block relies in general on those of other basic blocks.
+ In detail, we want to focus on that sub-graph of the CFG such that control flow may reach a remaining candidate callsite.
+ Those basic blocks not in that subgraph can be skipped altogether. That's why:
+ - `forwardAnalysis()` in `MTFAGrowable` now checks for inclusion of a basic block in `relevantBBs`
+ - same check is performed before adding a block to the worklist, and as part of choosing successors.
+        The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overriding most methods of the dataflow-analysis.
+
+      The rest of the story takes place in Inliner, which does not visit all of the method's basic blocks but only those represented in `remainingCALLs`.
+
+ @author Miguel Garcia, http://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+
+ */
class MTFAGrowable extends MethodTFA {
import icodes._
- /** discards what must be discarded, blanks what needs to be blanked out, and keeps the rest. */
+ val remainingCALLs = mutable.Map.empty[opcodes.CALL_METHOD, CallsiteInfo]
+
+ val preCandidates = mutable.Set.empty[BasicBlock]
+
+ var callerLin: Traversable[BasicBlock] = null
+
+ override def run {
+
+ timer.start
+ forwardAnalysis(blockTransfer)
+ val t = timer.stop
+
+ /* Now that `forwardAnalysis(blockTransfer)` has finished, all inlining candidates can be found in `remainingCALLs`,
+ whose keys are callsites and whose values are pieces of information about the typestack just before the callsite in question.
+ In order to keep `analyzeMethod()` simple, we collect in `preCandidates` those basic blocks containing at least one candidate. */
+ preCandidates.clear()
+ for(rc <- remainingCALLs) {
+ preCandidates += rc._2.bb
+ }
+
+ if (settings.debug.value) {
+ for(b <- callerLin; if (b != method.startBlock) && preCandidates(b)) {
+ assert(visited.contains(b),
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited)
+ }
+ }
+
+ }
+
+ var shrinkedWatchlist = false
+
+ /*
+       This is the method where information cached elsewhere is put to use. References are given to the other places that populate those caches.
+
+       The goal is to avoid computing type-flows for blocks we don't need (ie blocks not tracked in `relevantBBs`). The method used to add to `relevantBBs` is `putOnRadar`.
+
+       Moreover, it's often the case that the last CALL_METHOD of interest ("of interest" equates to "being tracked in `isOnWatchlist`") isn't the last instruction on the block.
+ There are cases where the typeflows computed past this `lastInstruction` are needed, and cases when they aren't.
+       The reasoning behind this decision is described in `populatePerimeter()`. All `blockTransfer()` needs to do (in order to know at which instruction it can stop)
+ is querying `isOnPerimeter`.
+
+ Upon visiting a CALL_METHOD that's an inlining candidate, the relevant pieces of information about the pre-instruction typestack are collected for future use.
+       That is, unless the candidacy test fails. The reasoning here is: if such an early check fails at some iteration, there's no chance a follow-up iteration
+ (with a yet more lub-ed typestack-slot) will succeed. In case of failure we can safely remove the CALL_METHOD from both `isOnWatchlist` and `remainingCALLs`.
+
+ */
+ override def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
+ var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
+
+ val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null;
+ var isPastLast = false
+
+ var instrs = b.toList
+ while(!isPastLast && !instrs.isEmpty) {
+ val i = instrs.head
+
+ if(isOnWatchlist(i)) {
+ val cm = i.asInstanceOf[opcodes.CALL_METHOD]
+ val msym = cm.method
+ val paramsLength = msym.info.paramTypes.size
+ val receiver = result.stack.types.drop(paramsLength).head match {
+ case REFERENCE(s) => s
+ case _ => NoSymbol // e.g. the scrutinee is BOX(s) or ARRAY
+ }
+ val concreteMethod = inliner.lookupImplFor(msym, receiver)
+ val isCandidate = {
+ ( inliner.isClosureClass(receiver) || concreteMethod.isEffectivelyFinal || receiver.isEffectivelyFinal ) &&
+ !blackballed(concreteMethod)
+ }
+ if(isCandidate) {
+ remainingCALLs += Pair(cm, CallsiteInfo(b, receiver, result.stack.length, concreteMethod))
+ } else {
+ remainingCALLs.remove(cm)
+ isOnWatchlist.remove(cm)
+ shrinkedWatchlist = true
+ }
+ }
+
+ isPastLast = (i eq stopAt)
+
+ if(!isPastLast) {
+ result = mutatingInterpret(result, i)
+ instrs = instrs.tail
+ }
+ }
+
+ result
+ } // end of method blockTransfer
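
For readers less familiar with the transfer-function shape used here, the following standalone sketch (hypothetical names, not part of this patch) shows the same two ideas in isolation: fold an abstract state over a block's instructions, and stop right before the last instruction of interest on a perimeter block, since only the state before that instruction is ever consulted.

    object BlockTransferSketch {
      type State = List[String]                      // stand-in for the abstract typestack
      final case class Instr(name: String)

      // Walk the block's instructions while folding an abstract state; `stopAt`
      // is defined only for perimeter blocks and names the last interesting
      // instruction, which itself is not interpreted.
      def transfer(instrs: List[Instr],
                   stopAt: Option[Instr],
                   interpret: (State, Instr) => State,
                   init: State): State = {
        var state = init
        var rest  = instrs
        var done  = false
        while (!done && rest.nonEmpty) {
          val i = rest.head
          done = stopAt.exists(_ eq i)               // reached the last interesting instruction?
          if (!done) {
            state = interpret(state, i)
            rest  = rest.tail
          }
        }
        state
      }
    }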
+
+ val isOnWatchlist = mutable.Set.empty[Instruction]
+
+ /* Each time CallerCalleeInfo.isSafeToInline determines a concrete callee is unsafe to inline in the current caller,
+ the fact is recorded in this TFA instance for the purpose of avoiding devoting processing to that callsite next time.
+ The condition of "being unsafe to inline in the current caller" sticks across inlinings and TFA re-inits
+ because it depends on the instructions of the callee, which stay unchanged during the course of `analyzeInc(caller)`
+ (with the caveat of the side-effecting `makePublic` in `helperIsSafeToInline`).*/
+ val knownUnsafe = mutable.Set.empty[Symbol]
+ val knownSafe = mutable.Set.empty[Symbol]
+    val knownNever  = mutable.Set.empty[Symbol] // `knownNever` needs to be cleared only at the very end of the inlining phase (unlike `knownUnsafe` and `knownSafe`)
+ @inline final def blackballed(msym: Symbol): Boolean = { knownUnsafe(msym) || knownNever(msym) }
+
+ val relevantBBs = mutable.Set.empty[BasicBlock]
+
+ private def isPreCandidate(cm: opcodes.CALL_METHOD): Boolean = {
+ val msym = cm.method
+ val style = cm.style
+ // Dynamic == normal invocations
+ // Static(true) == calls to private members
+ !msym.isConstructor && !blackballed(msym) &&
+ (style.isDynamic || (style.hasInstance && style.isStatic))
+ // && !(msym hasAnnotation definitions.ScalaNoInlineClass)
+ }
+
+ override def init(m: icodes.IMethod) {
+ super.init(m)
+ remainingCALLs.clear()
+ knownUnsafe.clear()
+ knownSafe.clear()
+ // initially populate the watchlist with all callsites standing a chance of being inlined
+ isOnWatchlist.clear()
+ relevantBBs.clear()
+ /* TODO Do we want to perform inlining in non-finally exception handlers?
+       * Seems counterproductive (the larger the method, the less likely it will be JITed).
+       * Putting on the radar only `linearizer linearizeAt (m, m.startBlock)` doesn't make for much shorter inlining times (a minor speedup nonetheless),
+       * but the effect on method size could be explored. */
+ putOnRadar(m.linearizedBlocks(linearizer))
+ populatePerimeter()
+ assert(relevantBBs.isEmpty || relevantBBs.contains(m.startBlock), "you gave me dead code")
+ }
+
+ def conclusives(b: BasicBlock): List[opcodes.CALL_METHOD] = {
+ knownBeforehand(b) filter { cm => inliner.isMonadicMethod(cm.method) || inliner.hasInline(cm.method) }
+ }
+
+ def knownBeforehand(b: BasicBlock): List[opcodes.CALL_METHOD] = {
+ b.toList collect { case c : opcodes.CALL_METHOD => c } filter { cm => isPreCandidate(cm) && isReceiverKnown(cm) }
+ }
+
+ private def isReceiverKnown(cm: opcodes.CALL_METHOD): Boolean = {
+ cm.method.isEffectivelyFinal && cm.method.owner.isEffectivelyFinal
+ }
+
+ private def putOnRadar(blocks: Traversable[BasicBlock]) {
+ for(bb <- blocks) {
+ val preCands = bb.toList collect {
+ case cm : opcodes.CALL_METHOD
+ if isPreCandidate(cm) /* && !isReceiverKnown(cm) */
+ => cm
+ }
+ isOnWatchlist ++= preCands
+ }
+ relevantBBs ++= blocks
+ }
+
+ /* the argument is also included in the result */
+ private def transitivePreds(b: BasicBlock): Set[BasicBlock] = { transitivePreds(List(b)) }
+
+ /* those BBs in the argument are also included in the result */
+ private def transitivePreds(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
+ val result = mutable.Set.empty[BasicBlock]
+ var toVisit: List[BasicBlock] = starters.toList.distinct
+ while(toVisit.nonEmpty) {
+ val h = toVisit.head
+ toVisit = toVisit.tail
+ result += h
+ for(p <- h.predecessors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
+ }
+ result.toSet
+ }
+
+ /* those BBs in the argument are also included in the result */
+ private def transitiveSuccs(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
+ val result = mutable.Set.empty[BasicBlock]
+ var toVisit: List[BasicBlock] = starters.toList.distinct
+ while(toVisit.nonEmpty) {
+ val h = toVisit.head
+ toVisit = toVisit.tail
+ result += h
+ for(p <- h.successors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
+ }
+ result.toSet
+ }
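
Both helpers are instances of the same worklist reachability computation; a generic sketch (illustrative names only, not part of this patch) could look like this, with `_.predecessors` or `_.successors` supplied as the neighbor function:

    object ReachabilitySketch {
      // Worklist-based reachability; the starters themselves are included in the result.
      def transitiveClosure[A](starters: List[A])(neighbors: A => List[A]): Set[A] = {
        val result  = scala.collection.mutable.Set.empty[A]
        var toVisit = starters.distinct
        while (toVisit.nonEmpty) {
          val h = toVisit.head
          toVisit = toVisit.tail
          result += h
          for (n <- neighbors(h) if !result(n) && !toVisit.contains(n)) toVisit = n :: toVisit
        }
        result.toSet
      }
    }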
+
+ /* A basic block B is "on the perimeter" of the current control-flow subgraph if none of its successors belongs to that subgraph.
+ * In that case, for the purposes of inlining, we're interested in the typestack right before the last inline candidate in B, not in those afterwards.
+ * In particular we can do without computing the outflow at B. */
+ private def populatePerimeter() {
+ isOnPerimeter.clear()
+ var done = true
+ do {
+ val (frontier, toPrune) = (relevantBBs filter hasNoRelevantSuccs) partition isWatching
+ isOnPerimeter ++= frontier
+ relevantBBs --= toPrune
+ done = toPrune.isEmpty
+ } while(!done)
+
+ lastInstruction.clear()
+ for(b <- isOnPerimeter; val lastIns = b.toList.reverse find isOnWatchlist) {
+ lastInstruction += (b -> lastIns.get.asInstanceOf[opcodes.CALL_METHOD])
+ }
+
+ // assertion: "no relevant block can have a predecessor that is on perimeter"
+ assert((for (b <- relevantBBs; if transitivePreds(b.predecessors) exists isOnPerimeter) yield b).isEmpty)
+ }
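
The pruning above is a small fixpoint computation; the following sketch (hypothetical names, not the compiler's API) isolates its shape: repeatedly split the frontier blocks (those with no relevant successors) into watched blocks kept on the perimeter and unwatched blocks dropped from the relevant set, until nothing more can be dropped.

    object PerimeterSketch {
      import scala.collection.mutable

      // `relevant` shrinks as non-watching frontier blocks are pruned, so the
      // "no relevant successors" test is re-evaluated against the current set.
      def perimeter[A](relevant: mutable.Set[A],
                       successors: A => List[A],
                       isWatching: A => Boolean): Set[A] = {
        val onPerimeter = mutable.Set.empty[A]
        def hasNoRelevantSuccs(x: A) = !successors(x).exists(relevant)
        var done = false
        do {
          val (frontier, toPrune) = relevant.filter(hasNoRelevantSuccs).partition(isWatching)
          onPerimeter ++= frontier
          relevant --= toPrune
          done = toPrune.isEmpty
        } while (!done)
        onPerimeter.toSet
      }
    }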
+
+ private val isOnPerimeter = mutable.Set.empty[BasicBlock]
+ private val lastInstruction = mutable.Map.empty[BasicBlock, opcodes.CALL_METHOD]
+
+ def hasNoRelevantSuccs(x: BasicBlock): Boolean = { !(x.successors exists relevantBBs) }
+
+ def isWatching(x: BasicBlock): Boolean = (x.toList exists isOnWatchlist)
+
+
+
+
+ /**
+
+ This method is invoked after one or more inlinings have been performed in basic blocks whose in-flow is non-bottom (this makes a difference later).
+ What we know about those inlinings is given by:
+
+ - `staleOut`: These are the blocks where a callsite was inlined.
+ For each callsite, all instructions in that block before the callsite were left in the block, and the rest moved to an `afterBlock`.
+ The out-flow of these basic blocks is thus in general stale, that's why we'll add them to the TFA worklist.
+
+ - `inlined` : These blocks were spliced into the method's CFG as part of inlining. Being new blocks, they haven't been visited yet by the typeflow analysis.
+
+      - `staleIn`  : These blocks are what `doInline()` calls `afterBlock`s, ie the new home for instructions that previously appeared
+ after a callsite in a `staleOut` block.
+
+ Based on the above information, we have to bring up-to-date the caches that `forwardAnalysis` and `blockTransfer` use to skip blocks and instructions.
+ Those caches are `relevantBBs` and `isOnPerimeter` (for blocks) and `isOnWatchlist` and `lastInstruction` (for CALL_METHODs).
+ Please notice that all `inlined` and `staleIn` blocks are reachable from `staleOut` blocks.
+
+ The update takes place in two steps:
+
+ (1) `staleOut foreach { so => putOnRadar(linearizer linearizeAt (m, so)) }`
+ This results in initial populations for `relevantBBs` and `isOnWatchlist`.
+ Because of the way `isPreCandidate` reuses previous decision-outcomes that are still valid,
+ this already prunes some candidates standing no chance of being inlined.
+
+ (2) `populatePerimeter()`
+ Based on the CFG-subgraph determined in (1) as reflected in `relevantBBs`,
+ this method detects some blocks whose typeflows aren't needed past a certain CALL_METHOD
+ (not needed because none of its successors is relevant for the purposes of inlining, see `hasNoRelevantSuccs`).
+ The blocks thus chosen are said to be "on the perimeter" of the CFG-subgraph.
+ For each of them, its `lastInstruction` (after which no more typeflows are needed) is found.
+
+ */
def reinit(m: icodes.IMethod, staleOut: List[BasicBlock], inlined: collection.Set[BasicBlock], staleIn: collection.Set[BasicBlock]) {
if (this.method == null || this.method.symbol != m.symbol) {
init(m)
@@ -633,31 +705,102 @@ abstract class TypeFlowAnalysis {
return;
}
- reinit {
- // asserts conveying an idea what CFG shapes arrive here.
- // staleIn foreach (p => assert( !in.isDefinedAt(p), p))
- // staleIn foreach (p => assert(!out.isDefinedAt(p), p))
- // inlined foreach (p => assert( !in.isDefinedAt(p), p))
- // inlined foreach (p => assert(!out.isDefinedAt(p), p))
- // inlined foreach (p => assert(!p.successors.isEmpty || p.lastInstruction.isInstanceOf[icodes.opcodes.THROW], p))
- // staleOut foreach (p => assert( in.isDefinedAt(p), p))
-
- // never rewrite in(m.startBlock)
- staleOut foreach { b =>
- if(!inlined.contains(b)) { worklist += b }
- out(b) = typeFlowLattice.bottom
- }
- // nothing else is added to the worklist, bb's reachable via succs will be tfa'ed
- blankOut(inlined)
- blankOut(staleIn)
- // no need to add startBlocks from m.exh
+ worklist.clear // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
+
+ // asserts conveying an idea what CFG shapes arrive here:
+ // staleIn foreach (p => assert( !in.isDefinedAt(p), p))
+ // staleIn foreach (p => assert(!out.isDefinedAt(p), p))
+ // inlined foreach (p => assert( !in.isDefinedAt(p), p))
+ // inlined foreach (p => assert(!out.isDefinedAt(p), p))
+ // inlined foreach (p => assert(!p.successors.isEmpty || p.lastInstruction.isInstanceOf[icodes.opcodes.THROW], p))
+ // staleOut foreach (p => assert( in.isDefinedAt(p), p))
+
+ // remainingCALLs.clear()
+ isOnWatchlist.clear()
+ relevantBBs.clear()
+
+ // never rewrite in(m.startBlock)
+ staleOut foreach { b =>
+ enqueue(b)
+ out(b) = typeFlowLattice.bottom
}
+ // nothing else is added to the worklist, bb's reachable via succs will be tfa'ed
+ blankOut(inlined)
+ blankOut(staleIn)
+ // no need to add startBlocks from m.exh
+
+ staleOut foreach { so => putOnRadar(linearizer linearizeAt (m, so)) }
+ populatePerimeter()
+
+ } // end of method reinit
+
+ /* this is not a general purpose method to add to the worklist,
+ * because the assert is expected to hold only when called from MTFAGrowable.reinit() */
+ private def enqueue(b: BasicBlock) {
+ assert(in(b) ne typeFlowLattice.bottom)
+ if(!worklist.contains(b)) { worklist += b }
+ }
+
+ /* this is not a general purpose method to add to the worklist,
+ * because the assert is expected to hold only when called from MTFAGrowable.reinit() */
+ private def enqueue(bs: Traversable[BasicBlock]) {
+ bs foreach enqueue
}
private def blankOut(blocks: collection.Set[BasicBlock]) {
blocks foreach { b =>
- in(b) = typeFlowLattice.bottom
- out(b) = typeFlowLattice.bottom
+ in(b) = typeFlowLattice.bottom
+ out(b) = typeFlowLattice.bottom
+ }
+ }
+
+ /*
+ This is basically the plain-old forward-analysis part of a dataflow algorithm,
+ adapted to skip non-relevant blocks (as determined by `reinit()` via `populatePerimeter()`).
+
+ The adaptations are:
+
+ - only relevant blocks dequeued from the worklist move on to have the transfer function applied
+
+ - `visited` now means the transfer function was applied to the block,
+          but please notice that this no longer implies that its out-flow is different from bottom,
+          because a block on the perimeter will have per-instruction typeflows computed only up to its `lastInstruction`.
+          In case you need to know whether a visited block `v` has been "fully visited", evaluate `out(v) ne typeFlowLattice.bottom`
+
+ - given that the transfer function may remove callsite-candidates from the watchlist (thus, they are not candidates anymore)
+ there's an opportunity to detect whether a previously relevant block has been left without candidates.
+       That's what `shrinkedWatchlist` detects. Provided the block was on the perimeter, we know we can skip it from now on,
+ and we can also constrain the CFG-subgraph by finding a new perimeter (thus the invocation to `populatePerimeter()`).
+ */
+ override def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = {
+ while (!worklist.isEmpty && relevantBBs.nonEmpty) {
+ if (stat) iterations += 1
+ val point = worklist.iterator.next; worklist -= point;
+ if(relevantBBs(point)) {
+ shrinkedWatchlist = false
+ val output = f(point, in(point))
+ visited += point;
+ if(isOnPerimeter(point)) {
+ if(shrinkedWatchlist && !isWatching(point)) {
+ relevantBBs -= point;
+ populatePerimeter()
+ }
+ } else {
+ val propagate = ((lattice.bottom == out(point)) || output != out(point))
+ if (propagate) {
+ out(point) = output
+ val succs = point.successors filter relevantBBs
+ succs foreach { p =>
+ assert((p.predecessors filter isOnPerimeter).isEmpty)
+ val updated = lattice.lub(List(output, in(p)), p.exceptionHandlerStart)
+ if(updated != in(p)) {
+ in(p) = updated
+ enqueue(p)
+ }
+ }
+ }
+ }
+ }
}
}
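
Stripped of the block-skipping refinements, the loop above is the classic worklist forward analysis. A minimal, self-contained sketch of that skeleton (hypothetical names, not compiler code; all successor ids are assumed to be keys of `nodes`):

    object ForwardAnalysisSketch {
      final case class Node(id: Int, successors: List[Int])

      // Dequeue a node, apply the transfer function, and re-enqueue successors
      // whose in-state grew under the lub; stop when the worklist is empty.
      def run[S](nodes: Map[Int, Node],
                 entry: Int,
                 bottom: S,
                 lub: (S, S) => S,
                 transfer: (Node, S) => S): Map[Int, S] = {
        val in = scala.collection.mutable.Map.empty[Int, S].withDefaultValue(bottom)
        var worklist = List(entry)
        while (worklist.nonEmpty) {
          val n = nodes(worklist.head)
          worklist = worklist.tail
          val out = transfer(n, in(n.id))
          for (s <- n.successors) {
            val updated = lub(out, in(s))
            if (updated != in(s)) { in(s) = updated; worklist = s :: worklist }
          }
        }
        in.toMap
      }
    }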
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index b5232fff09..8238705cc3 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -1914,7 +1914,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (sym.isInterface) ACC_INTERFACE else 0,
if (finalFlag) ACC_FINAL else 0,
if (sym.isStaticMember) ACC_STATIC else 0,
- if (sym.isBridge) ACC_BRIDGE else 0,
+ if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
if (sym.isVarargsMethod) ACC_VARARGS else 0
)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 66f802f74f..3d7f08cebe 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -38,6 +38,33 @@ abstract class Inliners extends SubComponent {
res
}
+    /** Look up the implementation of method `sym` in `clazz`.
+ */
+ def lookupImplFor(sym: Symbol, clazz: Symbol): Symbol = {
+ // TODO: verify that clazz.superClass is equivalent here to clazz.tpe.parents(0).typeSymbol (.tpe vs .info)
+ def needsLookup = (
+ (clazz != NoSymbol)
+ && (clazz != sym.owner)
+ && !sym.isEffectivelyFinal
+ && clazz.isEffectivelyFinal
+ )
+ def lookup(clazz: Symbol): Symbol = {
+ // println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
+ if (sym.owner == clazz || isBottomType(clazz)) sym
+ else sym.overridingSymbol(clazz) match {
+ case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
+ case imp => imp
+ }
+ }
+ if (needsLookup) {
+ val concreteMethod = lookup(clazz)
+ debuglog("\tlooked up method: " + concreteMethod.fullName)
+
+ concreteMethod
+ }
+ else sym
+ }
+
/* A warning threshold */
private final val MAX_INLINE_MILLIS = 2000
@@ -67,8 +94,7 @@ abstract class Inliners extends SubComponent {
try {
super.run()
} finally {
- inliner.NonPublicRefs.usesNonPublics.clear()
- inliner.recentTFAs.clear
+ inliner.clearCaches()
}
}
}
@@ -80,6 +106,21 @@ abstract class Inliners extends SubComponent {
def isClosureClass(cls: Symbol): Boolean =
cls.isFinal && cls.isSynthetic && !cls.isModuleClass && cls.isAnonymousFunction
+ /*
+ TODO now that Inliner runs faster we could consider additional "monadic methods" (in the limit, all those taking a closure as last arg)
+ Any "monadic method" occurring in a given caller C that is not `isMonadicMethod()` will prevent CloseElim from eliminating
+     any anonymous closure class whose instances are given as argument to C invocations.
+ */
+ def isMonadicMethod(sym: Symbol) = {
+ nme.unspecializedName(sym.name) match {
+ case nme.foreach | nme.filter | nme.withFilter | nme.map | nme.flatMap => true
+ case _ => false
+ }
+ }
+
+ def hasInline(sym: Symbol) = sym hasAnnotation ScalaInlineClass
+ def hasNoInline(sym: Symbol) = sym hasAnnotation ScalaNoInlineClass
+
/**
* Simple inliner.
*/
@@ -92,9 +133,6 @@ abstract class Inliners extends SubComponent {
}
import NonPublicRefs._
- private def hasInline(sym: Symbol) = sym hasAnnotation ScalaInlineClass
- private def hasNoInline(sym: Symbol) = sym hasAnnotation ScalaNoInlineClass
-
/** The current iclass */
private var currentIClazz: IClass = _
private def warn(pos: Position, msg: String) = currentIClazz.cunit.warning(pos, msg)
@@ -121,6 +159,21 @@ abstract class Inliners extends SubComponent {
(hasRETURN, a)
}
+ def clearCaches() {
+ // methods
+ NonPublicRefs.usesNonPublics.clear()
+ recentTFAs.clear
+ tfa.knownUnsafe.clear()
+ tfa.knownSafe.clear()
+ tfa.knownNever.clear()
+ // basic blocks
+ tfa.preCandidates.clear()
+ tfa.relevantBBs.clear()
+ // callsites
+ tfa.remainingCALLs.clear()
+ tfa.isOnWatchlist.clear()
+ }
+
def analyzeClass(cls: IClass): Unit =
if (settings.inline.value) {
debuglog("Analyzing " + cls)
@@ -142,7 +195,38 @@ abstract class Inliners extends SubComponent {
val splicedBlocks = mutable.Set.empty[BasicBlock]
val staleIn = mutable.Set.empty[BasicBlock]
+ /**
+ * A transformation local to the body of the argument.
+     * An inlining decision consists of replacing a callsite with the body of the callee.
+ * Please notice that, because `analyzeMethod()` itself may modify a method body,
+ * the particular callee bodies that end up being inlined depend on the particular order in which methods are visited
+ * (no topological ordering over the call-graph is attempted).
+ *
+ * Making an inlining decision requires type-flow information for both caller and callee.
+ * Regarding the caller, such information is needed only for basic blocks containing inlining candidates
+ * (and their transitive predecessors). This observation leads to using a custom type-flow analysis (MTFAGrowable)
+ * that can be re-inited, i.e. that reuses lattice elements (type-flow information) computed in a previous iteration
+ * as starting point for faster convergence in a new iteration.
+ *
+ * The mechanics of inlining are iterative for a given invocation of `analyzeMethod(m)`,
+ * thus considering the basic blocks that successful inlining added in a previous iteration:
+ *
+ * (1) before the iterations proper start, so-called preinlining is performed.
+ * Those callsites whose (receiver, concreteMethod) are both known statically
+ * can be analyzed for inlining before computing a type-flow. Details in `preInline()`
+ *
+ * (2) the first iteration computes type-flow information for basic blocks containing inlining candidates
+     *     (and their transitive predecessors), so-called `relevantBBs`.
+ * The ensuing analysis of each candidate (performed by `analyzeInc()`)
+ * may result in a CFG isomorphic to that of the callee being inserted where the callsite was
+ * (i.e. a CALL_METHOD instruction is replaced with a single-entry single-exit CFG, which we call "successful inlining").
+ *
+ * (3) following iterations have their relevant basic blocks updated to focus
+ * on the inlined basic blocks and their successors only. Details in `MTFAGrowable.reinit()`
+ * */
def analyzeMethod(m: IMethod): Unit = {
+ // m.normalize
+
var sizeBeforeInlining = m.code.blockCount
var instrBeforeInlining = m.code.instructionCount
var retry = false
@@ -154,17 +238,53 @@ abstract class Inliners extends SubComponent {
val inlinedMethodCount = mutable.HashMap.empty[Symbol, Int] withDefaultValue 0
val caller = new IMethodInfo(m)
- var info: tfa.lattice.Elem = null
- def analyzeInc(msym: Symbol, i: Instruction, bb: BasicBlock): Boolean = {
- var inlined = false
- def paramTypes = msym.info.paramTypes
- val receiver = (info.stack.types drop paramTypes.length) match {
- case Nil => log("analyzeInc(" + msym + "), no type on the stack!") ; NoSymbol
- case REFERENCE(s) :: _ => s
- case _ => NoSymbol
+ def preInline(isFirstRound: Boolean): Int = {
+ val inputBlocks = caller.m.linearizedBlocks()
+ val callsites: Function1[BasicBlock, List[opcodes.CALL_METHOD]] = {
+ if(isFirstRound) tfa.conclusives else tfa.knownBeforehand
}
- val concreteMethod = lookupImplFor(msym, receiver)
+ inlineWithoutTFA(inputBlocks, callsites)
+ }
+
+ /**
+ * Inline straightforward callsites (those that can be inlined without a TFA).
+ *
+ * To perform inlining, all we need to know is listed as formal params in `analyzeInc()`:
+ * - callsite and block containing it
+ * - actual (ie runtime) class of the receiver
+ * - actual (ie runtime) method being invoked
+ * - stack length just before the callsite (to check whether enough arguments have been pushed).
+ * The assert below lists the conditions under which "no TFA is needed"
+       * (the statically known receiver and method are both final, so at runtime they can't be anything other than those).
+ *
+ */
+ def inlineWithoutTFA(inputBlocks: Traversable[BasicBlock], callsites: Function1[BasicBlock, List[opcodes.CALL_METHOD]]): Int = {
+ var inlineCount = 0
+ import scala.util.control.Breaks._
+ for(x <- inputBlocks; val easyCake = callsites(x); if easyCake.nonEmpty) {
+ breakable {
+ for(ocm <- easyCake) {
+ assert(ocm.method.isEffectivelyFinal && ocm.method.owner.isEffectivelyFinal)
+ if(analyzeInc(ocm, x, ocm.method.owner, -1, ocm.method)) {
+ inlineCount += 1
+ break
+ }
+ }
+ }
+ }
+
+ inlineCount
+ }
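
The `breakable`/`break` pattern above — attempt a block's candidates in order and stop at the first successful inlining, since the block's remaining typeflow facts are stale afterwards — can be sketched in isolation as follows (names are illustrative, not part of this patch):

    object FirstSuccessPerBlockSketch {
      import scala.util.control.Breaks._

      // Returns how many blocks had a successful attempt; at most one success is
      // counted per block, mirroring the loop in `inlineWithoutTFA`.
      def firstSuccessPerBlock[B, C](blocks: List[B])(candidates: B => List[C])(attempt: C => Boolean): Int = {
        var count = 0
        for (b <- blocks; cs = candidates(b); if cs.nonEmpty) {
          breakable {
            for (c <- cs)
              if (attempt(c)) { count += 1; break() }
          }
        }
        count
      }
    }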
+
+ /**
+ Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod`
+ at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed.
+
+ */
+ def analyzeInc(i: CALL_METHOD, bb: BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol): Boolean = {
+ var inlined = false
+ val msym = i.method
def warnNoInline(reason: String) = {
if (hasInline(msym) && !caller.isBridge)
@@ -209,7 +329,7 @@ abstract class Inliners extends SubComponent {
val inc = new IMethodInfo(callee)
val pair = new CallerCalleeInfo(caller, inc, fresh, inlinedMethodCount)
- if (pair isStampedForInlining info.stack) {
+ if (pair isStampedForInlining stackLength) {
retry = true
inlined = true
if (isCountable)
@@ -228,9 +348,9 @@ abstract class Inliners extends SubComponent {
}
else {
if (settings.debug.value)
- pair logFailure info.stack
+ pair logFailure stackLength
- warnNoInline(pair failureReason info.stack)
+ warnNoInline(pair failureReason stackLength)
}
case None =>
warnNoInline("bytecode was not available")
@@ -241,38 +361,96 @@ abstract class Inliners extends SubComponent {
if (!isAvailable) "bytecode was not available"
else "it can be overridden"
)
+
inlined
}
- import scala.util.control.Breaks._
+ /* Pre-inlining consists in invoking the usual inlining subroutine with (receiver class, concrete method) pairs as input
+ * where both method and receiver are final, which implies that the receiver computed via TFA will always match `concreteMethod.owner`.
+ *
+ * As with any invocation of `analyzeInc()` the inlining outcome is based on heuristics which favor inlining an isMonadicMethod before other methods.
+ * That's why preInline() is invoked twice: any inlinings downplayed by the heuristics during the first round get an opportunity to rank higher during the second.
+ *
+ * As a whole, both `preInline()` invocations amount to priming the inlining process,
+ * so that the first TFA run afterwards is able to gain more information as compared to a cold-start.
+ */
+ val totalPreInlines = {
+ val firstRound = preInline(true)
+ if(firstRound == 0) 0 else (firstRound + preInline(false))
+ }
+ staleOut.clear()
+ splicedBlocks.clear()
+ staleIn.clear()
+
do {
retry = false
log("Analyzing " + m + " count " + count + " with " + caller.length + " blocks")
+
+ /* it's important not to inline in unreachable basic blocks. linearizedBlocks() returns only reachable ones. */
+ tfa.callerLin = caller.m.linearizedBlocks()
+ /* TODO Do we want to perform inlining in non-finally exception handlers?
+ * Seems counterproductive (the larger the method the less likely it will be JITed).
+ * The alternative above would be `linearizer.linearizeAt(caller.m, caller.m.startBlock)`.
+ * See also comment on the same topic in TypeFlowAnalysis. */
+
tfa.reinit(m, staleOut.toList, splicedBlocks, staleIn)
tfa.run
staleOut.clear()
splicedBlocks.clear()
staleIn.clear()
- caller.m.linearizedBlocks() foreach { bb =>
- info = tfa in bb
-
+ import scala.util.control.Breaks._
+ for(bb <- tfa.callerLin; if tfa.preCandidates(bb)) {
+ val cms = bb.toList collect { case cm : CALL_METHOD => cm }
breakable {
- for (i <- bb) {
- i match {
- // Dynamic == normal invocations
- // Static(true) == calls to private members
- case CALL_METHOD(msym, Dynamic | Static(true)) if !msym.isConstructor =>
- if (analyzeInc(msym, i, bb)) {
- break
- }
- case _ => ()
+ for (cm <- cms; if tfa.remainingCALLs.isDefinedAt(cm)) {
+ val analysis.CallsiteInfo(_, receiver, stackLength, concreteMethod) = tfa.remainingCALLs(cm)
+ if (analyzeInc(cm, bb, receiver, stackLength, concreteMethod)) {
+ break
}
- info = tfa.interpret(info, i)
}
}
+ }
+
+ /* As part of inlining, some instructions are moved to a new block.
+ * In detail: the instructions moved to a new block originally appeared after a (by now inlined) callsite.
+ * Their new home is an `afterBlock` created by `doInline()` to that effect.
+ * Each block in staleIn is one such `afterBlock`.
+ *
+       * Some of those instructions may be CALL_METHODs possibly tracked in `remainingCALLs`
+ * (with an entry still noting the old containing block). However, that causes no problem:
+ *
+ * (1) such callsites won't be analyzed for inlining by `analyzeInc()` (*in this iteration*)
+       *       because of the `break` that abandons the original basic block where they were contained.
+ *
+ * (2) Additionally, its new containing block won't be visited either (*in this iteration*)
+ * because the new blocks don't show up in the linearization computed before inlinings started:
+ * `for(bb <- tfa.callerLin; if tfa.preCandidates(bb)) {`
+ *
+       * In the next iteration, the new home of any instructions that have moved
+       *   will be tracked properly in `remainingCALLs` after `MTFAGrowable.reinit()` puts their new homes on the radar.
+ *
+ */
+ if(retry) {
+ for(afterBlock <- staleIn) {
+ val justCALLsAfter = afterBlock.toList collect { case c : opcodes.CALL_METHOD => c }
+ for(ia <- justCALLsAfter) { tfa.remainingCALLs.remove(ia) }
+ }
+ }
+ /*
+ if(splicedBlocks.nonEmpty) { // TODO explore (saves time but leads to slightly different inlining decisions)
+ // opportunistically perform straightforward inlinings before the next typeflow round
+ val savedRetry = retry
+ val savedStaleOut = staleOut.toSet; staleOut.clear()
+ val savedStaleIn = staleIn.toSet ; staleIn.clear()
+ val howmany = inlineWithoutTFA(splicedBlocks, tfa.knownBeforehand)
+ splicedBlocks ++= staleIn
+ staleOut.clear(); staleOut ++= savedStaleOut;
+ staleIn.clear(); staleIn ++= savedStaleIn;
+ retry = savedRetry
}
+ */
if (tfa.stat)
log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + caller.length + ")")
@@ -288,13 +466,6 @@ abstract class Inliners extends SubComponent {
}
}
- private def isMonadicMethod(sym: Symbol) = {
- nme.unspecializedName(sym.name) match {
- case nme.foreach | nme.filter | nme.withFilter | nme.map | nme.flatMap => true
- case _ => false
- }
- }
-
private def isHigherOrderMethod(sym: Symbol) =
sym.isMethod && atPhase(currentRun.erasurePhase.prev)(sym.info.paramTypes exists isFunctionType)
@@ -308,33 +479,6 @@ abstract class Inliners extends SubComponent {
res
}
- /** Look up implementation of method 'sym in 'clazz'.
- */
- def lookupImplFor(sym: Symbol, clazz: Symbol): Symbol = {
- // TODO: verify that clazz.superClass is equivalent here to clazz.tpe.parents(0).typeSymbol (.tpe vs .info)
- def needsLookup = (
- (clazz != NoSymbol)
- && (clazz != sym.owner)
- && !sym.isEffectivelyFinal
- && clazz.isEffectivelyFinal
- )
- def lookup(clazz: Symbol): Symbol = {
- // println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
- if (sym.owner == clazz || isBottomType(clazz)) sym
- else sym.overridingSymbol(clazz) match {
- case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
- case imp => imp
- }
- }
- if (needsLookup) {
- val concreteMethod = lookup(clazz)
- debuglog("\tlooked up method: " + concreteMethod.fullName)
-
- concreteMethod
- }
- else sym
- }
-
class IMethodInfo(val m: IMethod) {
val sym = m.symbol
val name = sym.name
@@ -386,10 +530,13 @@ abstract class Inliners extends SubComponent {
/** Inline 'inc' into 'caller' at the given block and instruction.
* The instruction must be a CALL_METHOD.
*/
- def doInline(block: BasicBlock, instr: Instruction) {
+ def doInline(block: BasicBlock, instr: CALL_METHOD) {
staleOut += block
+      tfa.remainingCALLs.remove(instr)   // this bookkeeping is done here and not in MTFAGrowable.reinit due to (1st) convenience and (2nd) necessity.
+ tfa.isOnWatchlist.remove(instr) // ditto
+
val targetPos = instr.pos
log("Inlining " + inc.m + " in " + caller.m + " at pos: " + posToStr(targetPos))
@@ -557,10 +704,10 @@ abstract class Inliners extends SubComponent {
if (settings.debug.value) icodes.checkValid(caller.m)
}
- def isStampedForInlining(stack: TypeStack) =
- !sameSymbols && inc.m.hasCode && shouldInline && isSafeToInline(stack)
+ def isStampedForInlining(stackLength: Int) =
+ !sameSymbols && inc.m.hasCode && shouldInline && isSafeToInline(stackLength)
- def logFailure(stack: TypeStack) = log(
+ def logFailure(stackLength: Int) = log(
"""|inline failed for %s:
| pair.sameSymbols: %s
| inc.numInlined < 2: %s
@@ -569,13 +716,13 @@ abstract class Inliners extends SubComponent {
| shouldInline: %s
""".stripMargin.format(
inc.m, sameSymbols, inlinedMethodCount(inc.sym) < 2,
- inc.m.hasCode, isSafeToInline(stack), shouldInline
+ inc.m.hasCode, isSafeToInline(stackLength), shouldInline
)
)
- def failureReason(stack: TypeStack) =
+ def failureReason(stackLength: Int) =
if (!inc.m.hasCode) "bytecode was unavailable"
- else if (!isSafeToInline(stack)) "it is unsafe (target may reference private fields)"
+ else if (!isSafeToInline(stackLength)) "it is unsafe (target may reference private fields)"
else "of a bug (run with -Ylog:inline -Ydebug for more information)"
def canAccess(level: NonPublicRefs.Value) = level match {
@@ -587,15 +734,26 @@ abstract class Inliners extends SubComponent {
private def sameOwner = caller.owner == inc.owner
/** A method is safe to inline when:
- * - it does not contain calls to private methods when
- * called from another class
+ * - it does not contain calls to private methods when called from another class
* - it is not inlined into a position with non-empty stack,
* while having a top-level finalizer (see liftedTry problem)
* - it is not recursive
* Note:
* - synthetic private members are made public in this pass.
*/
- def isSafeToInline(stack: TypeStack): Boolean = {
+ def isSafeToInline(stackLength: Int): Boolean = {
+
+ if(tfa.blackballed(inc.sym)) { return false }
+ if(tfa.knownSafe(inc.sym)) { return true }
+
+ if(helperIsSafeToInline(stackLength)) {
+ tfa.knownSafe += inc.sym; true
+ } else {
+ tfa.knownUnsafe += inc.sym; false
+ }
+ }
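
The caching idea above is simply a memoized predicate with separate positive and negative answer sets, so the underlying check runs at most once per callee while the caches remain valid. A standalone sketch of the same shape (hypothetical names, not the compiler's API):

    object SafetyCacheSketch {
      import scala.collection.mutable

      final class Memo[A](check: A => Boolean) {
        private val knownSafe   = mutable.Set.empty[A]
        private val knownUnsafe = mutable.Set.empty[A]

        // Consult the negative cache first, then the positive one, and only then
        // run the (possibly expensive) check, recording its outcome.
        def apply(a: A): Boolean =
          if (knownUnsafe(a)) false
          else if (knownSafe(a)) true
          else if (check(a)) { knownSafe += a; true }
          else { knownUnsafe += a; false }

        def clear(): Unit = { knownSafe.clear(); knownUnsafe.clear() }
      }
    }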
+
+ private def helperIsSafeToInline(stackLength: Int): Boolean = {
def makePublic(f: Symbol): Boolean =
(inc.m.sourceFile ne NoSourceFile) && (f.isSynthetic || f.isParamAccessor) && {
debuglog("Making not-private symbol out of synthetic: " + f)
@@ -642,9 +800,10 @@ abstract class Inliners extends SubComponent {
})
canAccess(accessNeeded) && {
- val isIllegalStack = (stack.length > inc.minimumStack && inc.hasNonFinalizerHandler)
+ val isIllegalStack = (stackLength > inc.minimumStack && inc.hasNonFinalizerHandler)
+
!isIllegalStack || {
- debuglog("method " + inc.sym + " is used on a non-empty stack with finalizer. Stack: " + stack)
+ debuglog("method " + inc.sym + " is used on a non-empty stack with finalizer.")
false
}
}
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index f251fd83fb..0539c885c2 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -22,6 +22,7 @@ import scala.tools.util.PathResolver
* changes require a compilation. It repeats this process until
* a fixpoint is reached.
*/
+@deprecated("Use sbt incremental compilation mechanism", "2.10.0")
class RefinedBuildManager(val settings: Settings) extends Changes with BuildManager {
class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter) {
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
index c742ab89c0..48a5fa9e34 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -118,8 +118,9 @@ trait MemberHandlers {
class DefHandler(member: DefDef) extends MemberDefHandler(member) {
private def vparamss = member.vparamss
- // true if 0-arity
- override def definesValue = vparamss.isEmpty || vparamss.head.isEmpty
+ private def isMacro = member.mods.hasFlag(scala.reflect.internal.Flags.MACRO)
+ // true if not a macro and 0-arity
+ override def definesValue = !isMacro && (vparamss.isEmpty || vparamss.head.isEmpty)
override def resultExtractionCode(req: Request) =
if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
}
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 0c94e40d68..3490c1f5a8 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -393,8 +393,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
// assumed true unless we see public/private/protected
var isPackageAccess = true
var annots: List[Tree] = Nil
- def addAnnot(sym: Symbol) =
- annots :+= New(TypeTree(sym.tpe), List(Nil))
+ def addAnnot(sym: Symbol) = annots :+= New(sym)
while (true) {
in.token match {
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index d1c71faf1e..820a8d7773 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -516,14 +516,9 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
}
- def delayedInitCall(closure: Tree) =
- localTyper.typed {
- atPos(impl.pos) {
- Apply(
- Select(This(clazz), delayedInitMethod),
- List(New(TypeTree(closure.symbol.tpe), List(List(This(clazz))))))
- }
- }
+ def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
+ gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol, This(clazz))))
+ }
/** Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
def splitAtSuper(stats: List[Tree]) = {
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 7f7f7e7b65..252b3ccffc 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -517,7 +517,7 @@ abstract class ExplicitOuter extends InfoTransform
super.transform(treeCopy.Apply(tree, sel, outerVal :: args))
// entry point for pattern matcher translation
- case mch: Match =>
+ case mch: Match if (!opt.virtPatmat) => // don't use old pattern matcher as fallback when the user wants the virtualizing one
matchTranslation(mch)
case _ =>
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 4fc7b9f92f..5c015944c8 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -420,7 +420,7 @@ abstract class LambdaLift extends InfoTransform {
case Try(block, catches, finalizer) =>
Try(refConstr(block), catches map refConstrCase, finalizer)
case _ =>
- Apply(Select(New(TypeTree(sym.tpe)), nme.CONSTRUCTOR), List(expr))
+ New(sym, expr)
}
def refConstrCase(cdef: CaseDef): CaseDef =
CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 05f5dbc379..bb0388e9f8 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -69,7 +69,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
ScalaValueClasses, isValueClass, isScalaValueType,
SpecializedClass, RepeatedParamClass, JavaRepeatedParamClass,
AnyRefClass, ObjectClass, AnyRefModule,
- GroupOfSpecializable, uncheckedVarianceClass
+ GroupOfSpecializable, uncheckedVarianceClass, ScalaInlineClass
}
/** TODO - this is a lot of maps.
@@ -87,9 +87,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Map class symbols to the type environments where they were created. */
private val typeEnv = mutable.HashMap[Symbol, TypeEnv]() withDefaultValue emptyEnv
- // holds mappings from regular type parameter symbols to symbols of
- // specialized type parameters which are subtypes of AnyRef
- private val anyrefSpecCache = perRunCaches.newMap[Symbol, Symbol]()
+ // Key: a specialized class or method
+ // Value: a map from tparams in the original class to tparams in the specialized class.
+ private val anyrefSpecCache = perRunCaches.newMap[Symbol, mutable.Map[Symbol, Symbol]]()
// holds mappings from members to the type variables in the class
// that they were already specialized for, so that they don't get
@@ -104,15 +104,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def specializedTypes(tps: List[Symbol]) = tps filter isSpecialized
private def specializedOn(sym: Symbol): List[Symbol] = {
sym getAnnotation SpecializedClass match {
- case Some(ann @ AnnotationInfo(_, args, _)) =>
+ case Some(AnnotationInfo(_, Nil, _)) => specializableTypes.map(_.typeSymbol)
+ case Some(ann @ AnnotationInfo(_, args, _)) => {
args map (_.tpe) flatMap { tp =>
tp baseType GroupOfSpecializable match {
case TypeRef(_, GroupOfSpecializable, arg :: Nil) =>
arg.typeArgs map (_.typeSymbol)
- case _ =>
+ case _ =>
List(tp.typeSymbol)
}
}
+ }
case _ => Nil
}
}
@@ -120,11 +122,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe),
// then pos/spec-List.scala fails - why? Does this kind of check fail
// for similar reasons? Does `sym.isAbstractType` make a difference?
- private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = (
- (specializedOn(sym) contains AnyRefModule)
- && !isValueClass(tp.typeSymbol)
- && isBoundedGeneric(tp)
- )
+ private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = {
+ specializedOn(sym).exists(s => !isValueClass(s)) &&
+ !isValueClass(tp.typeSymbol) &&
+ isBoundedGeneric(tp)
+ //(tp <:< AnyRefClass.tpe)
+ }
private def isBoundedGeneric(tp: Type) = tp match {
case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefClass.tpe)
case TypeRef(_, sym, _) => !isValueClass(sym)
@@ -279,10 +282,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val pre1 = this(pre)
// when searching for a specialized class, take care to map all
// type parameters that are subtypes of AnyRef to AnyRef
- val args1 = map2(args, sym.typeParams) {
- case (tp, orig) if isSpecializedAnyRefSubtype(tp, orig) => AnyRefClass.tpe
- case (tp, _) => tp
- }
+ val args1 = map2(args, sym.info.typeParams)((tp, orig) =>
+ if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefClass.tpe
+ else tp
+ )
specializedClass.get((sym, TypeEnv.fromSpecialization(sym, args1))) match {
case Some(sym1) => typeRef(pre1, sym1, survivingArgs(sym, args))
case None => typeRef(pre1, sym, args)
@@ -305,7 +308,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else specializedTypeVars(sym).intersect(env.keySet)
)
val (methparams, others) = tvars.toList sortBy ("" + _.name) partition (_.owner.isMethod)
- debuglog("specName(" + sym + ") env: " + env + " tvars: " + tvars)
+ log("specName(" + sym + ") env: " + env + " tvars: " + tvars)
specializedName(sym.name, methparams map env, others map env)
}
@@ -322,10 +325,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
nme.getterToLocal(specializedName(nme.localToGetter(name), types1, types2))
else {
val (base, cs, ms) = nme.splitSpecializedName(name)
- val abbrevs = definitions.abbrvTag withDefaultValue definitions.abbrvTag(ObjectClass)
newTermName(base.toString + "$"
- + "m" + ms + types1.map(t => abbrevs(t.typeSymbol)).mkString("", "", "")
- + "c" + cs + types2.map(t => abbrevs(t.typeSymbol)).mkString("", "", "$sp"))
+ + "m" + ms + types1.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "")
+ + "c" + cs + types2.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
}
}
@@ -345,13 +347,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* These are in a meaningful order for stability purposes.
*/
def concreteTypes(sym: Symbol): List[Type] = {
- val types = (
- if (!isSpecialized(sym)) Nil // no @specialized Annotation
- else specializedOn(sym) match {
- case Nil => specializableTypes // specialized on everything
- case args => args map (s => specializesClass(s).tpe) sorted // specialized on args
- }
- )
+ val types = if (!isSpecialized(sym))
+ Nil // no @specialized Annotation
+ else
+ specializedOn(sym) map (s => specializesClass(s).tpe) sorted
+
if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass))
reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
@@ -370,8 +370,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case set :: Nil => set map (x => List(x))
case set :: sets => for (x <- set ; xs <- loop(sets)) yield x :: xs
}
- // zip the keys with each permutation to create a TypeEnv
- loop(keys map concreteTypes) map (xss => Map(keys zip xss: _*))
+ // zip the keys with each permutation to create a TypeEnv.
+ // If we don't exclude the "all AnyRef" specialization, we will
+ // incur duplicate members and crash during mixin.
+ loop(keys map concreteTypes) filterNot (_ forall (_ <:< AnyRefClass.tpe)) map (xss => Map(keys zip xss: _*))
}
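
The `loop` above computes the cartesian product of the per-key concrete types before zipping the keys back in; a standalone sketch of that combination step (illustrative names only; the real code additionally drops the all-AnyRef environment, as the comment notes):

    object SpecializationCombinations {
      // One element is chosen from each set; combining an empty list of sets yields Nil.
      def combinations[A](sets: List[List[A]]): List[List[A]] = sets match {
        case Nil         => Nil
        case set :: Nil  => set.map(List(_))
        case set :: rest => for (x <- set; xs <- combinations(rest)) yield x :: xs
      }

      // Pair every combination back with its keys, yielding one environment per combination.
      def envs[K, A](keys: List[K], perKey: K => List[A]): List[Map[K, A]] =
        combinations(keys.map(perKey)).map(xs => keys.zip(xs).toMap)
    }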
/** Does the given 'sym' need to be specialized in the environment 'env'?
@@ -428,27 +430,24 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case _ => Set()
}
- /** Returns the type parameter in the specialized class `clazz` that corresponds to type parameter
- * `sym` in the original class. It will create it if needed or use the one from the cache.
+ /** Returns the type parameter in the specialized class `sClass` that corresponds to type parameter
+ * `tparam` in the original class. It will create it if needed or use the one from the cache.
*/
- private def typeParamSubAnyRef(sym: Symbol, clazz: Symbol) = (
- anyrefSpecCache.getOrElseUpdate(sym,
- clazz.newTypeParameter(sym.name append nme.SPECIALIZED_SUFFIX_NAME toTypeName, sym.pos)
- setInfo TypeBounds(sym.info.bounds.lo, AnyRefClass.tpe)
+ private def typeParamSubAnyRef(tparam: Symbol, sClass: Symbol): Type = {
+ val sClassMap = anyrefSpecCache.getOrElseUpdate(sClass, mutable.Map[Symbol, Symbol]())
+
+ sClassMap.getOrElseUpdate(tparam,
+ tparam.cloneSymbol(sClass, tparam.flags, tparam.name append tpnme.SPECIALIZED_SUFFIX)
+ modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
).tpe
- )
+ }
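
The new cache is a lazily populated two-level map: an outer map keyed by the specialized class, holding an inner original-tparam to fresh-tparam map, both filled via `getOrElseUpdate`. A small sketch of the same shape (hypothetical names, not the compiler's API):

    object TwoLevelCacheSketch {
      import scala.collection.mutable

      final class Cache[Outer, Inner, V](mk: (Outer, Inner) => V) {
        private val cache = mutable.Map.empty[Outer, mutable.Map[Inner, V]]

        // Create the inner map on first use of `o`, and the value on first use of (o, i).
        def apply(o: Outer, i: Inner): V =
          cache.getOrElseUpdate(o, mutable.Map.empty[Inner, V])
               .getOrElseUpdate(i, mk(o, i))

        // Drop everything cached for one outer key (cf. cleanAnyRefSpecCache).
        def clear(o: Outer): Unit = { cache.remove(o); () }
      }
    }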
/** Cleans the anyrefSpecCache of all type parameter symbols of a class.
*/
- private def cleanAnyRefSpecCache(clazz: Symbol, decls: List[Symbol]) = (
+ private def cleanAnyRefSpecCache(clazz: Symbol, decls: List[Symbol]) {
// remove class type parameters and those of normalized members.
- clazz :: decls foreach {
- _.tpe match {
- case PolyType(tparams, _) => anyrefSpecCache --= tparams
- case _ => ()
- }
- }
- )
+ clazz :: decls foreach (anyrefSpecCache remove _)
+ }
/** Type parameters that survive when specializing in the specified environment. */
def survivingParams(params: List[Symbol], env: TypeEnv) =
@@ -832,7 +831,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
log("-->d SETTING PRIVATE WITHIN TO " + sym.enclosingPackage + " for " + sym)
}
- sym.resetFlag(FINAL)
val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
@@ -963,6 +961,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
case object UnifyError extends scala.util.control.ControlThrowable
+ private[this] def unifyError(tp1: Any, tp2: Any): Nothing = {
+ log("unifyError" + ((tp1, tp2)))
+ throw UnifyError
+ }
/** Return the most general type environment that specializes tp1 to tp2.
* It only allows binding of type parameters annotated with @specialized.
@@ -973,29 +975,34 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean): TypeEnv = (tp1, tp2) match {
case (TypeRef(_, sym1, _), _) if isSpecialized(sym1) =>
debuglog("Unify - basic case: " + tp1 + ", " + tp2)
- if (isValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1))
+ if (isValueClass(tp2.typeSymbol))
env + ((sym1, tp2))
+ else if (isSpecializedAnyRefSubtype(tp2, sym1))
+ env + ((sym1, tp2)) // env + ((sym1, AnyRefClass.tpe))
+ else if (strict)
+ unifyError(tp1, tp2)
else
- if (strict) throw UnifyError else env
+ env
case (TypeRef(_, sym1, args1), TypeRef(_, sym2, args2)) =>
debuglog("Unify TypeRefs: " + tp1 + " and " + tp2 + " with args " + (args1, args2) + " - ")
- if (strict && args1.length != args2.length) throw UnifyError
+ if (strict && args1.length != args2.length) unifyError(tp1, tp2)
val e = unify(args1, args2, env, strict)
debuglog("unified to: " + e)
e
case (TypeRef(_, sym1, _), _) if sym1.isTypeParameterOrSkolem =>
env
case (MethodType(params1, res1), MethodType(params2, res2)) =>
- if (strict && params1.length != params2.length) throw UnifyError
+ if (strict && params1.length != params2.length) unifyError(tp1, tp2)
debuglog("Unify MethodTypes: " + tp1 + " and " + tp2)
unify(res1 :: (params1 map (_.tpe)), res2 :: (params2 map (_.tpe)), env, strict)
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
- if (strict && tparams1.length != tparams2.length) throw UnifyError
debuglog("Unify PolyTypes: " + tp1 + " and " + tp2)
- unify(res1, res2, env, strict)
- case (PolyType(_, res), other) =>
- unify(res, other, env, strict)
- case (ThisType(_), ThisType(_)) => env
+ if (strict && tparams1.length != tparams2.length)
+ unifyError(tp1, tp2)
+ else
+ unify(res1, res2, env, strict)
+ case (PolyType(_, res), other) => unify(res, other, env, strict)
+ case (ThisType(_), ThisType(_)) => env
case (_, SingleType(_, _)) => unify(tp1, tp2.underlying, env, strict)
case (SingleType(_, _), _) => unify(tp1.underlying, tp2, env, strict)
case (ThisType(_), _) => unify(tp1.widen, tp2, env, strict)
@@ -1017,7 +1024,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (env.keySet intersect nenv.keySet isEmpty) env ++ nenv
else {
debuglog("could not unify: u(" + args._1 + ", " + args._2 + ") yields " + nenv + ", env: " + env)
- throw UnifyError
+ unifyError(tp1, tp2)
}
}
}
@@ -1219,18 +1226,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val body: mutable.Map[Symbol, Tree] = new mutable.HashMap
/** Map a specializable method to its value parameter symbols. */
- val parameters: mutable.Map[Symbol, List[List[Symbol]]] = new mutable.HashMap
+ val parameters = mutable.HashMap[Symbol, List[Symbol]]()
/** Collect method bodies that are concrete specialized methods.
*/
class CollectMethodBodies extends Traverser {
override def traverse(tree: Tree) = tree match {
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case DefDef(mods, name, tparams, vparams :: Nil, tpt, rhs) =>
if (concreteSpecMethods(tree.symbol) || tree.symbol.isConstructor) {
debuglog("!!! adding body of a defdef %s, symbol %s: %s".format(tree, tree.symbol, rhs))
body(tree.symbol) = rhs
// body(tree.symbol) = tree // whole method
- parameters(tree.symbol) = mmap(vparamss)(_.symbol)
+ parameters(tree.symbol) = vparams.map(_.symbol)
concreteSpecMethods -= tree.symbol
} // no need to descend further down inside method bodies
@@ -1243,7 +1250,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- def doesConform(origSymbol: Symbol, treeType: Type, memberType: Type, env: TypeEnv) =
+ def doesConform(origSymbol: Symbol, treeType: Type, memberType: Type, env: TypeEnv) = {
(treeType =:= memberType) || { // anyref specialization
memberType match {
case PolyType(_, resTpe) =>
@@ -1260,6 +1267,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case _ => false
}
}
+ }
override def transform(tree: Tree): Tree = {
val symbol = tree.symbol
@@ -1290,16 +1298,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
curTree = tree
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
- if (findSpec(tpt.tpe).typeSymbol ne tpt.tpe.typeSymbol) {
+ log("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
+ val found = findSpec(tpt.tpe)
+ if (found.typeSymbol ne tpt.tpe.typeSymbol) {
// the ctor can be specialized
- log("** instantiated specialized type: " + findSpec(tpt.tpe))
- try {
- atPos(tree.pos)(
- localTyper.typed(
- Apply(
- Select(New(TypeTree(findSpec(tpt.tpe))), nme.CONSTRUCTOR),
- transformTrees(args))))
- } catch {
+ log("** instantiated specialized type: " + found)
+ try localTyper.typedPos(tree.pos)(New(found, transformTrees(args): _*))
+ catch {
case te: TypeError =>
reporter.error(tree.pos, te.msg)
super.transform(tree)
@@ -1526,12 +1531,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- private def reskolemize(tparams: List[TypeDef]): (List[Symbol], List[Symbol]) = {
- val saved = tparams map (_.symbol)
- localTyper skolemizeTypeParams tparams
- (saved, tparams map (_.symbol))
- }
-
private def duplicateBody(tree: DefDef, source: Symbol) = {
val symbol = tree.symbol
val meth = addBody(tree, source)
@@ -1555,8 +1554,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def addBody(tree: DefDef, source: Symbol): DefDef = {
val symbol = tree.symbol
- debuglog("specializing body of" + symbol.fullName + ": " + symbol.info)
- val DefDef(mods, name, tparams, vparamss, tpt, _) = tree
+ debuglog("specializing body of" + symbol.defString)
+ val DefDef(mods, name, tparams, vparams :: Nil, tpt, _) = tree
// val (_, origtparams) = splitParams(source.typeParams)
val env = typeEnv(symbol)
val boundTvars = env.keySet
@@ -1564,12 +1563,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("substituting " + origtparams + " for " + symbol.typeParams)
// skolemize type parameters
- val (oldtparams, newtparams) = reskolemize(tparams)
+ val oldtparams = tparams map (_.symbol)
+ val newtparams = deriveFreshSkolems(oldtparams)
+ map2(tparams, newtparams)(_ setSymbol _)
// create fresh symbols for value parameters to hold the skolem types
- val vparamss1 = List(for (vdef <- vparamss.head; param = vdef.symbol) yield {
- ValDef(param cloneSymbol symbol substInfo (oldtparams, newtparams))
- })
+ val newSyms = cloneSymbolsAtOwnerAndModify(vparams map (_.symbol), symbol, _.substSym(oldtparams, newtparams))
// replace value and type parameters of the old method with the new ones
// log("Adding body for " + tree.symbol + " - origtparams: " + origtparams + "; tparams: " + tparams)
@@ -1577,14 +1576,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// log("Type env of: " + tree.symbol + ": " + boundTvars)
// log("newtparams: " + newtparams)
val symSubstituter = new ImplementationAdapter(
- parameters(source).flatten ::: origtparams,
- vparamss1.flatten.map(_.symbol) ::: newtparams,
+ parameters(source) ::: origtparams,
+ newSyms ::: newtparams,
source.enclClass,
false) // don't make private fields public
- val tmp = symSubstituter(body(source).duplicate)
+
+ val newBody = symSubstituter(body(source).duplicate)
tpt.tpe = tpt.tpe.substSym(oldtparams, newtparams)
- treeCopy.DefDef(tree, mods, name, tparams, vparamss1, tpt, tmp)
+ treeCopy.DefDef(tree, mods, name, tparams, List(newSyms map ValDef), tpt, newBody)
}
/** Create trees for specialized members of 'sClass', based on the
@@ -1605,9 +1605,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (m.isMethod) {
if (info(m).target.hasAccessorFlag) hasSpecializedFields = true
if (m.isClassConstructor) {
- val origParamss = parameters(info(m).target)
+ val origParams = parameters(info(m).target)
val vparams = (
- map2(m.info.paramTypes, origParamss(0))((tp, sym) =>
+ map2(m.info.paramTypes, origParams)((tp, sym) =>
m.newValue(specializedName(sym, typeEnv(sClass)), sym.pos, sym.flags) setInfo tp
)
)
@@ -1698,7 +1698,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* - there is a getter for the specialized field in the same class
*/
def initializesSpecializedField(f: Symbol) = (
- (f.name endsWith nme.SPECIALIZED_SUFFIX_NAME)
+ (f.name endsWith nme.SPECIALIZED_SUFFIX)
&& clazz.info.member(nme.originalName(f.name)).isPublic
&& clazz.info.decl(f.name).suchThat(_.isGetter) != NoSymbol
)
@@ -1719,7 +1719,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* that here are not garbage collected at the end of a compiler run!
*/
def addConcreteSpecMethod(m: Symbol) {
- if (currentRun.compiles(m)) concreteSpecMethods += m
+ if (!forInteractive && currentRun.compiles(m)) concreteSpecMethods += m
}
private def makeArguments(fun: Symbol, vparams: List[Symbol]): List[Tree] = (
@@ -1733,9 +1733,27 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
class SpecializationTransformer(unit: CompilationUnit) extends Transformer {
informProgress("specializing " + unit)
- override def transform(tree: Tree) =
- if (settings.nospecialization.value) tree
+ override def transform(tree: Tree) = {
+ val resultTree = if (settings.nospecialization.value) tree
else atPhase(phase.next)(specializeCalls(unit).transform(tree))
+
+ // Remove the final modifier and @inline annotation from anything in the
+ // original class (since it's being overridden in at least one subclass).
+ //
+ // We do this here so that the specialized subclasses will correctly copy
+ // final and @inline.
+ info.foreach {
+ case (sym, SpecialOverload(target, _)) => {
+ sym.resetFlag(FINAL)
+ target.resetFlag(FINAL)
+ sym.removeAnnotation(ScalaInlineClass)
+ target.removeAnnotation(ScalaInlineClass)
+ }
+ case _ => {}
+ }
+
+ resultTree
+ }
}
def printSpecStats() {
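
For readers less familiar with the specialize phase, a minimal stand-alone sketch (not part of this patch) of what the constructor rewrite above achieves at the source level; the specialized subclass name mentioned in the comment assumes the usual name-mangling scheme produced by specializedName.

class Box[@specialized(Int) T](val value: T)

object SpecializationDemo {
  def main(args: Array[String]): Unit = {
    // Unless -no-specialization is given, the specialize phase redirects this
    // constructor call to the Int-specialized subclass (conventionally named
    // Box$mcI$sp) -- the decision the rewritten findSpec/New(found, ...) logic makes.
    val b = new Box(42)
    println(b.value)
  }
}
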
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index ab4a2141a9..256c829db0 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -189,7 +189,7 @@ abstract class UnCurry extends InfoTransform
List(TypeTree(meth.tpe.finalResultType))),
List()),
Throw(Ident(ex)))
- val keyDef = ValDef(key, New(TypeTree(ObjectClass.tpe), List(List())))
+ val keyDef = ValDef(key, New(ObjectClass))
val tryCatch = Try(body, List(CaseDef(pat, EmptyTree, rhs)), EmptyTree)
Block(List(keyDef), tryCatch)
}
@@ -228,9 +228,9 @@ abstract class UnCurry extends InfoTransform
* case P_1 if G_1 => E_1
* ...
* case P_n if G_n => true
- * case _ => this.missingCase(x)
+ * case _ => this.missingCase(expr)
* }
- * def isDefinedAtCurrent(x: T): boolean = (x: @unchecked) match {
+ * def _isDefinedAt(x: T): boolean = (x: @unchecked) match {
* case P_1 if G_1 => true
* ...
* case P_n if G_n => true
@@ -240,7 +240,7 @@ abstract class UnCurry extends InfoTransform
* new $anon()
*
* However, if one of the patterns P_i if G_i is a default pattern,
- * drop the last default clause in tghe definition of `apply` and generate for `isDefinedAtCurrent` instead
+ * drop the last default clause in the definition of `apply` and generate for `_isDefinedAt` instead
*
* def isDefinedAtCurrent(x: T): boolean = true
*/
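
A hand-written approximation (not part of this patch) of the expansion sketched in the comment above, written against the public PartialFunction API; the real transform emits missingCase/_isDefinedAt on an anonymous class, but the shape is the same.

object PartialFunctionDemo {
  val pf = new PartialFunction[Int, Int] {
    def apply(x: Int): Int = x match {
      case i if i > 0 => i * 2
      case _          => throw new MatchError(x)   // stands in for this.missingCase(x)
    }
    def isDefinedAt(x: Int): Boolean = x match {
      case i if i > 0 => true
      case _          => false
    }
  }
  def main(args: Array[String]): Unit = {
    println(pf.isDefinedAt(3))   // true
    println(pf.lift(-1))         // None -- the guard fails, so the value is not in the domain
  }
}
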
@@ -291,73 +291,26 @@ abstract class UnCurry extends InfoTransform
val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params)
def substTree[T <: Tree](t: T): T = substParam(resetLocalAttrs(t))
- // waiting here until we can mix case classes and extractors reliably (i.e., when virtpatmat becomes the default)
- // object VirtPatmatOpt {
- // object Last {
- // def unapply[T](xs: List[T]) = xs.lastOption
- // }
- // // keep this in synch by what's generated by combineCases/runOrElse
- // object MatcherBlock {
- // def unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree])] = matcher match { // TODO: BUG the unapplySeq version of the case below does not seem to work in virtpatmat??
- // case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _) => Some(zero, x, matchRes, keepGoing, stats)
- // case _ => None
- // }
- // }
- // // TODO: virtpatmat use case: would be nice if could abstract over the repeated pattern more easily
- // // case Block(Last(P)) =>
- // // case P =>
- // def unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree], Tree => Tree)] = matcher match {
- // case MatcherBlock(zero, x, matchRes, keepGoing, stats) => Some(zero, x, matchRes, keepGoing, stats, identity[Tree])
- // case Block(outerStats, MatcherBlock(zero, x, matchRes, keepGoing, stats)) => Some(zero, x, matchRes, keepGoing, stats, inner => Block(outerStats, inner))
- // case b => treeBrowser browse b; None
- // }
- // }
-
- // TODO: optimize duplication, but make sure ValDef's introduced by wrap are treated correctly
- def dupMatch(selector: Tree, cases: List[CaseDef], wrap: Match => Tree = identity) = {
- def transformCase(cdef: CaseDef): CaseDef =
- CaseDef(cdef.pat, cdef.guard, Literal(Constant(true)))
- def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
-
- gen.mkUncheckedMatch(
- if (cases exists treeInfo.isDefaultCase) Literal(Constant(true))
- else substTree(wrap(Match(selector, (cases map transformCase) :+ defaultCase)).duplicate)
- )
- }
+ object isDefinedAtTransformer extends gen.MatchMatcher {
+ // TODO: optimize duplication, but make sure ValDef's introduced by wrap are treated correctly
+ override def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = {
+ def transformCase(cdef: CaseDef): CaseDef =
+ CaseDef(cdef.pat, cdef.guard, Literal(Constant(true)))
- def dupVirtMatch(zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], wrap: Block => Tree = identity) = {
- object dropMatchResAssign extends Transformer {
- // override val treeCopy = newStrictTreeCopier // will duplicate below
- override def transform(tree: Tree): Tree = tree match {
- // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing
- case Block(List(matchRes, ass@Assign(keepGoingLhs, falseLit)), zero) if keepGoingLhs.symbol eq keepGoing.symbol =>
- Block(List(ass), zero)
- case _ =>
- super.transform(tree)
- }
+ def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
+
+ val casesNoSynthCatchAll = dropSyntheticCatchAll(cases)
+
+ gen.mkUncheckedMatch(
+ if (casesNoSynthCatchAll exists treeInfo.isDefaultCase) Literal(Constant(true))
+ else substTree(wrap(Match(selector, (casesNoSynthCatchAll map transformCase) :+ defaultCase)).duplicate)
+ )
}
- val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList
- val idaBlock = wrap(Block(
- zero ::
- x ::
- /* drop matchRes def */
- keepGoing ::
- statsNoMatchRes,
- NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) else matchRes` by `!keepGoing`
- ))
- substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed
- }
- DefDef(m, (fun.body: @unchecked) match {
- case Match(selector, cases) =>
- dupMatch(selector, cases)
- case Block((vd: ValDef) :: Nil, Match(selector, cases)) => // can't factor this out using an extractor due to bugs in the old pattern matcher
- dupMatch(selector, cases, m => Block(List(vd), m))
- // virtpatmat -- TODO: find a better way to keep this in synch with the code generated by patmatvirtualizer
- case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), args_scrut), args_pm) if opt.virtPatmat =>
+ override def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = {
object noOne extends Transformer {
override val treeCopy = newStrictTreeCopier // must duplicate everything
- val one = tgt.tpe member newTermName("one")
+ val one = _match.tpe member newTermName("one")
override def transform(tree: Tree): Tree = tree match {
case Apply(fun, List(a)) if fun.symbol == one =>
// blow one's argument away since all we want to know is whether the match succeeds or not
@@ -367,15 +320,34 @@ abstract class UnCurry extends InfoTransform
super.transform(tree)
}
}
- substTree(Apply(Apply(TypeApply(Select(tgt.duplicate, tgt.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), args_scrut map (_.duplicate)), args_pm map (noOne.transform)))
- // for the optimized version of virtpatmat
- case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _) if opt.virtPatmat =>
- dupVirtMatch(zero, x, matchRes, keepGoing, stats)
- case Block(outerStats, Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _)) if opt.virtPatmat => // can't factor this out using an extractor due to bugs in the old pattern matcher
- dupVirtMatch(zero, x, matchRes, keepGoing, stats, m => Block(outerStats, m))
- // case other =>
- // treeBrowser browse other
- })
+ substTree(Apply(Apply(TypeApply(Select(_match.duplicate, _match.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), List(scrut.duplicate)), List(noOne.transform(matcher))))
+ }
+
+ override def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree) = {
+ object dropMatchResAssign extends Transformer {
+ // override val treeCopy = newStrictTreeCopier // will duplicate below
+ override def transform(tree: Tree): Tree = tree match {
+ // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing
+ case gen.VirtualCaseDef(assignKeepGoing, matchRes, zero) if assignKeepGoing.lhs.symbol eq keepGoing.symbol =>
+ Block(List(assignKeepGoing), zero)
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList
+ val idaBlock = wrap(Block(
+ zero ::
+ x ::
+ /* drop matchRes def */
+ keepGoing ::
+ statsNoMatchRes,
+ NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) else matchRes` epilogue by `!keepGoing`
+ ))
+ substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed
+ }
+ }
+
+ DefDef(m, isDefinedAtTransformer(fun.body))
}
val members =
@@ -385,9 +357,7 @@ abstract class UnCurry extends InfoTransform
localTyper.typedPos(fun.pos) {
Block(
List(ClassDef(anonClass, NoMods, List(List()), List(List()), members, fun.pos)),
- Typed(
- New(TypeTree(anonClass.tpe), List(List())),
- TypeTree(fun.tpe)))
+ Typed(New(anonClass), TypeTree(fun.tpe)))
}
}
}
@@ -482,19 +452,6 @@ abstract class UnCurry extends InfoTransform
}
}
- /** For removing calls to specially designated methods.
- */
- def elideIntoUnit(tree: Tree): Tree = Literal(Constant()) setPos tree.pos setType UnitClass.tpe
- def isElidable(tree: Tree) = {
- val sym = treeInfo.methPart(tree).symbol
- // XXX settings.noassertions.value temporarily retained to avoid
- // breakage until a reasonable interface is settled upon.
- sym != null && sym.elisionLevel.exists(x => x < settings.elidebelow.value || settings.noassertions.value) && {
- log("Eliding call from " + tree.symbol.owner + " to " + sym + " based on its elision threshold of " + sym.elisionLevel.get)
- true
- }
- }
-
// ------ The tree transformers --------------------------------------------------------
def mainTransform(tree: Tree): Tree = {
@@ -532,8 +489,7 @@ abstract class UnCurry extends InfoTransform
finally this.inConstructorFlag = saved
}
- if (isElidable(tree)) elideIntoUnit(tree)
- else tree match {
+ tree match {
case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
withNeedLift(false) {
@@ -679,7 +635,8 @@ abstract class UnCurry extends InfoTransform
if (dd.symbol hasAnnotation VarargsClass) addJavaVarargsForwarders(dd, flatdd, tree)
flatdd
case Try(body, catches, finalizer) =>
- if (catches forall treeInfo.isCatchCase) tree
+ if (opt.virtPatmat) { if(catches exists (cd => !treeInfo.isCatchCase(cd))) debugwarn("VPM BUG! illegal try/catch "+ catches); tree }
+ else if (catches forall treeInfo.isCatchCase) tree
else {
val exname = unit.freshTermName("ex$")
val cases =
@@ -701,7 +658,7 @@ abstract class UnCurry extends InfoTransform
}
debuglog("rewrote try: " + catches + " ==> " + catchall);
val catches1 = localTyper.typedCases(
- tree, List(catchall), ThrowableClass.tpe, WildcardType)
+ List(catchall), ThrowableClass.tpe, WildcardType)
treeCopy.Try(tree, body, catches1, finalizer)
}
case Apply(Apply(fn, args), args1) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 6ee09d064f..ea5223e32f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -636,11 +636,6 @@ trait ContextErrors {
def CyclicReferenceError(errPos: Position, lockedSym: Symbol) =
issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym))
-
- def MacroExpandError(tree: Tree, t: Any) = {
- issueNormalTypeError(tree, "macros must return a compiler-specific tree; returned class is: " + t.getClass)
- setError(tree)
- }
}
}
@@ -713,10 +708,17 @@ trait ContextErrors {
"constructor cannot be instantiated to expected type" + foundReqMsg(restpe, pt))
setError(tree)
}
-
- def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) =
+
+ def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = {
issueNormalTypeError(tree,
applyErrorMsg(tree, " cannot be applied to ", argtpes, pt))
+ // since inferMethodAlternative modifies the state of the tree
+ // we have to set the type of tree to ErrorType only in the very last
+ // fallback action that is done in the inference (tracking it manually is error prone).
+ // This avoids entering an infinite loop in doTypeApply.
+ // TODO: maybe we should do the same thing with inferExprAlternative.
+ if (implicitly[Context].reportErrors) setError(tree)
+ }
def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol,
firstCompeting: Symbol, argtpes: List[Type], pt: Type) = {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 740acbd10f..8586ebf0d4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -128,6 +128,8 @@ trait Contexts { self: Analyzer =>
var typingIndentLevel: Int = 0
def typingIndent = " " * typingIndentLevel
+
+ var buffer: Set[AbsTypeError] = _
def enclClassOrMethod: Context =
if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
@@ -146,7 +148,6 @@ trait Contexts { self: Analyzer =>
}
private[this] var mode = 0
- private[this] val buffer = LinkedHashSet[AbsTypeError]()
def errBuffer = buffer
def hasErrors = buffer.nonEmpty
@@ -161,7 +162,7 @@ trait Contexts { self: Analyzer =>
def setReportErrors() = mode = (ReportErrors | AmbiguousErrors)
def setBufferErrors() = {
- assert(bufferErrors || !hasErrors, "When entering the buffer state, context has to be clean. Current buffer: " + buffer)
+ //assert(bufferErrors || !hasErrors, "When entering the buffer state, context has to be clean. Current buffer: " + buffer)
mode = BufferErrors
}
def setThrowErrors() = mode &= (~AllMask)
@@ -226,6 +227,7 @@ trait Contexts { self: Analyzer =>
c.checking = this.checking
c.retyping = this.retyping
c.openImplicits = this.openImplicits
+ c.buffer = if (this.buffer == null) LinkedHashSet[AbsTypeError]() else this.buffer // need to initialize
registerContext(c.asInstanceOf[analyzer.Context])
debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
c
@@ -266,6 +268,7 @@ trait Contexts { self: Analyzer =>
val c = make(newtree)
c.setBufferErrors()
c.setAmbiguousErrors(reportAmbiguousErrors)
+ c.buffer = new LinkedHashSet[AbsTypeError]()
c
}
@@ -309,6 +312,7 @@ trait Contexts { self: Analyzer =>
unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
def issue(err: AbsTypeError) {
+ if (settings.debug.value) println("issue error: " + err.errMsg)
if (reportErrors) unitError(err.errPos, addDiagString(err.errMsg))
else if (bufferErrors) { buffer += err }
else throw new TypeError(err.errPos, err.errMsg)
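
A minimal, self-contained sketch (simplified names, not compiler code) of the error routing that issue implements and of the lazily created buffer introduced here; AbsTypeError is replaced by a plain String.

import scala.collection.mutable.LinkedHashSet

class ErrorContext {
  private val ReportErrors = 1
  private val BufferErrors = 2
  private var mode = ReportErrors
  var buffer: LinkedHashSet[String] = _            // created on demand, as in the patch
  def setBufferErrors(): Unit = { mode = BufferErrors }
  def hasErrors: Boolean = buffer != null && buffer.nonEmpty
  def issue(err: String): Unit = {
    if ((mode & ReportErrors) != 0) println("error: " + err)
    else if ((mode & BufferErrors) != 0) {
      if (buffer == null) buffer = LinkedHashSet[String]()
      buffer += err
    }
    else throw new RuntimeException(err)
  }
}

object BufferDemo {
  def main(args: Array[String]): Unit = {
    val ctx = new ErrorContext
    ctx.setBufferErrors()
    ctx.issue("type mismatch")                     // buffered, not printed or thrown
    println(ctx.hasErrors)                         // true
  }
}
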
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index acf905d974..e1aa8b46eb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1388,7 +1388,7 @@ trait Infer {
NoBestExprAlternativeError(tree, pt)
} else if (!competing.isEmpty) {
if (secondTry) NoBestExprAlternativeError(tree, pt)
- else { if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt) }
+ else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt)
} else {
// val applicable = alts1 filter (alt =>
// global.typer.infer.isWeaklyCompatible(pre.memberType(alt), pt))
@@ -1398,10 +1398,14 @@ trait Infer {
}
}
- @inline private def inSilentMode(expr: Typer => Boolean): Boolean = {
- val silentContext = context.makeSilent(context.ambiguousErrors)
- val res = expr(newTyper(silentContext))
- if (silentContext.hasErrors) false else res
+ @inline private def inSilentMode(context: Context)(expr: => Boolean): Boolean = {
+ val oldState = context.state
+ context.setBufferErrors()
+ val res = expr
+ val contextWithErrors = context.hasErrors
+ context.flushBuffer()
+ context.restoreState(oldState)
+ res && !contextWithErrors
}
// Checks against the name of the parameter and also any @deprecatedName.
@@ -1466,15 +1470,13 @@ trait Infer {
argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false): Unit = tree.tpe match {
case OverloadedType(pre, alts) =>
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
- var secondTry = true
tryTwice {
- secondTry = !secondTry
debuglog("infer method alt "+ tree.symbol +" with alternatives "+
(alts map pre.memberType) +", argtpes = "+ argtpes +", pt = "+ pt)
val applicable = resolveOverloadedMethod(argtpes, {
alts filter { alt =>
- inSilentMode(typer0 => typer0.infer.isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) &&
+ inSilentMode(context)(isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) &&
(!varArgsOnly || isVarArgsList(alt.tpe.params))
}
})
@@ -1490,11 +1492,8 @@ trait Infer {
if (improves(alt, best)) alt else best)
val competing = applicable.dropWhile(alt => best == alt || improves(best, alt))
if (best == NoSymbol) {
- if (pt == WildcardType) {
+ if (pt == WildcardType)
NoBestMethodAlternativeError(tree, argtpes, pt)
- if (secondTry)
- setError(tree)
- }
else
inferMethodAlternative(tree, undetparams, argtpes, WildcardType)
} else if (!competing.isEmpty) {
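
A self-contained sketch of the reworked inSilentMode idiom above: switch to buffering, evaluate the predicate, remember whether anything was buffered, then flush and restore the previous state. Names are simplified; this Ctx is not the compiler's Context.

object SilentModeDemo {
  final class Ctx {
    var buffering = false
    val buffer = scala.collection.mutable.ListBuffer[String]()
    def state: Boolean = buffering
    def restoreState(s: Boolean): Unit = { buffering = s }
    def setBufferErrors(): Unit = { buffering = true }
    def flushBuffer(): Unit = buffer.clear()
    def hasErrors: Boolean = buffer.nonEmpty
  }

  def inSilentMode(ctx: Ctx)(expr: => Boolean): Boolean = {
    val oldState = ctx.state
    ctx.setBufferErrors()
    val res = expr
    val hadErrors = ctx.hasErrors                  // remember before flushing
    ctx.flushBuffer()
    ctx.restoreState(oldState)
    res && !hadErrors                              // succeed only if nothing was buffered
  }

  def main(args: Array[String]): Unit = {
    val ctx = new Ctx
    println(inSilentMode(ctx) { ctx.buffer += "type mismatch"; true })  // false
    println(inSilentMode(ctx) { true })                                 // true
  }
}
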
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 470f3e7117..161e4b7a9a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -90,8 +90,19 @@ trait Macros { self: Analyzer =>
lazy val mirror = new scala.reflect.runtime.Mirror {
lazy val libraryClassLoader = {
+ // todo. this is more or less okay, but not completely correct
+ // see https://issues.scala-lang.org/browse/SI-5433 for more info
val classpath = global.classPath.asURLs
- ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
+ var loader: ClassLoader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
+
+ // a heuristic to detect REPL
+ if (global.settings.exposeEmptyPackage.value) {
+ import scala.tools.nsc.interpreter._
+ val virtualDirectory = global.settings.outputDirs.getSingleOutput.get
+ loader = new AbstractFileClassLoader(virtualDirectory, loader) {}
+ }
+
+ loader
}
override def defaultReflectiveClassLoader() = libraryClassLoader
@@ -112,19 +123,42 @@ trait Macros { self: Analyzer =>
if (mmeth == NoSymbol) None
else {
- val receiverClass: mirror.Symbol = mirror.symbolForName(mmeth.owner.fullName)
+ trace("loading implementation class: ")(mmeth.owner.fullName)
+ trace("classloader is: ")("%s of type %s".format(mirror.libraryClassLoader, mirror.libraryClassLoader.getClass))
+ def inferClasspath(cl: ClassLoader) = cl match {
+ case cl: java.net.URLClassLoader => "[" + (cl.getURLs mkString ",") + "]"
+ case _ => "<unknown>"
+ }
+ trace("classpath is: ")(inferClasspath(mirror.libraryClassLoader))
+
+ // @xeno.by: relies on the fact that macros can only be defined in static classes
+ def classfile(sym: Symbol): String = {
+ def recur(sym: Symbol): String = sym match {
+ case sym if sym.owner.isPackageClass =>
+ val suffix = if (sym.isModuleClass) "$" else ""
+ sym.fullName + suffix
+ case sym =>
+ val separator = if (sym.owner.isModuleClass) "" else "$"
+ recur(sym.owner) + separator + sym.javaSimpleName
+ }
+
+ if (sym.isClass || sym.isModule) recur(sym)
+ else recur(sym.enclClass)
+ }
+
+ // @xeno.by: this doesn't work for inner classes
+ // neither does mmeth.owner.javaClassName, so I had to roll my own implementation
+ //val receiverName = mmeth.owner.fullName
+ val receiverName = classfile(mmeth.owner)
+ val receiverClass: mirror.Symbol = mirror.symbolForName(receiverName)
+
if (debug) {
println("receiverClass is: " + receiverClass.fullNameString)
val jreceiverClass = mirror.classToJava(receiverClass)
val jreceiverSource = jreceiverClass.getProtectionDomain.getCodeSource
println("jreceiverClass is %s from %s".format(jreceiverClass, jreceiverSource))
-
- val jreceiverClasspath = jreceiverClass.getClassLoader match {
- case cl: java.net.URLClassLoader => "[" + (cl.getURLs mkString ",") + "]"
- case _ => "<unknown>"
- }
- println("jreceiverClassLoader is %s with classpath %s".format(jreceiverClass.getClassLoader, jreceiverClasspath))
+ println("jreceiverClassLoader is %s with classpath %s".format(jreceiverClass.getClassLoader, inferClasspath(jreceiverClass.getClassLoader)))
}
val receiverObj = receiverClass.companionModule
@@ -132,7 +166,11 @@ trait Macros { self: Analyzer =>
if (receiverObj == mirror.NoSymbol) None
else {
- val receiver = mirror.companionInstance(receiverClass)
+ // @xeno.by: yet another reflection method that doesn't work for inner classes
+ //val receiver = mirror.companionInstance(receiverClass)
+ val clazz = java.lang.Class.forName(receiverName, true, mirror.libraryClassLoader)
+ val receiver = clazz getField "MODULE$" get null
+
val rmeth = receiverObj.info.member(mirror.newTermName(mmeth.name.toString))
if (debug) {
println("rmeth is: " + rmeth.fullNameString)
@@ -147,6 +185,7 @@ trait Macros { self: Analyzer =>
}
} catch {
case ex: ClassNotFoundException =>
+ trace("implementation class failed to load: ")(ex.toString)
None
}
}
@@ -155,7 +194,7 @@ trait Macros { self: Analyzer =>
* Or, if that fails, and the macro overrides a method return
* tree that calls this method instead of the macro.
*/
- def macroExpand(tree: Tree, context: Context): Option[Any] = {
+ def macroExpand(tree: Tree, typer: Typer): Option[Any] = {
val trace = scala.tools.nsc.util.trace when settings.Ymacrodebug.value
trace("macroExpand: ")(tree)
@@ -180,7 +219,19 @@ trait Macros { self: Analyzer =>
// if one of those children involves macro expansion, things might get nasty
// that's why I'm temporarily turning this behavior off
nodePrinters.infolevel = nodePrinters.InfoLevel.Quiet
- Some(mirror.invoke(receiver, rmeth)(rawArgs: _*))
+ val expanded = mirror.invoke(receiver, rmeth)(rawArgs: _*)
+ expanded match {
+ case expanded: Tree =>
+ val expectedTpe = tree.tpe
+ val typed = typer.typed(expanded, EXPRmode, expectedTpe)
+ Some(typed)
+ case expanded if expanded.isInstanceOf[Tree] =>
+ typer.context.unit.error(tree.pos, "macro must return a compiler-specific tree; returned value is Tree, but it doesn't belong to this compiler's universe")
+ None
+ case expanded =>
+ typer.context.unit.error(tree.pos, "macro must return a compiler-specific tree; returned value is of class: " + expanded.getClass)
+ None
+ }
} catch {
case ex =>
val realex = ReflectionUtils.unwrapThrowable(ex)
@@ -191,14 +242,14 @@ trait Macros { self: Analyzer =>
} else {
realex.getMessage
}
- context.unit.error(tree.pos, "exception during macro expansion: " + msg)
+ typer.context.unit.error(tree.pos, "exception during macro expansion: " + msg)
None
} finally {
nodePrinters.infolevel = savedInfolevel
}
case None =>
def notFound() = {
- context.unit.error(tree.pos, "macro implementation not found: " + macroDef.name)
+ typer.context.unit.error(tree.pos, "macro implementation not found: " + macroDef.name)
None
}
def fallBackToOverridden(tree: Tree): Option[Tree] = {
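
The MODULE$ lookup used above can be illustrated outside the compiler. A small sketch, assuming compiled top-level code (not the REPL) and a hypothetical Greeter object:

object Greeter { def greet(name: String): String = "hello, " + name }

object ModuleFieldDemo {
  def main(args: Array[String]): Unit = {
    // A top-level object Foo compiles to a module class Foo$ whose singleton
    // instance lives in a public static field named MODULE$.
    val clazz    = java.lang.Class.forName("Greeter$")
    val instance = clazz.getField("MODULE$").get(null)
    val greet    = clazz.getMethod("greet", classOf[String])
    println(greet.invoke(instance, "macro"))       // hello, macro
  }
}
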
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 51542ec757..3347d2e767 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1299,7 +1299,7 @@ trait Namers extends MethodSynthesis {
catch typeErrorHandler(tree, ErrorType)
result match {
- case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm => typer.deskolemizeTypeParams(tparams)(result)
+ case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm => deskolemizeTypeParams(tparams)(result)
case _ => result
}
}
@@ -1439,8 +1439,11 @@ trait Namers extends MethodSynthesis {
private val ownerSym = owner.symbol
override val typeParams = tparams map (_.symbol) //@M
override val tree = restp.tree
- if (ownerSym.isTerm)
- typer skolemizeTypeParams tparams
+
+ if (ownerSym.isTerm) {
+ val skolems = deriveFreshSkolems(tparams map (_.symbol))
+ map2(tparams, skolems)(_ setSymbol _)
+ }
def completeImpl(sym: Symbol) = {
// @M an abstract type's type parameters are entered.
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
index 6d31243fd0..25dcc302ce 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
@@ -43,7 +43,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
val outer = newTermName("<outer>")
val runOrElse = newTermName("runOrElse")
val zero = newTermName("zero")
- val __match = newTermName("__match")
+ val _match = newTermName("__match") // don't call it __match, since that will trigger virtual pattern matching...
def counted(str: String, i: Int) = newTermName(str+i)
}
@@ -51,8 +51,8 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
object MatchTranslator {
def apply(typer: Typer): MatchTranslation = {
import typer._
- // typing `__match` to decide which MatchTranslator to create adds 4% to quick.comp.timer
- newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName.__match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
+ // typing `_match` to decide which MatchTranslator to create adds 4% to quick.comp.timer
+ newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
case SilentResultValue(ms) => new PureMatchTranslator(typer, ms)
case _ => new OptimizingMatchTranslator(typer)
}
@@ -116,6 +116,10 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore =>
import typer.{typed, context, silent, reallyExists}
+ private def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
+ case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args)
+ case _ => tp
+ }
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
@@ -133,11 +137,6 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
// and the only place that emits Matches after typers is for exception handling anyway)
assert(phase.id <= currentRun.typerPhase.id, phase)
- def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
- case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args)
- case _ => tp
- }
-
val scrutType = repeatedToSeq(elimAnonymousClass(scrut.tpe.widen))
val scrutSym = freshSym(scrut.pos, pureType(scrutType))
@@ -146,6 +145,47 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, matchOwner)
}
+ // return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
+ // we don't have a global scrutinee -- the caught exception must be bound in each of the casedefs
+ // there's no need to check the scrutinee for null -- "throw null" becomes "throw new NullPointerException"
+ // try to simplify to a type-based switch, or fall back to a catch-all case that runs a normal pattern match
+ // unlike translateMatch, we type our result before returning it
+ def translateTry(caseDefs: List[CaseDef], pt: Type, pos: Position): List[CaseDef] =
+ // if they're already simple enough to be handled by the back-end, we're done
+ if (caseDefs forall treeInfo.isCatchCase) caseDefs
+ else {
+ val okPt = repeatedToSeq(pt)
+ val switch = {
+ val bindersAndCases = caseDefs map { caseDef =>
+ // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
+ // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
+ val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, okPt)(caseDef), EmptySubstitution))
+ }
+
+ (emitTypeSwitch(bindersAndCases, pt) map (_.map(fixerUpper(matchOwner, pos).apply(_).asInstanceOf[CaseDef])))
+ }
+
+ val catches = switch getOrElse {
+ val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, okPt)(caseDef), EmptySubstitution))}
+
+ val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
+
+ List(
+ atPos(pos) {
+ CaseDef(
+ Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
+ EmptyTree,
+ combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, scrut => Throw(CODE.REF(exSym)))
+ )
+ })
+ }
+
+ typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
+ }
+
+
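
A rough source-level picture (hypothetical exception handler, not part of this patch) of the fallback that translateTry describes: when guards or extractors rule out plain type-based catch cases, the handler becomes one catch-all whose body is an ordinary match that rethrows when nothing applies.

object TryTranslationDemo {
  def before(): Int =
    try "42".toInt
    catch { case e: NumberFormatException if e.getMessage != null => -1 }

  // What the catch clause amounts to after translation: one binder for the
  // caught exception, a normal pattern match on it, and a rethrow as the
  // default so unhandled throwables still propagate.
  def after(): Int =
    try "42".toInt
    catch {
      case ex: Throwable =>
        ex match {
          case e: NumberFormatException if e.getMessage != null => -1
          case _ => throw ex
        }
    }

  def main(args: Array[String]): Unit = println(before() == after())   // true
}
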
/** The translation of `pat if guard => body` has two aspects:
* 1) the substitution due to the variables bound by patterns
@@ -668,6 +708,10 @@ class Foo(x: Other) { x._1 } // no error in this order
def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] =
None
+ // for catch
+ def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
+ None
+
abstract class TreeMaker {
/** captures the scope and the value of the bindings in patterns
* important *when* the substitution happens (can't accumulate and do at once after the full matcher has been constructed)
@@ -788,6 +832,7 @@ class Foo(x: Other) { x._1 } // no error in this order
}
// implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
+ // TODO: normalize construction, which yields a combination of a EqualityTestTreeMaker (when necessary) and a TypeTestTreeMaker
case class TypeAndEqualityTestTreeMaker(prevBinder: Symbol, patBinder: Symbol, pt: Type, pos: Position) extends CondTreeMaker {
val nextBinderTp = glb(List(patBinder.info.widen, pt))
@@ -843,6 +888,10 @@ class Foo(x: Other) { x._1 } // no error in this order
val cond = typeAndEqualityTest(patBinder, pt)
val res = codegen._asInstanceOf(patBinder, nextBinderTp)
+
+ // TODO: remove this
+ def isStraightTypeTest = cond match { case TypeApply(_, _) => cond.symbol == Any_isInstanceOf case _ => false }
+
override def toString = "TET"+(patBinder, pt)
}
@@ -926,25 +975,30 @@ class Foo(x: Other) { x._1 } // no error in this order
}
// calls propagateSubstitution on the treemakers
- def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = fixerUpper(owner, scrut.pos){
- val casesUnOpt = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+ def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = {
+ val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+ combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, CODE.MATCHERROR(_))
+ }
- emitSwitch(scrut, scrutSym, casesUnOpt, pt).getOrElse{
+ def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFail: Tree => Tree): Tree = fixerUpper(owner, scrut.pos){
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt).getOrElse{
val (matcher, hasDefault, toHoist) =
- if (casesUnOpt nonEmpty) {
+ if (casesNoSubstOnly nonEmpty) {
// when specified, need to propagate pt explicitly (type inferencer can't handle it)
val optPt =
if (isFullyDefined(pt)) inMatchMonad(pt)
else NoType
- // do this check on casesUnOpt, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
+ // do this check on casesNoSubstOnly, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
// exhaustivity and reachability must be checked before optimization as well
- val hasDefault = casesUnOpt.nonEmpty && {
- val nonTrivLast = casesUnOpt.last
+ // TODO: improve, a trivial type test before the body still makes for a default case
+ // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
+ val hasDefault = casesNoSubstOnly.nonEmpty && {
+ val nonTrivLast = casesNoSubstOnly.last
nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
}
- val (cases, toHoist) = optimizeCases(scrutSym, casesUnOpt, pt)
+ val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt)
val combinedCases =
cases.map(combineExtractors(_, pt)).reduceLeft(codegen.typedOrElse(optPt))
@@ -952,7 +1006,11 @@ class Foo(x: Other) { x._1 } // no error in this order
(combinedCases, hasDefault, toHoist)
} else (codegen.zero, false, Nil)
- val expr = codegen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, hasDefault)
+ // catch-all
+ val catchAll =
+ if (hasDefault) None // no need for a catch-all when there's already a default
+ else Some(matchFail)
+ val expr = codegen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, catchAll)
if (toHoist isEmpty) expr
else Block(toHoist, expr)
}
@@ -966,7 +1024,7 @@ class Foo(x: Other) { x._1 } // no error in this order
// TODO: do this during tree construction, but that will require tracking the current owner in treemakers
// TODO: assign more fine-grained positions
// fixes symbol nesting, assigns positions
- private def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
+ protected def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
currentOwner = origOwner
override def traverse(t: Tree) {
@@ -1019,7 +1077,7 @@ class Foo(x: Other) { x._1 } // no error in this order
// codegen relevant to the structure of the translation (how extractors are combined)
trait AbsCodegen {
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree
+ def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]): Tree
def one(res: Tree, bodyPt: Type, matchPt: Type): Tree
def zero: Tree
def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
@@ -1098,10 +1156,10 @@ class Foo(x: Other) { x._1 } // no error in this order
protected def matchMonadSym = oneSig.finalResultType.typeSymbol
import CODE._
- def __match(n: Name): SelectStart = matchStrategy DOT n
+ def _match(n: Name): SelectStart = matchStrategy DOT n
private lazy val oneSig: Type =
- typer.typed(__match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message
+ typer.typed(_match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message
}
trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
@@ -1110,14 +1168,15 @@ class Foo(x: Other) { x._1 } // no error in this order
object pureCodegen extends CommonCodegen { import CODE._
//// methods in MatchingStrategy (the monad companion) -- used directly in translation
// __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree
- = __match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, matcher))
+ // TODO: consider catchAll, or virtualized matching will break in exception handlers
+ def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]): Tree
+ = _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, matcher))
// __match.one(`res`)
- def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (__match(vpmName.one)) (res)
+ def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (_match(vpmName.one)) (res)
// __match.zero
- def zero: Tree = __match(vpmName.zero)
+ def zero: Tree = _match(vpmName.zero)
// __match.guard(`c`, `then`)
- def guard(c: Tree, then: Tree, tp: Type): Tree = __match(vpmName.guard) APPLY (c, then)
+ def guard(c: Tree, then: Tree, tp: Type): Tree = _match(vpmName.guard) APPLY (c, then)
//// methods in the monad instance -- used directly in translation
// `prev`.flatMap(`b` => `next`)
@@ -1437,94 +1496,145 @@ class Foo(x: Other) { x._1 } // no error in this order
}
}
- //// SWITCHES
+ //// SWITCHES -- TODO: operate on Tests rather than TreeMakers
trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface { self: CodegenCore =>
- object SwitchablePattern { def unapply(pat: Tree) = pat match {
- case Literal(Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) => true // TODO: Java 7 allows strings in switches
- case _ => false
- }}
-
- // def isSwitchable(cases: List[(List[TreeMaker], Tree)]): Boolean = {
- // def isSwitchableTreeMaker(tm: TreeMaker) = tm match {
- // case tm@EqualityTestTreeMaker(_, SwitchablePattern(), _) => true
- // case SubstOnlyTreeMaker(_) => true
- // case AlternativesTreeMaker(_, altss, _) => altss forall (_.forall(isSwitchableTreeMaker))
- // case _ => false
- // }
- // }
+ abstract class SwitchMaker {
+ abstract class SwitchableTreeMakerExtractor { def unapply(x: TreeMaker): Option[Tree] }
+ val SwitchableTreeMaker: SwitchableTreeMakerExtractor
+
+ def alternativesSupported: Boolean
- private val switchableTpes = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+ def isDefault(x: CaseDef): Boolean
+ def defaultSym: Symbol
+ def defaultBody: Tree
+ def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef
- override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = {
- def sequence[T](xs: List[Option[T]]): Option[List[T]] =
+ private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
if (xs exists (_.isEmpty)) None else Some(xs.flatten)
- def isSwitchableTpe(tpe: Type): Boolean =
- switchableTpes contains tpe
- def switchableConstToInt(x: Tree): Tree = {
- val Literal(const) = x
- const.tag match {
- case IntTag => x
- case ByteTag | ShortTag | CharTag => Literal(Constant(const.intValue))
+ // empty list ==> failure
+ def apply(cases: List[(Symbol, List[TreeMaker])], pt: Type): List[CaseDef] = {
+ val caseDefs = cases map { case (scrutSym, makers) =>
+ makers match {
+ // default case
+ case (btm@BodyTreeMaker(body, _)) :: Nil =>
+ Some(defaultCase(scrutSym, btm.substitution(body)))
+ // constant (or typetest for typeSwitch)
+ case SwitchableTreeMaker(pattern) :: (btm@BodyTreeMaker(body, _)) :: Nil =>
+ Some(CaseDef(pattern, EmptyTree, btm.substitution(body)))
+ // alternatives
+ case AlternativesTreeMaker(_, altss, _) :: (btm@BodyTreeMaker(body, _)) :: Nil if alternativesSupported =>
+ val casePatterns = altss map {
+ case SwitchableTreeMaker(pattern) :: Nil =>
+ Some(pattern)
+ case _ =>
+ None
+ }
+
+ sequence(casePatterns) map { patterns =>
+ val substedBody = btm.substitution(body)
+ CaseDef(Alternative(patterns), EmptyTree, substedBody)
+ }
+ case _ => //println("can't emit switch for "+ makers)
+ None //failure (can't translate pattern to a switch)
+ }
}
- }
- val caseDefs = cases map { makers =>
- removeSubstOnly(makers) match {
- // default case (don't move this to unfold, as it may only occur on the top level, not as an alternative -- well, except in degenerate matches)
- case (btm@BodyTreeMaker(body, _)) :: Nil =>
- Some(CaseDef(Ident(nme.WILDCARD), EmptyTree, btm.substitution(body)))
- // constant
- case (EqualityTestTreeMaker(_, const@SwitchablePattern(), _)) :: (btm@BodyTreeMaker(body, _)) :: Nil =>
- Some(CaseDef(switchableConstToInt(const), EmptyTree, btm.substitution(body)))
- // alternatives
- case AlternativesTreeMaker(_, altss, _) :: (btm@BodyTreeMaker(body, _)) :: Nil => // assert(currLabel.isEmpty && nextLabel.isEmpty)
- val caseConstants = altss map {
- case EqualityTestTreeMaker(_, const@SwitchablePattern(), _) :: Nil =>
- Some(switchableConstToInt(const))
- case _ =>
- None
+ (for(
+ caseDefs <- sequence(caseDefs)) yield
+ if (caseDefs exists isDefault) caseDefs
+ else {
+ caseDefs :+ defaultCase()
}
+ ) getOrElse Nil
+ }
+ }
- sequence(caseConstants) map { contants =>
- val substedBody = btm.substitution(body)
- CaseDef(Alternative(contants), EmptyTree, substedBody)
- }
- case _ =>
- None //failure (can't translate pattern to a switch)
+ class RegularSwitchMaker(scrutSym: Symbol) extends SwitchMaker {
+ val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+ val alternativesSupported = true
+
+ object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat match {
+ case Literal(const@Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) =>
+ Some(Literal(Constant(const.intValue))) // TODO: Java 7 allows strings in switches
+ case _ => None
+ }}
+
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case EqualityTestTreeMaker(_, SwitchablePattern(const), _) => Some(const)
+ case _ => None
}
}
- if (!isSwitchableTpe(scrut.tpe))
- None // TODO: emit a cast of the scrutinee and a switch on the cast scrutinee if patterns allow switch but the type of the scrutinee doesn't
- else {
- sequence(caseDefs) map { caseDefs =>
- import CODE._
- val caseDefsWithDefault = {
- def isDefault(x: CaseDef): Boolean = x match {
- case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
- case _ => false
- }
- val hasDefault = caseDefs exists isDefault
- if (hasDefault) caseDefs else {
- val default = atPos(scrut.pos) { DEFAULT ==> MATCHERROR(REF(scrutSym)) }
- caseDefs :+ default
- }
- }
- val matcher = BLOCK(
- if (scrut.tpe != IntClass.tpe) {
- scrutSym setInfo IntClass.tpe
- VAL(scrutSym) === (scrut DOT nme.toInt)
- } else {
- VAL(scrutSym) === scrut
- },
- Match(REF(scrutSym), caseDefsWithDefault) // match on scrutSym, not scrut to avoid duplicating scrut
- )
- // matcher filter (tree => tree.tpe == null) foreach println
- // treeBrowser browse matcher
- matcher // set type to avoid recursion in typedMatch
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+
+ def defaultSym: Symbol = scrutSym
+ def defaultBody: Tree = { import CODE._; MATCHERROR(REF(scrutSym)) }
+ def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ DEFAULT ==> body
+ }}
+ }
+
+ override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = { import CODE._
+ val regularSwitchMaker = new RegularSwitchMaker(scrutSym)
+ // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
+ if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) {
+ val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
+ if (caseDefsWithDefault isEmpty) None
+ else {
+ // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
+ val scrutToInt: Tree =
+ if(scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+ else (REF(scrutSym) DOT (nme.toInt))
+ Some(BLOCK(
+ VAL(scrutSym) === scrut,
+ Match(scrutToInt, caseDefsWithDefault)
+ ))
+ }
+ } else None
+ }
+
+ // for the catch-cases in a try/catch
+ private object typeSwitchMaker extends SwitchMaker {
+ def switchableTpe(tp: Type) = true
+ val alternativesSupported = false // TODO: needs either back-end support of flattening of alternatives during typers
+
+ // TODO: there are more treemaker-sequences that can be handled by type tests
+ // analyze the result of approximateTreeMaker rather than the TreeMaker itself
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case tm@TypeTestTreeMaker(_, _, _) =>
+ Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(tm.nextBinderTp)) /* not used by back-end */)) // -- TODO: use this if binder does not occur in the body
+ case tm@TypeAndEqualityTestTreeMaker(_, patBinder, pt, _) if tm.isStraightTypeTest =>
+ Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(tm.nextBinderTp)) /* not used by back-end */))
+ case _ =>
+ None
}
}
+
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+
+ lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
+ def defaultBody: Tree = Throw(CODE.REF(defaultSym))
+ def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) ==> body
+ }}
+ }
+
+ // TODO: drop null checks
+ override def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = {
+ val caseDefsWithDefault = typeSwitchMaker(bindersAndCases, pt)
+ if (caseDefsWithDefault isEmpty) None
+ else Some(caseDefsWithDefault)
}
}
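
A hand-written sketch (not generated by this code) of what RegularSwitchMaker aims for: constant patterns over a switchable primitive are matched on the value converted to Int, with a default appended when none is present.

object SwitchSketch {
  def classify(c: Char): String = {
    val scrut: Int = c.toInt                       // const.intValue / scrutSym.toInt above
    scrut match {
      case 97 | 101 | 105 | 111 | 117 => "vowel"   // 'a', 'e', 'i', 'o', 'u' as Int constants
      case _                          => "other"   // appended default; the real one throws MatchError
    }
  }
  def main(args: Array[String]): Unit = println(classify('e'))   // vowel
}
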
@@ -1551,33 +1661,31 @@ class Foo(x: Other) { x._1 } // no error in this order
/** Inline runOrElse and get rid of Option allocations
*
- * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse (throw new MatchError(x))
+ * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse ${catchAll(`scrut`)}
* the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
* if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
*/
@inline private def dontStore(tp: Type) = (tp.typeSymbol eq UnitClass) || (tp.typeSymbol eq NothingClass)
lazy val keepGoing = freshSym(NoPosition, BooleanClass.tpe, "keepGoing") setFlag MUTABLE
lazy val matchRes = freshSym(NoPosition, AnyClass.tpe, "matchRes") setFlag MUTABLE
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean) = {
+ def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]) = {
matchRes.info = if (resTp ne NoType) resTp.widen else AnyClass.tpe // we don't always know resTp, and it might be AnyVal, in which case we can't assign NULL
if (dontStore(resTp)) matchRes resetFlag MUTABLE // don't assign to Unit-typed var's, in fact, make it a val -- conveniently also works around SI-5245
BLOCK(
VAL(zeroSym) === REF(NoneModule), // TODO: can we just get rid of explicitly emitted zero? don't know how to do that as a local rewrite...
- VAL(scrutSym) === scrut, // reuse the symbol of the function's argument to avoid creating a fresh one and substituting it for scrutSym in `matcher` -- the owner structure is repaired by fixerUpper
+ VAL(scrutSym) === scrut,
VAL(matchRes) === mkZero(matchRes.info), // must cast to deal with GADT typing, hence the private mkZero above
VAL(keepGoing) === TRUE,
matcher,
- if(hasDefault) REF(matchRes)
- else (IF (REF(keepGoing)) THEN MATCHERROR(REF(scrutSym)) ELSE REF(matchRes))
+ catchAll map { catchAllGen => (IF (REF(keepGoing)) THEN catchAllGen(REF(scrutSym)) ELSE REF(matchRes)) } getOrElse REF(matchRes)
)
}
// only used to wrap the RHS of a body
def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = {
BLOCK(
- if (dontStore(matchPt)) res // runOrElse hasn't been called yet, so matchRes.isMutable is irrelevant, also, tp may be a subtype of resTp used in runOrElse...
- else (REF(matchRes) === res), // _asInstanceOf(res, tp.widen, force = true)
- REF(keepGoing) === FALSE,
+ REF(keepGoing) === FALSE, // comes before assignment to matchRes, so the latter is in tail positions (can ignore the trailing zero -- will disappear when we flatten blocks, which is TODO)
+ if (dontStore(matchPt)) res else (REF(matchRes) === res), // runOrElse hasn't been called yet, so matchRes.isMutable is irrelevant, also, tp may be a subtype of resTp used in runOrElse...
zero // to have a nice lub for lubs -- otherwise we'll get a boxed unit here -- TODO: get rid of all those dangling else zero's
)
}
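
A stand-alone sketch (simplified, not emitted code) of the optimized encoding described in the runOrElse comment above: the matcher's optional result becomes a mutable keepGoing flag plus a matchRes variable, and the catch-all runs only if keepGoing is still true after all cases were tried.

object RunOrElseSketch {
  def run(scrut: Int): String = {
    var keepGoing = true                           // true encodes "no result yet" (None)
    var matchRes: String = null                    // holds the Some(x) payload once a case fires
    if (keepGoing && scrut == 0) { keepGoing = false; matchRes = "zero" }
    if (keepGoing && scrut > 0)  { keepGoing = false; matchRes = "positive" }
    if (keepGoing) throw new MatchError(scrut) else matchRes   // the catchAll from above
  }
  def main(args: Array[String]): Unit = {
    println(run(3))   // positive
    println(run(0))   // zero
  }
}
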
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index e313edb3f6..92e9f54190 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -282,6 +282,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
def otherTp = self.memberType(other)
def noErrorType = other.tpe != ErrorType && member.tpe != ErrorType
def isRootOrNone(sym: Symbol) = sym == RootClass || sym == NoSymbol
+ def isNeitherInClass = (member.owner != clazz) && (other.owner != clazz)
def objectOverrideErrorMsg = (
"overriding " + other.fullLocationString + " with " + member.fullLocationString + ":\n" +
"an overriding object must conform to the overridden object's class bound" +
@@ -383,7 +384,14 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
overrideError("cannot override final member");
// synthetic exclusion needed for (at least) default getters.
} else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) {
- overrideError("needs `override' modifier");
+ if (isNeitherInClass && !(other.owner isSubClass member.owner))
+ emitOverrideError(
+ clazz + " inherits conflicting members:\n "
+ + infoStringWithLocation(other) + " and\n " + infoStringWithLocation(member)
+ + "\n(Note: this can be resolved by declaring an override in " + clazz + ".)"
+ )
+ else
+ overrideError("needs `override' modifier")
} else if (other.isAbstractOverride && other.isIncompleteIn(clazz) && !member.isAbstractOverride) {
overrideError("needs `abstract override' modifiers")
} else if (member.isAnyOverride && (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.isLazy) {
@@ -1443,26 +1451,6 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
transform(qual)
- case Apply(Select(New(tpt), name), args)
- if (tpt.tpe.typeSymbol == ArrayClass && args.length >= 2) =>
- unit.deprecationWarning(tree.pos,
- "new Array(...) with multiple dimensions has been deprecated; use Array.ofDim(...) instead")
- val manif = {
- var etpe = tpt.tpe
- for (_ <- args) { etpe = etpe.typeArgs.headOption.getOrElse(NoType) }
- if (etpe == NoType) {
- unit.error(tree.pos, "too many dimensions for array creation")
- Literal(Constant(null))
- } else {
- localTyper.getManifestTree(tree, etpe, false)
- }
- }
- val newResult = localTyper.typedPos(tree.pos) {
- new ApplyToImplicitArgs(gen.mkMethodCall(ArrayModule, nme.ofDim, args), List(manif))
- }
- currentApplication = tree
- newResult
-
case Apply(fn, args) =>
checkSensible(tree.pos, fn, args)
currentApplication = tree
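
A tiny example (hypothetical traits, not from this patch) of the situation the new diagnostic targets: two unrelated inherited members collide, and declaring an override in the class, as the added note suggests, resolves it.

trait Red  { def label = "red"  }
trait Blue { def label = "blue" }

// class Purple extends Red with Blue {}          // error: Purple inherits conflicting members
class Purple extends Red with Blue {
  override def label = "purple"                   // the override suggested by the new note
}

object OverrideDemo {
  def main(args: Array[String]): Unit = println(new Purple().label)   // purple
}
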
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index ef69e1525e..1c1f35aac2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -941,10 +941,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
if (tree.isType)
adaptType()
- else if (inExprModeButNot(mode, FUNmode) && tree.symbol != null && tree.symbol.isMacro && !tree.isDef) {
- val tree1 = expandMacro(tree)
- if (tree1.isErroneous) tree1 else typed(tree1, mode, pt)
- } else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
+ else if (inExprModeButNot(mode, FUNmode) && tree.symbol != null && tree.symbol.isMacro && !tree.isDef && !(tree exists (_.isErroneous)))
+ macroExpand(tree, this) match {
+ case Some(expanded: Tree) =>
+ typed(expanded, mode, pt)
+ case None =>
+ setError(tree) // error already reported
+ }
+ else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
adaptConstrPattern()
else if (inAllModes(mode, EXPRmode | FUNmode) &&
!tree.tpe.isInstanceOf[MethodType] &&
@@ -1987,7 +1991,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
}
- def typedCases(tree: Tree, cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
+ def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
cases mapConserve { cdef =>
newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt)
}
@@ -2169,6 +2173,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// error for this is issued in RefChecks.checkDefaultsInOverloaded
if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefaultFlag &&
!e.sym.hasAnnotation(BridgeClass) && !e1.sym.hasAnnotation(BridgeClass)) {
+ log("Double definition detected:\n " +
+ ((e.sym.getClass, e.sym.info, e.sym.ownerChain)) + "\n " +
+ ((e1.sym.getClass, e1.sym.info, e1.sym.ownerChain)))
+
DefDefinedTwiceError(e.sym, e1.sym)
scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779
}
@@ -3028,40 +3036,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
packSymbols(localSyms.toList, normalizedTpe)
}
- /** Replace type parameters with their TypeSkolems, which can later
- * be deskolemized to the original type param. (A skolem is a
- * representation of a bound variable when viewed inside its scope)
- * !!!Adriaan: this does not work for hk types.
- */
- def skolemizeTypeParams(tparams: List[TypeDef]): List[TypeDef] = {
- class Deskolemizer extends LazyType {
- override val typeParams = tparams map (_.symbol)
- val typeSkolems = typeParams map (_.newTypeSkolem setInfo this)
- // Replace the symbols
- def substitute() = map2(tparams, typeSkolems)(_ setSymbol _)
- override def complete(sym: Symbol) {
- // The info of a skolem is the skolemized info of the
- // actual type parameter of the skolem
- sym setInfo sym.deSkolemize.info.substSym(typeParams, typeSkolems)
- }
- }
- (new Deskolemizer).substitute()
- }
- /** Convert to corresponding type parameters all skolems of method
- * parameters which appear in `tparams`.
- */
- def deskolemizeTypeParams(tparams: List[Symbol])(tp: Type): Type = {
- class DeSkolemizeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) if sym.isTypeSkolem && (tparams contains sym.deSkolemize) =>
- mapOver(typeRef(NoPrefix, sym.deSkolemize, args))
- case _ =>
- mapOver(tp)
- }
- }
- new DeSkolemizeMap mapOver tp
- }
-
def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) =
if (!checkClassType(tpt, true, false) && noGen) tpt
else atPos(tree.pos)(gen.mkClassOf(tpt.tpe))
@@ -3318,7 +3292,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
} else {
val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
- var cases1 = typedCases(tree, cases, packCaptured(selector1.tpe.widen), pt)
+ var cases1 = typedCases(cases, packCaptured(selector1.tpe.widen), pt)
if (isPastTyper || !opt.virtPatmat) {
val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
@@ -3334,7 +3308,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
(MatchTranslator(this)).translateMatch(selector1, cases1, owntype) match {
case Block(vd :: Nil, tree@Match(selector, cases)) =>
val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
- var cases1 = typedCases(tree, cases, packCaptured(selector1.tpe.widen), pt)
+ var cases1 = typedCases(cases, packCaptured(selector1.tpe.widen), pt)
val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
if (needAdapt)
cases1 = cases1 map (adaptCase(_, owntype))
@@ -4242,7 +4216,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case Try(block, catches, finalizer) =>
var block1 = typed(block, pt)
- var catches1 = typedCases(tree, catches, ThrowableClass.tpe, pt)
+ var catches1 = typedCases(catches, ThrowableClass.tpe, pt)
val finalizer1 = if (finalizer.isEmpty) finalizer
else typed(finalizer, UnitClass.tpe)
val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)))
@@ -4250,6 +4224,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
block1 = adapt(block1, mode, owntype)
catches1 = catches1 map (adaptCase(_, owntype))
}
+
+ if(!isPastTyper && opt.virtPatmat) {
+ catches1 = (MatchTranslator(this)).translateTry(catches1, owntype, tree.pos)
+ }
+
treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
case Throw(expr) =>
@@ -4536,13 +4515,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
- def expandMacro(tree: Tree): Tree =
- macroExpand(tree, context) match {
- case Some(t: Tree) => t
- case Some(t) => MacroExpandError(tree, t)
- case None => setError(tree) // error already reported
- }
-
def atOwner(owner: Symbol): Typer =
newTyper(context.make(context.tree, owner))