Diffstat (limited to 'src')
-rw-r--r--  src/compiler/scala/reflect/makro/runtime/ExprUtils.scala | 2
-rw-r--r--  src/compiler/scala/reflect/reify/Taggers.scala | 29
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenSymbols.scala | 6
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenTrees.scala | 10
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenTypes.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala | 152
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Repository.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 163
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 540
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 487
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 94
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 43
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 679
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 35
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 73
-rw-r--r--  src/compiler/scala/tools/reflect/StdTags.scala | 9
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 2
-rw-r--r--  src/eclipse/README | 23
-rw-r--r--  src/eclipse/README.md | 52
-rw-r--r--  src/eclipse/reflect/.classpath | 1
-rw-r--r--  src/eclipse/scala-compiler/.classpath | 2
-rw-r--r--  src/library/scala/Predef.scala | 8
-rw-r--r--  src/library/scala/concurrent/Future.scala | 3
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 1
-rw-r--r--  src/library/scala/concurrent/impl/Future.scala | 1
-rw-r--r--  src/library/scala/math/BigInt.scala | 11
-rw-r--r--  src/library/scala/package.scala | 9
-rw-r--r--  src/library/scala/reflect/ClassManifest.scala | 91
-rw-r--r--  src/library/scala/reflect/ClassTag.scala | 15
-rw-r--r--  src/library/scala/reflect/Manifest.scala | 42
-rw-r--r--  src/library/scala/reflect/NoManifest.scala | 2
-rw-r--r--  src/library/scala/reflect/OptManifest.scala | 2
-rw-r--r--  src/library/scala/reflect/base/Base.scala | 20
-rw-r--r--  src/library/scala/reflect/base/StandardDefinitions.scala | 1
-rw-r--r--  src/library/scala/reflect/base/Symbols.scala | 8
-rw-r--r--  src/library/scala/reflect/base/TagInterop.scala | 11
-rw-r--r--  src/library/scala/reflect/base/TypeTags.scala | 12
-rw-r--r--  src/library/scala/reflect/base/Types.scala | 5
-rw-r--r--  src/library/scala/reflect/package.scala | 41
-rw-r--r--  src/library/scala/util/Either.scala (renamed from src/library/scala/Either.scala) | 2
-rw-r--r--  src/library/scala/util/Try.scala | 202
-rw-r--r--  src/library/scala/util/control/NonFatal.scala (renamed from src/library/scala/concurrent/impl/NonFatal.scala) | 23
-rw-r--r--  src/reflect/scala/reflect/api/Printers.scala | 39
-rw-r--r--  src/reflect/scala/reflect/api/Types.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/BuildUtils.scala | 24
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 7
-rw-r--r--  src/reflect/scala/reflect/internal/Mirrors.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Printers.scala | 75
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 7
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 42
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 1
-rw-r--r--  src/reflect/scala/reflect/runtime/ReflectionUtils.scala | 15
63 files changed, 1955 insertions(+), 1288 deletions(-)
diff --git a/src/compiler/scala/reflect/makro/runtime/ExprUtils.scala b/src/compiler/scala/reflect/makro/runtime/ExprUtils.scala
index 4775138e5a..e301dfc2a4 100644
--- a/src/compiler/scala/reflect/makro/runtime/ExprUtils.scala
+++ b/src/compiler/scala/reflect/makro/runtime/ExprUtils.scala
@@ -29,7 +29,7 @@ trait ExprUtils {
def literal(x: Double) = Expr[Double](Literal(Constant(x)))(TypeTag.Double)
- def literal(x: String) = Expr[String](Literal(Constant(x)))(TypeTag.String)
+ def literal(x: String) = Expr[String](Literal(Constant(x)))(TypeTag[String](definitions.StringClass.asTypeConstructor))
def literal(x: Char) = Expr[Char](Literal(Constant(x)))(TypeTag.Char)
}
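Note on the change above: the String overload no longer goes through a predefined TypeTag.String; the tag is built directly from the class symbol, consistent with String being dropped from coreTags in the next file. For reference, the same construction in isolation (illustrative only):

    // same expression as the added line above: a TypeTag[String] built from the String class symbol
    val stringTag = TypeTag[String](definitions.StringClass.asTypeConstructor)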
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index b70c3f44a3..e09f13a052 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -11,20 +11,21 @@ abstract class Taggers {
import treeBuild._
val coreTags = Map(
- ByteClass.asType -> nme.Byte,
- ShortClass.asType -> nme.Short,
- CharClass.asType -> nme.Char,
- IntClass.asType -> nme.Int,
- LongClass.asType -> nme.Long,
- FloatClass.asType -> nme.Float,
- DoubleClass.asType -> nme.Double,
- BooleanClass.asType -> nme.Boolean,
- UnitClass.asType -> nme.Unit,
- AnyClass.asType -> nme.Any,
- ObjectClass.asType -> nme.Object,
- NothingClass.asType -> nme.Nothing,
- NullClass.asType -> nme.Null,
- StringClass.asType -> nme.String)
+ ByteTpe -> nme.Byte,
+ ShortTpe -> nme.Short,
+ CharTpe -> nme.Char,
+ IntTpe -> nme.Int,
+ LongTpe -> nme.Long,
+ FloatTpe -> nme.Float,
+ DoubleTpe -> nme.Double,
+ BooleanTpe -> nme.Boolean,
+ UnitTpe -> nme.Unit,
+ AnyTpe -> nme.Any,
+ AnyValTpe -> nme.AnyVal,
+ AnyRefTpe -> nme.AnyRef,
+ ObjectTpe -> nme.Object,
+ NothingTpe -> nme.Nothing,
+ NullTpe -> nme.Null)
def materializeClassTag(prefix: Tree, tpe: Type): Tree = {
val tagModule = ClassTagModule
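With the map now keyed on the predefined *Tpe constants (AnyVal/AnyRef added, String dropped), looking up a core tag reduces to a plain Map access. A minimal sketch of such a lookup, assuming a helper name that is not part of this patch:

    // hypothetical helper: for a core type, select its predefined tag name (e.g. nme.Int) on the tag module
    def coreTagFor(tagModule: Tree, tpe: Type): Option[Tree] =
      coreTags.get(tpe) map (tagName => Select(tagModule, tagName))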
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
index 3a98d308a7..9b0777580b 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -23,10 +23,8 @@ trait GenSymbols {
def symtab: SymbolTable = state.symtab
/** Reify a reference to a symbol */
- def reifySymRef(sym0: Symbol): Tree = {
- assert(sym0 != null, "sym is null")
- val sym = sym0.dealias
-
+ def reifySymRef(sym: Symbol): Tree = {
+ assert(sym != null, "sym is null")
if (sym == NoSymbol)
mirrorSelect(nme.NoSymbol)
else if (sym.isRootPackage)
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index b97bf6b0cd..f48df8df65 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -161,11 +161,9 @@ trait GenTrees {
if (tree.symbol.isLocalToReifee || tree.tpe.isLocalToReifee)
reifyProduct(tree)
else {
- val sym0 = tree.symbol
- val sym = sym0.dealias
- val tpe0 = tree.tpe
- val tpe = tpe0.dealias
- if (reifyDebug) println("reifying bound type %s (underlying type is %s, dealiased is %s)".format(sym0, tpe0, tpe))
+ val sym = tree.symbol
+ val tpe = tree.tpe
+ if (reifyDebug) println("reifying bound type %s (underlying type is %s)".format(sym, tpe))
if (tpe.isSpliceable) {
val spliced = spliceType(tpe)
@@ -187,7 +185,7 @@ trait GenTrees {
if (reifyDebug) println("tpe is locatable: reify as Ident(%s)".format(sym))
mirrorBuildCall(nme.Ident, reify(sym))
} else {
- if (reifyDebug) println("tpe is an alias, but not a locatable: reify as TypeTree(%s)".format(tpe))
+ if (reifyDebug) println("tpe is not locatable: reify as TypeTree(%s)".format(tpe))
mirrorBuildCall(nme.TypeTree, reify(tpe))
}
}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
index f4e2200edc..82951a2434 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -11,9 +11,8 @@ trait GenTypes {
* Reify a type.
* For internal use only, use ``reified'' instead.
*/
- def reifyType(tpe0: Type): Tree = {
- assert(tpe0 != null, "tpe is null")
- val tpe = tpe0.dealias
+ def reifyType(tpe: Type): Tree = {
+ assert(tpe != null, "tpe is null")
if (tpe.isErroneous)
CannotReifyErroneousReifee(tpe)
@@ -29,9 +28,9 @@ trait GenTypes {
if (spliced != EmptyTree)
return spliced
- val tsym = tpe.typeSymbol
+ val tsym = tpe.typeSymbolDirect
if (tsym.isClass && tpe == tsym.typeConstructor && tsym.isStatic)
- Select(Select(reify(tpe.typeSymbol), nme.asTypeSymbol), nme.asTypeConstructor)
+ Select(Select(reify(tsym), nme.asTypeSymbol), nme.asTypeConstructor)
else tpe match {
case tpe @ NoType =>
reifyMirrorObject(tpe)
@@ -107,13 +106,11 @@ trait GenTypes {
}
private def spliceAsManifest(tpe: Type): Tree = {
- val ManifestClass = rootMirror.staticClass("scala.reflect.Manifest")
- val ManifestModule = rootMirror.staticModule("scala.reflect.Manifest")
- def isSynthetic(manifest: Tree) = manifest exists (sub => sub.symbol != null && (sub.symbol == ManifestModule || sub.symbol.owner == ManifestModule))
+ def isSynthetic(manifest: Tree) = manifest exists (sub => sub.symbol != null && (sub.symbol == FullManifestModule || sub.symbol.owner == FullManifestModule))
def searchForManifest(typer: analyzer.Typer): Tree =
analyzer.inferImplicit(
EmptyTree,
- appliedType(ManifestClass.asTypeConstructor, List(tpe)),
+ appliedType(FullManifestClass.asTypeConstructor, List(tpe)),
reportAmbiguous = false,
isView = false,
context = typer.context,
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index f7541a4739..b638745327 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -860,7 +860,7 @@ abstract class GenICode extends SubComponent {
if (sym.isLabel) { // jump to a label
val label = ctx.labels.getOrElse(sym, {
// it is a forward jump, scan for labels
- scanForLabels(ctx.defdef, ctx)
+ resolveForwardLabel(ctx.defdef, ctx, sym)
ctx.labels.get(sym) match {
case Some(l) =>
log("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
@@ -1406,21 +1406,17 @@ abstract class GenICode extends SubComponent {
def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
/**
- * Traverse the tree and store label stubs in the context. This is
- * necessary to handle forward jumps, because at a label application
- * with arguments, the symbols of the corresponding LabelDef parameters
- * are not yet known.
+ * Find the label denoted by `lsym` and enter it in context `ctx`.
*
- * Since it is expensive to traverse each method twice, this method is called
- * only when forward jumps really happen, and then it re-traverses the whole
- * method, scanning for LabelDefs.
+ * We only enter one symbol at a time, even though we might traverse the same
+ * tree more than once per method. That's because we cannot enter labels that
+ * might be duplicated (for instance, inside finally blocks).
*
* TODO: restrict the scanning to smaller subtrees than the whole method.
* It is sufficient to scan the trees of the innermost enclosing block.
*/
- //
- private def scanForLabels(tree: Tree, ctx: Context): Unit = tree foreachPartial {
- case t @ LabelDef(_, params, rhs) =>
+ private def resolveForwardLabel(tree: Tree, ctx: Context, lsym: Symbol): Unit = tree foreachPartial {
+ case t @ LabelDef(_, params, rhs) if t.symbol == lsym =>
ctx.labels.getOrElseUpdate(t.symbol, {
val locals = params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false))
ctx.method addLocals locals
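For context, a sketch of the forward-jump call site this serves (first hunk above): only the label that is actually the jump target gets resolved, so LabelDefs that may be duplicated (for instance inside finally blocks) are never entered by accident.

    // forward jump: sym's LabelDef has not been visited yet, so scan for exactly that label
    val label = ctx.labels.getOrElse(sym, {
      resolveForwardLabel(ctx.defdef, ctx, sym)   // enters only sym's LabelDef into ctx.labels
      ctx.labels(sym)                             // sketch; the real code re-checks via ctx.labels.get(sym)
    })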
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index 5f495c8456..13457bfe58 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -15,37 +15,48 @@ import scala.reflect.internal.util.{Position,NoPosition}
/*
A pattern match
- case THIS(clasz) =>
- case STORE_THIS(kind) =>
- case CONSTANT(const) =>
- case LOAD_ARRAY_ITEM(kind) =>
- case LOAD_LOCAL(local) =>
- case LOAD_FIELD(field, isStatic) =>
- case LOAD_MODULE(module) =>
- case STORE_ARRAY_ITEM(kind) =>
- case STORE_LOCAL(local) =>
- case STORE_FIELD(field, isStatic) =>
- case CALL_PRIMITIVE(primitive) =>
- case CALL_METHOD(method, style) =>
- case NEW(kind) =>
- case CREATE_ARRAY(elem, dims) =>
- case IS_INSTANCE(tpe) =>
- case CHECK_CAST(tpe) =>
- case SWITCH(tags, labels) =>
- case JUMP(whereto) =>
- case CJUMP(success, failure, cond, kind) =>
- case CZJUMP(success, failure, cond, kind) =>
- case RETURN(kind) =>
- case THROW(clasz) =>
- case DROP(kind) =>
- case DUP(kind) =>
- case MONITOR_ENTER() =>
- case MONITOR_EXIT() =>
- case BOX(boxType) =>
- case UNBOX(tpe) =>
- case SCOPE_ENTER(lv) =>
- case SCOPE_EXIT(lv) =>
- case LOAD_EXCEPTION(clasz) =>
+ // locals
+ case THIS(clasz) =>
+ case STORE_THIS(kind) =>
+ case LOAD_LOCAL(local) =>
+ case STORE_LOCAL(local) =>
+ case SCOPE_ENTER(lv) =>
+ case SCOPE_EXIT(lv) =>
+ // stack
+ case LOAD_MODULE(module) =>
+ case LOAD_EXCEPTION(clasz) =>
+ case DROP(kind) =>
+ case DUP(kind) =>
+ // constants
+ case CONSTANT(const) =>
+ // arithlogic
+ case CALL_PRIMITIVE(primitive) =>
+ // casts
+ case IS_INSTANCE(tpe) =>
+ case CHECK_CAST(tpe) =>
+ // objs
+ case NEW(kind) =>
+ case MONITOR_ENTER() =>
+ case MONITOR_EXIT() =>
+ case BOX(boxType) =>
+ case UNBOX(tpe) =>
+ // flds
+ case LOAD_FIELD(field, isStatic) =>
+ case STORE_FIELD(field, isStatic) =>
+ // mthds
+ case CALL_METHOD(method, style) =>
+ // arrays
+ case LOAD_ARRAY_ITEM(kind) =>
+ case STORE_ARRAY_ITEM(kind) =>
+ case CREATE_ARRAY(elem, dims) =>
+ // jumps
+ case SWITCH(tags, labels) =>
+ case JUMP(whereto) =>
+ case CJUMP(success, failure, cond, kind) =>
+ case CZJUMP(success, failure, cond, kind) =>
+ // ret
+ case RETURN(kind) =>
+ case THROW(clasz) =>
*/
@@ -58,11 +69,26 @@ import scala.reflect.internal.util.{Position,NoPosition}
trait Opcodes { self: ICodes =>
import global.{Symbol, NoSymbol, Type, Name, Constant};
+ // categories of ICode instructions
+ final val localsCat = 1
+ final val stackCat = 2
+ final val constCat = 3
+ final val arilogCat = 4
+ final val castsCat = 5
+ final val objsCat = 6
+ final val fldsCat = 7
+ final val mthdsCat = 8
+ final val arraysCat = 9
+ final val jumpsCat = 10
+ final val retCat = 11
+
/** This class represents an instruction of the intermediate code.
* Each case subclass will represent a specific operation.
*/
abstract class Instruction extends Cloneable {
+ def category: Int = 0 // undefined
+
/** This abstract method returns the number of used elements on the stack */
def consumed : Int = 0
@@ -118,6 +144,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def producedTypes = List(REFERENCE(clasz))
+
+ override def category = localsCat
}
/** Loads a constant on the stack.
@@ -130,6 +158,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def producedTypes = List(toTypeKind(constant.tpe))
+
+ override def category = constCat
}
/** Loads an element of an array. The array and the index should
@@ -143,6 +173,8 @@ trait Opcodes { self: ICodes =>
override def consumedTypes = List(ARRAY(kind), INT)
override def producedTypes = List(kind)
+
+ override def category = arraysCat
}
/** Load a local variable on the stack. It can be a method argument.
@@ -154,6 +186,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def producedTypes = List(local.kind)
+
+ override def category = localsCat
}
/** Load a field on the stack. The object to which it refers should be
@@ -176,6 +210,8 @@ trait Opcodes { self: ICodes =>
// see #4283
var hostClass: Symbol = field.owner
def setHostClass(cls: Symbol): this.type = { hostClass = cls; this }
+
+ override def category = fldsCat
}
case class LOAD_MODULE(module: Symbol) extends Instruction {
@@ -187,6 +223,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def producedTypes = List(REFERENCE(module))
+
+ override def category = stackCat
}
/** Store a value into an array at a specified index.
@@ -198,6 +236,8 @@ trait Opcodes { self: ICodes =>
override def produced = 0
override def consumedTypes = List(ARRAY(kind), INT, kind)
+
+ override def category = arraysCat
}
/** Store a value into a local variable. It can be an argument.
@@ -209,6 +249,8 @@ trait Opcodes { self: ICodes =>
override def produced = 0
override def consumedTypes = List(local.kind)
+
+ override def category = localsCat
}
/** Store a value into a field.
@@ -228,6 +270,8 @@ trait Opcodes { self: ICodes =>
List(toTypeKind(field.tpe))
else
List(REFERENCE(field.owner), toTypeKind(field.tpe));
+
+ override def category = fldsCat
}
/** Store a value into the 'this' pointer.
@@ -238,6 +282,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
override def consumedTypes = List(kind)
+ override def category = localsCat
}
/** Call a primitive function.
@@ -292,6 +337,8 @@ trait Opcodes { self: ICodes =>
case StartConcat => List(ConcatClass)
case EndConcat => List(REFERENCE(global.definitions.StringClass))
}
+
+ override def category = arilogCat
}
/** This class represents a CALL_METHOD instruction
@@ -347,6 +394,8 @@ trait Opcodes { self: ICodes =>
* being able to store such instructions into maps, when more
* than one CALL_METHOD to the same method might exist.
*/
+
+ override def category = mthdsCat
}
case class BOX(boxType: TypeKind) extends Instruction {
@@ -355,6 +404,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def consumedTypes = boxType :: Nil
override def produced = 1
+ override def category = objsCat
}
case class UNBOX(boxType: TypeKind) extends Instruction {
@@ -363,6 +413,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def consumedTypes = ObjectReference :: Nil
override def produced = 1
+ override def category = objsCat
}
/** Create a new instance of a class through the specified constructor
@@ -378,6 +429,8 @@ trait Opcodes { self: ICodes =>
/** The corresponding constructor call. */
var init: CALL_METHOD = _
+
+ override def category = objsCat
}
@@ -392,6 +445,8 @@ trait Opcodes { self: ICodes =>
override def consumed = dims;
override def consumedTypes = List.fill(dims)(INT)
override def produced = 1;
+
+ override def category = arraysCat
}
/** This class represents a IS_INSTANCE instruction
@@ -405,6 +460,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def consumedTypes = ObjectReference :: Nil
override def produced = 1
+
+ override def category = castsCat
}
/** This class represents a CHECK_CAST instruction
@@ -419,6 +476,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override val consumedTypes = List(ObjectReference)
override def producedTypes = List(typ)
+
+ override def category = castsCat
}
/** This class represents a SWITCH instruction
@@ -439,6 +498,8 @@ trait Opcodes { self: ICodes =>
override val consumedTypes = List(INT)
def flatTagsCount: Int = { var acc = 0; var rest = tags; while(rest.nonEmpty) { acc += rest.head.length; rest = rest.tail }; acc } // a one-liner
+
+ override def category = jumpsCat
}
/** This class represents a JUMP instruction
@@ -451,6 +512,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 0
+
+ override def category = jumpsCat
}
/** This class represents a CJUMP instruction
@@ -474,6 +537,8 @@ trait Opcodes { self: ICodes =>
override def produced = 0
override val consumedTypes = List(kind, kind)
+
+ override def category = jumpsCat
}
/** This class represents a CZJUMP instruction
@@ -495,6 +560,8 @@ trait Opcodes { self: ICodes =>
override def produced = 0
override val consumedTypes = List(kind)
+
+ override def category = jumpsCat
}
@@ -507,6 +574,8 @@ trait Opcodes { self: ICodes =>
override def produced = 0
// TODO override val consumedTypes = List(kind)
+
+ override def category = retCat
}
/** This class represents a THROW instruction
@@ -522,6 +591,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override def category = retCat
}
/** This class represents a DROP instruction
@@ -534,6 +605,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override def category = stackCat
}
/** This class represents a DUP instruction
@@ -543,6 +616,7 @@ trait Opcodes { self: ICodes =>
case class DUP (typ: TypeKind) extends Instruction {
override def consumed = 1
override def produced = 2
+ override def category = stackCat
}
/** This class represents a MONITOR_ENTER instruction
@@ -555,6 +629,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override def category = objsCat
}
/** This class represents a MONITOR_EXIT instruction
@@ -567,6 +643,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1;
override def produced = 0;
+
+ override def category = objsCat
}
/** A local variable becomes visible at this point in code.
@@ -577,6 +655,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = "SCOPE_ENTER " + lv
override def consumed = 0
override def produced = 0
+ override def category = localsCat
}
/** A local variable leaves its scope at this point in code.
@@ -587,6 +666,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = "SCOPE_EXIT " + lv
override def consumed = 0
override def produced = 0
+ override def category = localsCat
}
/** Fake instruction. It designates the VM who pushes an exception
@@ -598,6 +678,7 @@ trait Opcodes { self: ICodes =>
override def consumed = sys.error("LOAD_EXCEPTION does clean the whole stack, no idea how many things it consumes!")
override def produced = 1
override def producedTypes = REFERENCE(clasz) :: Nil
+ override def category = stackCat
}
/** This class represents a method invocation style. */
@@ -658,6 +739,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def producedTypes = List(msil_mgdptr(local.kind))
+
+ override def category = localsCat
}
case class CIL_LOAD_FIELD_ADDRESS(field: Symbol, isStatic: Boolean) extends Instruction {
@@ -670,6 +753,8 @@ trait Opcodes { self: ICodes =>
override def consumedTypes = if (isStatic) Nil else List(REFERENCE(field.owner));
override def producedTypes = List(msil_mgdptr(REFERENCE(field.owner)));
+
+ override def category = fldsCat
}
case class CIL_LOAD_ARRAY_ITEM_ADDRESS(kind: TypeKind) extends Instruction {
@@ -681,6 +766,8 @@ trait Opcodes { self: ICodes =>
override def consumedTypes = List(ARRAY(kind), INT)
override def producedTypes = List(msil_mgdptr(kind))
+
+ override def category = arraysCat
}
case class CIL_UNBOX(valueType: TypeKind) extends Instruction {
@@ -689,6 +776,7 @@ trait Opcodes { self: ICodes =>
override def consumedTypes = ObjectReference :: Nil // actually consumes a 'boxed valueType'
override def produced = 1
override def producedTypes = List(msil_mgdptr(valueType))
+ override def category = objsCat
}
case class CIL_INITOBJ(valueType: TypeKind) extends Instruction {
@@ -696,6 +784,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def consumedTypes = ObjectReference :: Nil // actually consumes a managed pointer
override def produced = 0
+ override def category = objsCat
}
case class CIL_NEWOBJ(method: Symbol) extends Instruction {
@@ -705,6 +794,7 @@ trait Opcodes { self: ICodes =>
override def consumedTypes = method.tpe.paramTypes map toTypeKind
override def produced = 1
override def producedTypes = List(toTypeKind(method.tpe.resultType))
+ override def category = objsCat
}
}
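The new category field lets consumers dispatch on a compact integer before matching on the concrete instruction, which is exactly how GenASM uses it further down. A minimal sketch of that two-level dispatch:

    // first level: table switch over the category; second level: match the concrete case class
    (instr.category: @scala.annotation.switch) match {
      case icodes.localsCat => (instr: @unchecked) match {
        case LOAD_LOCAL(local)  => // emit a load of `local`
        case STORE_LOCAL(local) => // emit a store to `local`
        case _                  => // THIS, STORE_THIS, SCOPE_ENTER, SCOPE_EXIT
      }
      case icodes.jumpsCat => // SWITCH, JUMP, CJUMP, CZJUMP handled here
      case _               => // remaining categories handled analogously
    }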
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
index 290979d205..663b626bef 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
@@ -38,19 +38,21 @@ trait Repository {
}
/** Load bytecode for given symbol. */
- def load(sym: Symbol) {
+ def load(sym: Symbol): Boolean = {
try {
val (c1, c2) = icodeReader.readClass(sym)
- assert(c1.symbol == sym || c2.symbol == sym,
- "c1.symbol = %s, c2.symbol = %s, sym = %s".format(c1.symbol, c2.symbol, sym))
+ assert(c1.symbol == sym || c2.symbol == sym, "c1.symbol = %s, c2.symbol = %s, sym = %s".format(c1.symbol, c2.symbol, sym))
loaded += (c1.symbol -> c1)
loaded += (c2.symbol -> c2)
+
+ true
} catch {
case e: Throwable => // possible exceptions are MissingRequirementError, IOException and TypeError -> no better common supertype
log("Failed to load %s. [%s]".format(sym.fullName, e.getMessage))
- if (settings.debug.value)
- e.printStackTrace
+ if (settings.debug.value) { e.printStackTrace }
+
+ false
}
}
}
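Since load now reports whether the classfile could actually be read, a caller can fall back gracefully instead of assuming the ICode is available. Sketch of the intended usage (caller code assumed, not part of this patch):

    // consult the repository only if loading succeeded
    val icodeOpt = if (loaded.contains(sym) || load(sym)) Some(loaded(sym)) else None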
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index d31eafff48..c3fbf31cc6 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -174,11 +174,8 @@ abstract class TypeFlowAnalysis {
}
i match {
- case THIS(clasz) =>
- stack push toTypeKind(clasz.tpe)
-
- case CONSTANT(const) =>
- stack push toTypeKind(const.tpe)
+ case THIS(clasz) => stack push toTypeKind(clasz.tpe)
+ case CONSTANT(const) => stack push toTypeKind(const.tpe)
case LOAD_ARRAY_ITEM(kind) =>
stack.pop2 match {
@@ -194,139 +191,73 @@ abstract class TypeFlowAnalysis {
stack push (if (t == typeLattice.bottom) local.kind else t)
case LOAD_FIELD(field, isStatic) =>
- if (!isStatic)
- stack.pop
+ if (!isStatic) { stack.pop }
stack push toTypeKind(field.tpe)
- case LOAD_MODULE(module) =>
- stack push toTypeKind(module.tpe)
-
- case STORE_ARRAY_ITEM(kind) =>
- stack.pop3
-
- case STORE_LOCAL(local) =>
- val t = stack.pop
- bindings += (local -> t)
-
- case STORE_THIS(_) =>
- stack.pop
+ case LOAD_MODULE(module) => stack push toTypeKind(module.tpe)
+ case STORE_ARRAY_ITEM(kind) => stack.pop3
+ case STORE_LOCAL(local) => val t = stack.pop; bindings += (local -> t)
+ case STORE_THIS(_) => stack.pop
- case STORE_FIELD(field, isStatic) =>
- if (isStatic)
- stack.pop
- else
- stack.pop2
+ case STORE_FIELD(field, isStatic) => if (isStatic) stack.pop else stack.pop2
case CALL_PRIMITIVE(primitive) =>
primitive match {
- case Negation(kind) =>
- stack.pop; stack.push(kind)
+ case Negation(kind) => stack.pop; stack.push(kind)
+
case Test(_, kind, zero) =>
stack.pop
- if (!zero) stack.pop
+ if (!zero) { stack.pop }
stack push BOOL;
- case Comparison(_, _) =>
- stack.pop2
- stack push INT
+
+ case Comparison(_, _) => stack.pop2; stack push INT
case Arithmetic(op, kind) =>
stack.pop
- if (op != NOT)
- stack.pop
+ if (op != NOT) { stack.pop }
val k = kind match {
case BYTE | SHORT | CHAR => INT
case _ => kind
}
stack push k
- case Logical(op, kind) =>
- stack.pop2
- stack push kind
-
- case Shift(op, kind) =>
- stack.pop2
- stack push kind
-
- case Conversion(src, dst) =>
- stack.pop
- stack push dst
-
- case ArrayLength(kind) =>
- stack.pop
- stack push INT
-
- case StartConcat =>
- stack.push(ConcatClass)
-
- case EndConcat =>
- stack.pop
- stack.push(STRING)
-
- case StringConcat(el) =>
- stack.pop2
- stack push ConcatClass
+ case Logical(op, kind) => stack.pop2; stack push kind
+ case Shift(op, kind) => stack.pop2; stack push kind
+ case Conversion(src, dst) => stack.pop; stack push dst
+ case ArrayLength(kind) => stack.pop; stack push INT
+ case StartConcat => stack.push(ConcatClass)
+ case EndConcat => stack.pop; stack.push(STRING)
+ case StringConcat(el) => stack.pop2; stack push ConcatClass
}
case cm @ CALL_METHOD(_, _) =>
stack pop cm.consumed
cm.producedTypes foreach (stack push _)
- case BOX(kind) =>
- stack.pop
- stack.push(BOXED(kind))
-
- case UNBOX(kind) =>
- stack.pop
- stack.push(kind)
-
- case NEW(kind) =>
- stack.push(kind)
-
- case CREATE_ARRAY(elem, dims) =>
- stack.pop(dims)
- stack.push(ARRAY(elem))
-
- case IS_INSTANCE(tpe) =>
- stack.pop
- stack.push(BOOL)
-
- case CHECK_CAST(tpe) =>
- stack.pop
- stack.push(tpe)
+ case BOX(kind) => stack.pop; stack.push(BOXED(kind))
+ case UNBOX(kind) => stack.pop; stack.push(kind)
- case SWITCH(tags, labels) =>
- stack.pop
+ case NEW(kind) => stack.push(kind)
- case JUMP(whereto) =>
- ()
+ case CREATE_ARRAY(elem, dims) => stack.pop(dims); stack.push(ARRAY(elem))
- case CJUMP(success, failure, cond, kind) =>
- stack.pop2
+ case IS_INSTANCE(tpe) => stack.pop; stack.push(BOOL)
+ case CHECK_CAST(tpe) => stack.pop; stack.push(tpe)
- case CZJUMP(success, failure, cond, kind) =>
- stack.pop
+ case _: SWITCH => stack.pop
+ case _: JUMP => ()
+ case _: CJUMP => stack.pop2
+ case _: CZJUMP => stack.pop
- case RETURN(kind) =>
- if (kind != UNIT)
- stack.pop;
+ case RETURN(kind) => if (kind != UNIT) { stack.pop }
+ case THROW(_) => stack.pop
- case THROW(_) =>
- stack.pop
+ case DROP(kind) => stack.pop
+ case DUP(kind) => stack.push(stack.head)
- case DROP(kind) =>
- stack.pop
+ case MONITOR_ENTER() | MONITOR_EXIT() => stack.pop
- case DUP(kind) =>
- stack.push(stack.head)
-
- case MONITOR_ENTER() =>
- stack.pop
-
- case MONITOR_EXIT() =>
- stack.pop
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
- ()
+ case SCOPE_ENTER(_) | SCOPE_EXIT(_) => ()
case LOAD_EXCEPTION(clasz) =>
stack.pop(stack.length)
@@ -551,14 +482,24 @@ abstract class TypeFlowAnalysis {
val relevantBBs = mutable.Set.empty[BasicBlock]
+ /*
+ * Rationale to prevent some methods from ever being inlined:
+ *
+ * (1) inlining getters and setters results in exposing a private field,
+ * which may itself prevent inlining of the caller (at best) or
+ * lead to situations like SI-5442 ("IllegalAccessError when mixing optimized and unoptimized bytecode")
+ *
+ * (2) only invocations having a receiver object are considered (ie no static-methods are ever inlined).
+ * This is taken care of by checking `isDynamic` (ie virtual method dispatch) and `Static(true)` (ie calls to private members)
+ */
private def isPreCandidate(cm: opcodes.CALL_METHOD): Boolean = {
val msym = cm.method
val style = cm.style
- // Dynamic == normal invocations
- // Static(true) == calls to private members
- !msym.isConstructor && !blackballed(msym) &&
- (style.isDynamic || (style.hasInstance && style.isStatic))
- // && !(msym hasAnnotation definitions.ScalaNoInlineClass)
+
+ !blackballed(msym) &&
+ !msym.isConstructor &&
+ (!msym.isAccessor || inliner.isClosureClass(msym.owner)) &&
+ (style.isDynamic || (style.hasInstance && style.isStatic))
}
override def init(m: icodes.IMethod) {
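The accessor exclusion added to isPreCandidate addresses point (1) of the rationale above: inlining a getter or setter copies a reference to a private field into a foreign class. A self-contained illustration of the hazard (not compiler code):

    class C {
      private var x = 0
      def getX = x        // inlining this body into a call site in another class would access
    }                     // C.x directly from that class -> IllegalAccessError (cf. SI-5442)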
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index ff68aba845..42921e733d 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -37,7 +37,7 @@ trait BytecodeWriters {
getFile(outputDirectory(sym), clsName, suffix)
trait BytecodeWriter {
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol): Unit
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit
def close(): Unit = ()
}
@@ -48,7 +48,9 @@ trait BytecodeWriters {
)
val writer = new Jar(jfile).jarWriter(jarMainAttrs: _*)
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ assert(outfile == null,
+ "The outfile formal param is there just because ClassBytecodeWriter overrides this method and uses it.")
val path = jclassName + ".class"
val out = writer.newOutputStream(path)
@@ -72,21 +74,21 @@ trait BytecodeWriters {
try javap(Seq("-verbose", "dummy")) foreach (_.show())
finally pw.close()
}
- abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- super.writeClass(label, jclassName, jclassBytes, sym)
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ super.writeClass(label, jclassName, jclassBytes, outfile)
- val bytes = getFile(sym, jclassName, ".class").toByteArray
val segments = jclassName.split("[./]")
val javapFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "javap" toFile;
javapFile.parent.createDirectory()
- emitJavap(bytes, javapFile)
+ emitJavap(jclassBytes, javapFile)
}
}
trait ClassBytecodeWriter extends BytecodeWriter {
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- val outfile = getFile(sym, jclassName, ".class")
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ assert(outfile != null,
+ "Precisely this override requires its invoker to hand out a non-null AbstractFile.")
val outstream = new DataOutputStream(outfile.bufferedOutput)
try outstream.write(jclassBytes, 0, jclassBytes.length)
@@ -98,8 +100,8 @@ trait BytecodeWriters {
trait DumpBytecodeWriter extends BytecodeWriter {
val baseDir = Directory(settings.Ydumpclasses.value).createDirectory()
- abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- super.writeClass(label, jclassName, jclassBytes, sym)
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ super.writeClass(label, jclassName, jclassBytes, outfile)
val pathName = jclassName
var dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
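Because writeClass no longer receives a Symbol, deciding whether an output AbstractFile is needed at all (and where it lives) moves to the caller. A sketch of the adapter callers use, mirroring the GenJVM hunk further down:

    // only ClassBytecodeWriter really needs an output file; the other writers receive null
    val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
    def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
      val outF: AbstractFile = if (needsOutfileForSymbol) getFile(sym, jclassName, ".class") else null
      bytecodeWriter.writeClass(label, jclassName, jclassBytes, outF)
    }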
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 59adcc637a..5ab8a3d751 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -29,6 +29,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
val phaseName = "jvm"
+ case class WorkUnit(label: String, jclassName: String, jclass: asm.ClassWriter, outF: AbstractFile)
+
+ type WorkUnitQueue = _root_.java.util.concurrent.LinkedBlockingQueue[WorkUnit]
+
/** Create a new phase */
override def newPhase(p: Phase): Phase = new AsmPhase(p)
@@ -148,6 +152,44 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
+ // -----------------------------------------------------------------------------------------
+ // Allow overlapping disk write of classfiles with building of the next classfiles.
+ // -----------------------------------------------------------------------------------------
+
+ val q = new WorkUnitQueue(500)
+
+ class WriteTask(bytecodeWriter: BytecodeWriter) extends _root_.java.lang.Runnable {
+
+ def run() {
+ var stop = false
+ try {
+ while (!stop) {
+ val WorkUnit(label, jclassName, jclass, outF) = q.take
+ if(jclass eq null) { stop = true }
+ else { writeIfNotTooBig(label, jclassName, jclass, outF) }
+ }
+ } catch {
+ case ex: InterruptedException => throw ex
+ }
+ }
+
+ private def writeIfNotTooBig(label: String, jclassName: String, jclass: asm.ClassWriter, outF: AbstractFile) {
+ try {
+ val arr = jclass.toByteArray()
+ bytecodeWriter.writeClass(label, jclassName, arr, outF)
+ } catch {
+ case e: java.lang.RuntimeException if(e.getMessage() == "Class file too large!") =>
+ // TODO check where ASM throws the equivalent of CodeSizeTooBigException
+ log("Skipped class "+jclassName+" because it exceeds JVM limits (it's too big or has methods that are too long).")
+ }
+ }
+
+ }
+
+ // -----------------------------------------------------------------------------------------
+ // what AsmPhase does.
+ // -----------------------------------------------------------------------------------------
+
override def run() {
if (settings.debug.value)
@@ -161,10 +203,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
var sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName)
debuglog("Created new bytecode generator for " + classes.size + " classes.")
- val bytecodeWriter = initBytecodeWriter(sortedClasses filter isJavaEntryPoint)
- val plainCodeGen = new JPlainBuilder(bytecodeWriter)
- val mirrorCodeGen = new JMirrorBuilder(bytecodeWriter)
- val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter)
+ val bytecodeWriter = initBytecodeWriter(sortedClasses filter isJavaEntryPoint)
+ val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
+
+ val plainCodeGen = new JPlainBuilder(q, needsOutfileForSymbol)
+ val mirrorCodeGen = new JMirrorBuilder(q, needsOutfileForSymbol)
+ val beanInfoCodeGen = new JBeanInfoBuilder(q, needsOutfileForSymbol)
+
+ new _root_.java.lang.Thread(new WriteTask(bytecodeWriter)).start()
while(!sortedClasses.isEmpty) {
val c = sortedClasses.head
@@ -187,6 +233,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
classes -= c.symbol // GC opportunity
}
+ q put WorkUnit(null, null, null, null)
+
+ while(!q.isEmpty) { _root_.java.lang.Thread.sleep(10) }
+
bytecodeWriter.close()
classes.clear()
reverseJavaName.clear()
@@ -448,7 +498,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
val JAVA_LANG_STRING = asm.Type.getObjectType("java/lang/String")
/** basic functionality for class file building */
- abstract class JBuilder(bytecodeWriter: BytecodeWriter) {
+ abstract class JBuilder(wuQ: WorkUnitQueue, needsOutfileForSymbol: Boolean) {
val EMPTY_JTYPE_ARRAY = Array.empty[asm.Type]
val EMPTY_STRING_ARRAY = Array.empty[String]
@@ -504,17 +554,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// utitilies useful when emitting plain, mirror, and beaninfo classes.
// -----------------------------------------------------------------------------------------
- def writeIfNotTooBig(label: String, jclassName: String, jclass: asm.ClassWriter, sym: Symbol) {
- try {
- val arr = jclass.toByteArray()
- bytecodeWriter.writeClass(label, jclassName, arr, sym)
- } catch {
- case e: java.lang.RuntimeException if(e.getMessage() == "Class file too large!") =>
- // TODO check where ASM throws the equivalent of CodeSizeTooBigException
- log("Skipped class "+jclassName+" because it exceeds JVM limits (it's too big or has methods that are too long).")
- }
- }
-
/** Specialized array conversion to prevent calling
* java.lang.reflect.Array.newInstance via TraversableOnce.toArray
*/
@@ -728,11 +767,18 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
+ def enqueue(label: String, jclassName: String, jclass: asm.ClassWriter, sym: Symbol) {
+ val outF: scala.tools.nsc.io.AbstractFile = {
+ if(needsOutfileForSymbol) getFile(sym, jclassName, ".class") else null
+ }
+ wuQ put WorkUnit(label, jclassName, jclass, outF)
+ }
+
} // end of class JBuilder
/** functionality for building plain and mirror classes */
- abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+ abstract class JCommonBuilder(wuQ: WorkUnitQueue, needsOutfileForSymbol: Boolean) extends JBuilder(wuQ, needsOutfileForSymbol) {
// -----------------------------------------------------------------------------------------
// more constants
@@ -1290,8 +1336,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
case class BlockInteval(start: BasicBlock, end: BasicBlock)
/** builder of plain classes */
- class JPlainBuilder(bytecodeWriter: BytecodeWriter)
- extends JCommonBuilder(bytecodeWriter)
+ class JPlainBuilder(wuQ: WorkUnitQueue, needsOutfileForSymbol: Boolean)
+ extends JCommonBuilder(wuQ, needsOutfileForSymbol)
with JAndroidBuilder {
val MIN_SWITCH_DENSITY = 0.7
@@ -1445,7 +1491,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
addInnerClasses(clasz.symbol, jclass)
jclass.visitEnd()
- writeIfNotTooBig("" + c.symbol.name, thisName, jclass, c.symbol)
+ enqueue("" + c.symbol.name, thisName, jclass, c.symbol)
}
@@ -2366,251 +2412,269 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
- instr match {
- case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
-
- case CONSTANT(const) => genConstant(jmethod, const)
-
- case LOAD_ARRAY_ITEM(kind) => jcode.aload(kind)
-
- case LOAD_LOCAL(local) => jcode.load(indexOf(local), local.kind)
-
- case lf @ LOAD_FIELD(field, isStatic) =>
- var owner = javaName(lf.hostClass)
- debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
- val fieldJName = javaName(field)
- val fieldDescr = descriptor(field)
- val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
- jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
-
- case LOAD_MODULE(module) =>
- // assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
- if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
- jmethod.visitVarInsn(Opcodes.ALOAD, 0)
- } else {
- jmethod.visitFieldInsn(
- Opcodes.GETSTATIC,
- javaName(module) /* + "$" */ ,
- strMODULE_INSTANCE_FIELD,
- descriptor(module)
- )
- }
+ (instr.category: @scala.annotation.switch) match {
+
+ case icodes.localsCat => (instr: @unchecked) match {
+ case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
+ case LOAD_LOCAL(local) => jcode.load(indexOf(local), local.kind)
+ case STORE_LOCAL(local) => jcode.store(indexOf(local), local.kind)
+ case STORE_THIS(_) =>
+ // this only works for impl classes because the self parameter comes first
+ // in the method signature. If that changes, this code has to be revisited.
+ jmethod.visitVarInsn(Opcodes.ASTORE, 0)
+
+ case SCOPE_ENTER(lv) =>
+ // locals removed by closelim (via CopyPropagation) may have left behind SCOPE_ENTER, SCOPE_EXIT that are to be ignored
+ val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
+ if(relevant) { // TODO check: does GenICode emit SCOPE_ENTER, SCOPE_EXIT for synthetic vars?
+ // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
+ // similarly, these labels aren't tracked in the `labels` map.
+ val start = new asm.Label
+ jmethod.visitLabel(start)
+ scoping.pushScope(lv, start)
+ }
+
+ case SCOPE_EXIT(lv) =>
+ val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
+ if(relevant) {
+ // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
+ // similarly, these labels aren't tracked in the `labels` map.
+ val end = new asm.Label
+ jmethod.visitLabel(end)
+ scoping.popScope(lv, end)
+ }
+ }
- case STORE_ARRAY_ITEM(kind) => jcode.astore(kind)
+ case icodes.stackCat => (instr: @unchecked) match {
- case STORE_LOCAL(local) => jcode.store(indexOf(local), local.kind)
+ case LOAD_MODULE(module) =>
+ // assert(module.isModule, "Expected module: " + module)
+ debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
+ if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
+ jmethod.visitVarInsn(Opcodes.ALOAD, 0)
+ } else {
+ jmethod.visitFieldInsn(
+ Opcodes.GETSTATIC,
+ javaName(module) /* + "$" */ ,
+ strMODULE_INSTANCE_FIELD,
+ descriptor(module)
+ )
+ }
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- jmethod.visitVarInsn(Opcodes.ASTORE, 0)
+ case DROP(kind) => emit(if(kind.isWideType) Opcodes.POP2 else Opcodes.POP)
- case STORE_FIELD(field, isStatic) =>
- val owner = javaName(field.owner)
- val fieldJName = javaName(field)
- val fieldDescr = descriptor(field)
- val opc = if (isStatic) Opcodes.PUTSTATIC else Opcodes.PUTFIELD
- jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+ case DUP(kind) => emit(if(kind.isWideType) Opcodes.DUP2 else Opcodes.DUP)
- case CALL_PRIMITIVE(primitive) => genPrimitive(primitive, instr.pos)
+ case LOAD_EXCEPTION(_) => ()
+ }
- /** Special handling to access native Array.clone() */
- case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
- val target: String = javaType(call.targetTypeKind).getInternalName
- jcode.invokevirtual(target, "clone", mdesc_arrayClone)
+ case icodes.constCat => genConstant(jmethod, instr.asInstanceOf[CONSTANT].constant)
- case call @ CALL_METHOD(method, style) => genCallMethod(call)
+ case icodes.arilogCat => genPrimitive(instr.asInstanceOf[CALL_PRIMITIVE].primitive, instr.pos)
- case BOX(kind) =>
- val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
- jcode.invokestatic(BoxesRunTime, mname, mdesc)
+ case icodes.castsCat => (instr: @unchecked) match {
- case UNBOX(kind) =>
- val MethodNameAndType(mname, mdesc) = jUnboxTo(kind)
- jcode.invokestatic(BoxesRunTime, mname, mdesc)
+ case IS_INSTANCE(tpe) =>
+ val jtyp: asm.Type =
+ tpe match {
+ case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
+ case ARRAY(elem) => javaArrayType(javaType(elem))
+ case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
+ }
+ jmethod.visitTypeInsn(Opcodes.INSTANCEOF, jtyp.getInternalName)
- case NEW(REFERENCE(cls)) =>
- val className = javaName(cls)
- jmethod.visitTypeInsn(Opcodes.NEW, className)
+ case CHECK_CAST(tpe) =>
+ tpe match {
- case CREATE_ARRAY(elem, 1) => jcode newarray elem
+ case REFERENCE(cls) =>
+ if (cls != ObjectClass) { // No need to checkcast for Objects
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, javaName(cls))
+ }
- case CREATE_ARRAY(elem, dims) =>
- jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
+ case ARRAY(elem) =>
+ val iname = javaArrayType(javaType(elem)).getInternalName
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, iname)
- case IS_INSTANCE(tpe) =>
- val jtyp: asm.Type =
- tpe match {
- case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
- case ARRAY(elem) => javaArrayType(javaType(elem))
case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
}
- jmethod.visitTypeInsn(Opcodes.INSTANCEOF, jtyp.getInternalName)
- case CHECK_CAST(tpe) =>
- tpe match {
+ }
- case REFERENCE(cls) =>
- if (cls != ObjectClass) { // No need to checkcast for Objects
- jmethod.visitTypeInsn(Opcodes.CHECKCAST, javaName(cls))
- }
+ case icodes.objsCat => (instr: @unchecked) match {
- case ARRAY(elem) =>
- val iname = javaArrayType(javaType(elem)).getInternalName
- jmethod.visitTypeInsn(Opcodes.CHECKCAST, iname)
+ case BOX(kind) =>
+ val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
+ jcode.invokestatic(BoxesRunTime, mname, mdesc)
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
+ case UNBOX(kind) =>
+ val MethodNameAndType(mname, mdesc) = jUnboxTo(kind)
+ jcode.invokestatic(BoxesRunTime, mname, mdesc)
- case sw @ SWITCH(tagss, branches) =>
- assert(branches.length == tagss.length + 1, sw)
- val flatSize = sw.flatTagsCount
- val flatKeys = new Array[Int](flatSize)
- val flatBranches = new Array[asm.Label](flatSize)
-
- var restTagss = tagss
- var restBranches = branches
- var k = 0 // ranges over flatKeys and flatBranches
- while(restTagss.nonEmpty) {
- val currLabel = labels(restBranches.head)
- for(cTag <- restTagss.head) {
- flatKeys(k) = cTag;
- flatBranches(k) = currLabel
- k += 1
- }
- restTagss = restTagss.tail
- restBranches = restBranches.tail
- }
- val defaultLabel = labels(restBranches.head)
- assert(restBranches.tail.isEmpty)
- debuglog("Emitting SWITCH:\ntags: " + tagss + "\nbranches: " + branches)
- jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
-
- case JUMP(whereto) =>
- if (nextBlock != whereto) {
- jcode goTo labels(whereto)
- }
+ case NEW(REFERENCE(cls)) =>
+ val className = javaName(cls)
+ jmethod.visitTypeInsn(Opcodes.NEW, className)
- case CJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF_ICMP(cond.negate, labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ICMP(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- if (nextBlock == success) {
- jcode.emitIF_ACMP(cond.negate, labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ACMP(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- } else {
- (kind: @unchecked) match {
- case LONG => emit(Opcodes.LCMP)
- case FLOAT =>
- if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
- else emit(Opcodes.FCMPL)
- case DOUBLE =>
- if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
- else emit(Opcodes.DCMPL)
- }
- if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- }
+ case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
+ case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
+ }
- case CZJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- val Success = success
- val Failure = failure
- // @unchecked because references aren't compared with GT, GE, LT, LE.
- ((cond, nextBlock) : @unchecked) match {
- case (EQ, Success) => jcode emitIFNONNULL labels(failure)
- case (NE, Failure) => jcode emitIFNONNULL labels(success)
- case (EQ, Failure) => jcode emitIFNULL labels(success)
- case (NE, Success) => jcode emitIFNULL labels(failure)
- case (EQ, _) =>
- jcode emitIFNULL labels(success)
- jcode goTo labels(failure)
- case (NE, _) =>
- jcode emitIFNONNULL labels(success)
- jcode goTo labels(failure)
- }
- } else {
- (kind: @unchecked) match {
- case LONG =>
- emit(Opcodes.LCONST_0)
- emit(Opcodes.LCMP)
- case FLOAT =>
- emit(Opcodes.FCONST_0)
- if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
- else emit(Opcodes.FCMPL)
- case DOUBLE =>
- emit(Opcodes.DCONST_0)
- if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
- else emit(Opcodes.DCMPL)
- }
- if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- }
+ case icodes.fldsCat => (instr: @unchecked) match {
- case RETURN(kind) => jcode emitRETURN kind
+ case lf @ LOAD_FIELD(field, isStatic) =>
+ var owner = javaName(lf.hostClass)
+ debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
+ val fieldJName = javaName(field)
+ val fieldDescr = descriptor(field)
+ val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
+ jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
- case THROW(_) => emit(Opcodes.ATHROW)
+ case STORE_FIELD(field, isStatic) =>
+ val owner = javaName(field.owner)
+ val fieldJName = javaName(field)
+ val fieldDescr = descriptor(field)
+ val opc = if (isStatic) Opcodes.PUTSTATIC else Opcodes.PUTFIELD
+ jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
- case DROP(kind) =>
- emit(if(kind.isWideType) Opcodes.POP2 else Opcodes.POP)
+ }
- case DUP(kind) =>
- emit(if(kind.isWideType) Opcodes.DUP2 else Opcodes.DUP)
+ case icodes.mthdsCat => (instr: @unchecked) match {
- case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
+ /** Special handling to access native Array.clone() */
+ case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
+ val target: String = javaType(call.targetTypeKind).getInternalName
+ jcode.invokevirtual(target, "clone", mdesc_arrayClone)
- case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
+ case call @ CALL_METHOD(method, style) => genCallMethod(call)
- case SCOPE_ENTER(lv) =>
- // locals removed by closelim (via CopyPropagation) may have left behind SCOPE_ENTER, SCOPE_EXIT that are to be ignored
- val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
- if(relevant) { // TODO check: does GenICode emit SCOPE_ENTER, SCOPE_EXIT for synthetic vars?
- // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
- // similarly, these labels aren't tracked in the `labels` map.
- val start = new asm.Label
- jmethod.visitLabel(start)
- scoping.pushScope(lv, start)
- }
+ }
- case SCOPE_EXIT(lv) =>
- val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
- if(relevant) {
- // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
- // similarly, these labels aren't tracked in the `labels` map.
- val end = new asm.Label
- jmethod.visitLabel(end)
- scoping.popScope(lv, end)
- }
+ case icodes.arraysCat => (instr: @unchecked) match {
+ case LOAD_ARRAY_ITEM(kind) => jcode.aload(kind)
+ case STORE_ARRAY_ITEM(kind) => jcode.astore(kind)
+ case CREATE_ARRAY(elem, 1) => jcode newarray elem
+ case CREATE_ARRAY(elem, dims) => jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
+ }
+
+ case icodes.jumpsCat => (instr: @unchecked) match {
+
+ case sw @ SWITCH(tagss, branches) =>
+ assert(branches.length == tagss.length + 1, sw)
+ val flatSize = sw.flatTagsCount
+ val flatKeys = new Array[Int](flatSize)
+ val flatBranches = new Array[asm.Label](flatSize)
+
+ var restTagss = tagss
+ var restBranches = branches
+ var k = 0 // ranges over flatKeys and flatBranches
+ while(restTagss.nonEmpty) {
+ val currLabel = labels(restBranches.head)
+ for(cTag <- restTagss.head) {
+ flatKeys(k) = cTag;
+ flatBranches(k) = currLabel
+ k += 1
+ }
+ restTagss = restTagss.tail
+ restBranches = restBranches.tail
+ }
+ val defaultLabel = labels(restBranches.head)
+ assert(restBranches.tail.isEmpty)
+ debuglog("Emitting SWITCH:\ntags: " + tagss + "\nbranches: " + branches)
+ jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
+
+ case JUMP(whereto) =>
+ if (nextBlock != whereto) {
+ jcode goTo labels(whereto)
+ }
+
+ case CJUMP(success, failure, cond, kind) =>
+ if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ if (nextBlock == success) {
+ jcode.emitIF_ICMP(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF_ICMP(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ if (nextBlock == success) {
+ jcode.emitIF_ACMP(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF_ACMP(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else {
+ (kind: @unchecked) match {
+ case LONG => emit(Opcodes.LCMP)
+ case FLOAT =>
+ if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
+ else emit(Opcodes.FCMPL)
+ case DOUBLE =>
+ if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
+ else emit(Opcodes.DCMPL)
+ }
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ }
+
+ case CZJUMP(success, failure, cond, kind) =>
+ if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ val Success = success
+ val Failure = failure
+ // @unchecked because references aren't compared with GT, GE, LT, LE.
+ ((cond, nextBlock) : @unchecked) match {
+ case (EQ, Success) => jcode emitIFNONNULL labels(failure)
+ case (NE, Failure) => jcode emitIFNONNULL labels(success)
+ case (EQ, Failure) => jcode emitIFNULL labels(success)
+ case (NE, Success) => jcode emitIFNULL labels(failure)
+ case (EQ, _) =>
+ jcode emitIFNULL labels(success)
+ jcode goTo labels(failure)
+ case (NE, _) =>
+ jcode emitIFNONNULL labels(success)
+ jcode goTo labels(failure)
+ }
+ } else {
+ (kind: @unchecked) match {
+ case LONG =>
+ emit(Opcodes.LCONST_0)
+ emit(Opcodes.LCMP)
+ case FLOAT =>
+ emit(Opcodes.FCONST_0)
+ if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
+ else emit(Opcodes.FCMPL)
+ case DOUBLE =>
+ emit(Opcodes.DCONST_0)
+ if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
+ else emit(Opcodes.DCMPL)
+ }
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ }
+
+ }
+
+ case icodes.retCat => (instr: @unchecked) match {
+ case RETURN(kind) => jcode emitRETURN kind
+ case THROW(_) => emit(Opcodes.ATHROW)
+ }
- case LOAD_EXCEPTION(_) =>
- ()
}
}
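
The SWITCH case above flattens the per-branch tag lists and the branch list into two parallel arrays before handing them to emitSWITCH. Below is a minimal, self-contained sketch of that flattening (not part of the patch; String labels stand in for asm.Label targets):

    object SwitchFlattenSketch extends App {
      val tagss    = List(List(1, 2), List(5), List(7, 8, 9))   // one tag group per non-default branch
      val branches = List("b0", "b1", "b2", "default")          // branches.length == tagss.length + 1

      val flatSize     = tagss.map(_.size).sum
      val flatKeys     = new Array[Int](flatSize)
      val flatBranches = new Array[String](flatSize)

      var k = 0                                                 // ranges over flatKeys and flatBranches
      (tagss, branches).zipped foreach { (tags, branch) =>
        for (tag <- tags) { flatKeys(k) = tag; flatBranches(k) = branch; k += 1 }
      }
      val defaultBranch = branches.last

      println(flatKeys.toList)      // List(1, 2, 5, 7, 8, 9)
      println(flatBranches.toList)  // List(b0, b0, b1, b2, b2, b2)
      println(defaultBranch)        // default
    }
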
@@ -2863,7 +2927,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
/** builder of mirror classes */
- class JMirrorBuilder(bytecodeWriter: BytecodeWriter) extends JCommonBuilder(bytecodeWriter) {
+ class JMirrorBuilder(wuQ: WorkUnitQueue, needsOutfileForSymbol: Boolean) extends JCommonBuilder(wuQ, needsOutfileForSymbol) {
private var cunit: CompilationUnit = _
def getCurrentCUnit(): CompilationUnit = cunit;
@@ -2907,7 +2971,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
addInnerClasses(modsym, mirrorClass)
mirrorClass.visitEnd()
- writeIfNotTooBig("" + modsym.name, mirrorName, mirrorClass, modsym)
+ enqueue("" + modsym.name, mirrorName, mirrorClass, modsym)
}
@@ -2915,7 +2979,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
/** builder of bean info classes */
- class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+ class JBeanInfoBuilder(wuQ: WorkUnitQueue, needsOutfileForSymbol: Boolean) extends JBuilder(wuQ, needsOutfileForSymbol) {
/**
* Generate a bean info class that describes the given class.
@@ -3036,7 +3100,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
addInnerClasses(clasz.symbol, beanInfoClass)
beanInfoClass.visitEnd()
- writeIfNotTooBig("BeanInfo ", beanInfoName, beanInfoClass, clasz.symbol)
+ enqueue("BeanInfo ", beanInfoName, beanInfoClass, clasz.symbol)
}
} // end of class JBeanInfoBuilder
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 21260d399c..ad054015ef 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -183,7 +183,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
class BytecodeGenerator(bytecodeWriter: BytecodeWriter) extends BytecodeUtil {
def this() = this(new ClassBytecodeWriter { })
def debugLevel = settings.debuginfo.indexOfChoice
- import bytecodeWriter.writeClass
val MIN_SWITCH_DENSITY = 0.7
val INNER_CLASSES_FLAGS =
@@ -344,6 +343,15 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
writeClass("" + sym.name, jclass.getName(), toByteArray(jclass), sym)
}
+ val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
+
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ val outF: scala.tools.nsc.io.AbstractFile = {
+ if(needsOutfileForSymbol) getFile(sym, jclassName, ".class") else null
+ }
+ bytecodeWriter.writeClass(label, jclassName, jclassBytes, outF)
+ }
+
/** Returns the ScalaSignature annotation if it must be added to this class,
* none otherwise; furthermore, it adds to `jclass` the ScalaSig marker
* attribute (marking that a scala signature annotation is present) or the
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index f332e8cfdd..44acfed411 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -12,6 +12,29 @@ import scala.tools.nsc.symtab._
import scala.reflect.internal.util.NoSourceFile
/**
+ * Inliner balances two competing goals:
+ * (a) aggressive inlining of:
+ * (a.1) the apply methods of anonymous closures, so that their anon-classes can be eliminated;
+ * (a.2) higher-order methods defined in an external library, e.g. `Range.foreach()` among many others.
+ * (b) circumventing the barrier to inter-library inlining that private accesses in the callee impose.
+ *
+ * Summing up the discussion in SI-5442 and SI-5891,
+ * the current implementation achieves both goals above to a large degree, and
+ * overcomes a problem exhibited by previous versions:
+ *
+ * (1) Problem: Attempting to access a private member `p` at runtime results in an `IllegalAccessError`,
+ * where `p` is defined in a library L, and is accessed from a library C (for Client),
+ * where C was compiled against L', an optimized version of L where the inliner made `p` public at the bytecode level.
+ * The only such members are fields, either synthetic or isParamAccessor, and thus have a dollar sign in their name
+ * (the accessibility of methods and constructors isn't touched by the inliner).
+ *
+ * Thus we add one more goal to our list:
+ * (c) Compile C (either optimized or not) against any of L or L',
+ * so that it runs with either L or L' (in particular, compile against L' and run with L).
+ *
+ * The chosen strategy is described in some detail in the comments for `accessRequirements()` and `potentiallyPublicized()`.
+ * Documentation at http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/2011Q4/Inliner.pdf
+ *
* @author Iulian Dragos
*/
abstract class Inliners extends SubComponent {
@@ -50,6 +73,8 @@ abstract class Inliners extends SubComponent {
)
def lookup(clazz: Symbol): Symbol = {
// println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
+ assert(clazz != NoSymbol, "Walked up past Object.superClass looking for " + sym +
+ ", most likely this reveals the TFA at fault (receiver and callee don't match).")
if (sym.owner == clazz || isBottomType(clazz)) sym
else sym.overridingSymbol(clazz) match {
case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
@@ -86,13 +111,27 @@ abstract class Inliners extends SubComponent {
def name = phaseName
val inliner = new Inliner
- override def apply(c: IClass) {
- inliner analyzeClass c
+ object iclassOrdering extends Ordering[IClass] {
+ def compare(a: IClass, b: IClass) = {
+ val sourceNamesComparison = (a.cunit.toString() compare b.cunit.toString())
+ if(sourceNamesComparison != 0) sourceNamesComparison
+ else {
+ val namesComparison = (a.toString() compare b.toString())
+ if(namesComparison != 0) namesComparison
+ else {
+ a.symbol.id compare b.symbol.id
+ }
+ }
+ }
}
+ val queue = new mutable.PriorityQueue[IClass]()(iclassOrdering)
+
+ override def apply(c: IClass) { queue += c }
override def run() {
try {
super.run()
+ for(c <- queue) { inliner analyzeClass c }
} finally {
inliner.clearCaches()
}
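
The priority queue and the two orderings introduced above make the inliner visit classes and methods in a reproducible order rather than in arrival order. A tiny standalone sketch of the same compare-by-successive-keys pattern (not part of the patch; Clazz is a hypothetical stand-in for IClass):

    case class Clazz(sourceName: String, className: String, symbolId: Int)

    object ClazzOrdering extends Ordering[Clazz] {
      def compare(a: Clazz, b: Clazz): Int = {
        val bySource = a.sourceName compare b.sourceName
        if (bySource != 0) bySource
        else {
          val byName = a.className compare b.className
          if (byName != 0) byName
          else a.symbolId compare b.symbolId   // symbol ids break any remaining ties
        }
      }
    }

    object OrderingSketch extends App {
      val classes = List(Clazz("B.scala", "Foo", 2), Clazz("A.scala", "Baz", 3), Clazz("A.scala", "Bar", 1))
      // sorted with the ordering above, the result no longer depends on arrival order
      println(classes.sorted(ClazzOrdering))
    }
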
@@ -138,7 +177,8 @@ abstract class Inliners extends SubComponent {
private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg)
val recentTFAs = mutable.Map.empty[Symbol, Tuple2[Boolean, analysis.MethodTFA]]
- private def getRecentTFA(incm: IMethod): (Boolean, analysis.MethodTFA) = {
+
+ private def getRecentTFA(incm: IMethod, forceable: Boolean): (Boolean, analysis.MethodTFA) = {
def containsRETURN(blocks: List[BasicBlock]) = blocks exists { bb => bb.lastInstruction.isInstanceOf[RETURN] }
@@ -154,7 +194,7 @@ abstract class Inliners extends SubComponent {
var a: analysis.MethodTFA = null
if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run }
- if(hasInline(incm.symbol)) { recentTFAs.put(incm.symbol, (hasRETURN, a)) }
+ if(forceable) { recentTFAs.put(incm.symbol, (hasRETURN, a)) }
(hasRETURN, a)
}
@@ -174,12 +214,22 @@ abstract class Inliners extends SubComponent {
tfa.isOnWatchlist.clear()
}
+ object imethodOrdering extends Ordering[IMethod] {
+ def compare(a: IMethod, b: IMethod) = {
+ val namesComparison = (a.toString() compare b.toString())
+ if(namesComparison != 0) namesComparison
+ else {
+ a.symbol.id compare b.symbol.id
+ }
+ }
+ }
+
def analyzeClass(cls: IClass): Unit =
if (settings.inline.value) {
debuglog("Analyzing " + cls)
this.currentIClazz = cls
- val ms = cls.methods filterNot { _.symbol.isConstructor }
+ val ms = cls.methods filterNot { _.symbol.isConstructor } sorted imethodOrdering
ms foreach { im =>
if(hasInline(im.symbol)) {
log("Not inlining into " + im.symbol.originalName.decode + " because it is marked @inline.")
@@ -221,7 +271,7 @@ abstract class Inliners extends SubComponent {
* The ensuing analysis of each candidate (performed by `analyzeInc()`)
* may result in a CFG isomorphic to that of the callee being inserted in place of the callsite
* (i.e. a CALL_METHOD instruction is replaced with a single-entry single-exit CFG,
- * a situation we call "successful inlining").
+ * a substitution we call "successful inlining").
*
* (3) following iterations have `relevantBBs` updated to focus on the inlined basic blocks and their successors only.
* Details in `MTFAGrowable.reinit()`
@@ -280,20 +330,23 @@ abstract class Inliners extends SubComponent {
}
/**
- Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod`
- at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed.
-
+ * Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod`
+ * at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed.
+ *
*/
def analyzeInc(i: CALL_METHOD, bb: BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol): Boolean = {
+ assert(bb.toList contains i, "Candidate callsite does not belong to BasicBlock.")
+
var inlined = false
- val msym = i.method
+ val shouldWarn = hasInline(i.method)
- def warnNoInline(reason: String) = {
- if (hasInline(msym) && !caller.isBridge)
- warn(i.pos, "Could not inline required method %s because %s.".format(msym.originalName.decode, reason))
- }
+ def warnNoInline(reason: String) = {
+ if (shouldWarn) {
+ warn(i.pos, "Could not inline required method %s because %s.".format(i.method.originalName.decode, reason))
+ }
+ }
- def isAvailable = icodes available concreteMethod.enclClass
+ var isAvailable = icodes available concreteMethod.enclClass
if (!isAvailable && shouldLoadImplFor(concreteMethod, receiver)) {
// Until r22824 this line was:
@@ -304,21 +357,23 @@ abstract class Inliners extends SubComponent {
// was the proximate cause for SI-3882:
// error: Illegal index: 0 overlaps List((variable par1,LONG))
// error: Illegal index: 0 overlaps List((variable par1,LONG))
- icodes.load(concreteMethod.enclClass)
+ isAvailable = icodes.load(concreteMethod.enclClass)
}
- def isCandidate = (
- isClosureClass(receiver)
- || concreteMethod.isEffectivelyFinal
- || receiver.isEffectivelyFinal
- )
- def isApply = concreteMethod.name == nme.apply
- def isCountable = !(
- isClosureClass(receiver)
- || isApply
- || isMonadicMethod(concreteMethod)
- || receiver.enclosingPackage == definitions.RuntimePackage
- ) // only count non-closures
+ def isCandidate = (
+ isClosureClass(receiver)
+ || concreteMethod.isEffectivelyFinal
+ || receiver.isEffectivelyFinal
+ )
+
+ def isApply = concreteMethod.name == nme.apply
+
+ def isCountable = !(
+ isClosureClass(receiver)
+ || isApply
+ || isMonadicMethod(concreteMethod)
+ || receiver.enclosingPackage == definitions.RuntimePackage
+ ) // only count non-closures
debuglog("Treating " + i
+ "\n\treceiver: " + receiver
@@ -327,42 +382,62 @@ abstract class Inliners extends SubComponent {
if (isAvailable && isCandidate) {
lookupIMethod(concreteMethod, receiver) match {
- case Some(callee) =>
+
+ case Some(callee) if callee.hasCode =>
val inc = new IMethodInfo(callee)
val pair = new CallerCalleeInfo(caller, inc, fresh, inlinedMethodCount)
- if (pair isStampedForInlining stackLength) {
- retry = true
- inlined = true
- if (isCountable)
- count += 1
-
- pair.doInline(bb, i)
- if (!inc.inline || inc.isMonadic)
- caller.inlinedCalls += 1
- inlinedMethodCount(inc.sym) += 1
-
- /* Remove this method from the cache, as the calls-private relation
- * might have changed after the inlining.
- */
- usesNonPublics -= m
- recentTFAs -= m.symbol
- }
- else {
- if (settings.debug.value)
- pair logFailure stackLength
+ (pair isStampedForInlining stackLength) match {
+
+ case inlInfo if inlInfo.isSafe =>
+
+ (inlInfo: @unchecked) match {
+
+ case FeasibleInline(accessNeeded, toBecomePublic) =>
+ for(f <- toBecomePublic) {
+ debuglog("Making public (synthetic) field-symbol: " + f)
+ f setFlag Flags.notPRIVATE
+ f setFlag Flags.notPROTECTED
+ }
+ // only add to `knownSafe` after all `toBecomePublic` fields actually made public.
+ if(accessNeeded == NonPublicRefs.Public) { tfa.knownSafe += inc.sym }
+
+ case InlineableAtThisCaller => ()
+
+ }
+
+ retry = true
+ inlined = true
+ if (isCountable) { count += 1 };
+
+ pair.doInline(bb, i)
+ if (!pair.isInlineForced || inc.isMonadic) { caller.inlinedCalls += 1 };
+ inlinedMethodCount(inc.sym) += 1
- warnNoInline(pair failureReason stackLength)
+ // Remove the caller from the cache (this inlining might have changed its calls-private relation).
+ usesNonPublics -= m
+ recentTFAs -= m.symbol
+
+
+ case DontInlineHere(msg) =>
+ debuglog("inline failed, reason: " + msg)
+ warnNoInline(msg)
+
+ case NeverSafeToInline => ()
}
+
+ case Some(callee) =>
+ assert(!callee.hasCode, "The case clause right before this one should have handled this case.")
+ warnNoInline("callee (" + callee + ") has no code")
+ ()
+
case None =>
warnNoInline("bytecode was not available")
debuglog("could not find icode\n\treceiver: " + receiver + "\n\tmethod: " + concreteMethod)
}
+ } else {
+ warnNoInline(if(!isAvailable) "bytecode was not available" else "it can be overridden")
}
- else warnNoInline(
- if (!isAvailable) "bytecode was not available"
- else "it can be overridden"
- )
inlined
}
@@ -398,6 +473,7 @@ abstract class Inliners extends SubComponent {
tfa.reinit(m, staleOut.toList, splicedBlocks, staleIn)
tfa.run
+
staleOut.clear()
splicedBlocks.clear()
staleIn.clear()
@@ -491,11 +567,8 @@ abstract class Inliners extends SubComponent {
def paramTypes = sym.info.paramTypes
def minimumStack = paramTypes.length + 1
- def inline = hasInline(sym)
- def noinline = hasNoInline(sym)
-
def isBridge = sym.isBridge
- def isInClosure = isClosureClass(owner)
+ val isInClosure = isClosureClass(owner)
val isHigherOrder = isHigherOrderMethod(sym)
def isMonadic = isMonadicMethod(sym)
@@ -511,20 +584,131 @@ abstract class Inliners extends SubComponent {
def isLarge = length > MAX_INLINE_SIZE
def isRecursive = m.recursive
def hasHandlers = handlers.nonEmpty
+
+ def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED)
def hasNonFinalizerHandler = handlers exists {
case _: Finalizer => true
case _ => false
}
- // the number of inlined calls in 'm', used by 'shouldInline'
+ // the number of inlined calls in 'm', used by 'isScoreOK'
var inlinedCalls = 0
def addLocals(ls: List[Local]) = m.locals ++= ls
def addLocal(l: Local) = addLocals(List(l))
def addHandlers(exhs: List[ExceptionHandler]) = m.exh = exhs ::: m.exh
+
+ /**
+ * This method inspects the callee's instructions, finding out the most restrictive accessibility implied by them.
+ *
+ * Rather than giving up upon encountering an access to a private field `p`, it provisionally admits `p` as "can-be-made-public", provided:
+ * - `p` is being compiled as part of this compilation run, and
+ * - `p` is synthetic or param-accessor.
+ *
+ * This method is side-effect free, in particular it lets the invoker decide
+ * whether the accessibility of the `toBecomePublic` fields should be changed or not.
+ */
+ def accessRequirements: AccessReq = {
+
+ var toBecomePublic: List[Symbol] = Nil
+
+ def check(sym: Symbol, cond: Boolean) =
+ if (cond) Private
+ else if (sym.isProtected) Protected
+ else Public
+
+ def canMakePublic(f: Symbol): Boolean =
+ (m.sourceFile ne NoSourceFile) &&
+ (f.isSynthetic || f.isParamAccessor) &&
+ { toBecomePublic = f :: toBecomePublic; true }
+
+ /* A safety check to consider as private, for the purposes of inlining, a public field that:
+ * (1) is defined in an external library, and
+ * (2) can be presumed synthetic (due to a dollar sign in its name).
+ * Such a field was made public by `doMakePublic()` and we don't want to rely on that,
+ * because under other compilation conditions (i.e. no -optimize) that won't be the case anymore.
+ *
+ * This allows aggressive intra-library inlining (making public if needed)
+ * that does not break inter-library scenarios (see comment for `Inliners`).
+ *
+ * TODO handle more robustly the case of a trait var changed at the source-level from public to private[this]
+ * (eg by having ICodeReader use unpickler, see SI-5442).
+ * */
+ def potentiallyPublicized(f: Symbol): Boolean = {
+ (m.sourceFile eq NoSourceFile) && f.name.containsChar('$')
+ }
+
+ def checkField(f: Symbol) = check(f, potentiallyPublicized(f) ||
+ (f.isPrivate && !canMakePublic(f)))
+ def checkSuper(n: Symbol) = check(n, n.isPrivate || !n.isClassConstructor)
+ def checkMethod(n: Symbol) = check(n, n.isPrivate)
+
+ def getAccess(i: Instruction) = i match {
+ case CALL_METHOD(n, SuperCall(_)) => checkSuper(n)
+ case CALL_METHOD(n, _) => checkMethod(n)
+ case LOAD_FIELD(f, _) => checkField(f)
+ case STORE_FIELD(f, _) => checkField(f)
+ case _ => Public
+ }
+
+ var seen = Public
+ val iter = instructions.iterator
+ while((seen ne Private) && iter.hasNext) {
+ val i = iter.next()
+ getAccess(i) match {
+ case Private =>
+ log("instruction " + i + " requires private access.")
+ toBecomePublic = Nil
+ seen = Private
+ case Protected => seen = Protected
+ case _ => ()
+ }
+ }
+
+ AccessReq(seen, toBecomePublic)
+ }
+
}
- class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo, fresh: mutable.Map[String, Int], inlinedMethodCount: collection.Map[Symbol, Int]) {
+ /**
+ * Classifies a pair (caller, callee) into one of four categories:
+ *
+ * (a) inlining should be performed, classified in turn into:
+ * (a.1) `InlineableAtThisCaller`: unconditionally at this caller
+ * (a.2) `FeasibleInline`: it only remains for certain access requirements to be met (see `IMethodInfo.accessRequirements()`)
+ *
+ * (b) inlining shouldn't be performed, classified in turn into:
+ * (b.1) `DontInlineHere`: indicates that this particular occurrence of the callee at the caller shouldn't be inlined.
+ * - Nothing is said about the outcome for other callers, or for other occurrences of the callee for the same caller.
+ * - In particular inlining might be possible, but heuristics gave a low score for it.
+ * (b.2) `NeverSafeToInline`: the callee can't be inlined anywhere, irrespective of caller.
+ *
+ * The classification above is computed by `isStampedForInlining()`, based on which `analyzeInc()` goes on to:
+ * - either log the reason for failure --- case (b) ---,
+ * - or perform inlining --- case (a) ---.
+ */
+ sealed abstract class InlineSafetyInfo {
+ def isSafe = false
+ def isUnsafe = !isSafe
+ }
+ case object NeverSafeToInline extends InlineSafetyInfo
+ case object InlineableAtThisCaller extends InlineSafetyInfo { override def isSafe = true }
+ case class DontInlineHere(msg: String) extends InlineSafetyInfo
+ case class FeasibleInline(accessNeeded: NonPublicRefs.Value,
+ toBecomePublic: List[Symbol]) extends InlineSafetyInfo {
+ override def isSafe = true
+ }
+
+ case class AccessReq(
+ accessNeeded: NonPublicRefs.Value,
+ toBecomePublic: List[Symbol]
+ )
+
+ final class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo, fresh: mutable.Map[String, Int], inlinedMethodCount: collection.Map[Symbol, Int]) {
+
+ assert(!caller.isBridge && inc.m.hasCode,
+ "A guard in Inliner.analyzeClass() should have prevented from getting here.")
+
def isLargeSum = caller.length + inc.length - 1 > SMALL_METHOD_SIZE
private def freshName(s: String): TermName = {
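
IMethodInfo.accessRequirements() above scans the callee's instructions and keeps the most restrictive access level seen, stopping early once Private is reached. A self-contained sketch of that scan (not part of the patch; plain strings stand in for ICode instructions and a small Access ADT for NonPublicRefs):

    object AccessScanSketch extends App {
      sealed trait Access
      case object Public    extends Access
      case object Protected extends Access
      case object Private   extends Access

      // hypothetical classifier: which access level does a single "instruction" require?
      def accessOf(instr: String): Access =
        if (instr contains "private") Private
        else if (instr contains "protected") Protected
        else Public

      def mostRestrictive(instrs: List[String]): Access = {
        var seen: Access = Public
        val iter = instrs.iterator
        while (seen != Private && iter.hasNext) {   // nothing is stricter than Private, so stop there
          accessOf(iter.next()) match {
            case Private   => seen = Private
            case Protected => seen = Protected
            case Public    => ()
          }
        }
        seen
      }

      println(mostRestrictive(List("CALL_METHOD foo", "LOAD_FIELD protected x")))   // Protected
      println(mostRestrictive(List("LOAD_FIELD private y", "CALL_METHOD bar")))     // Private
    }
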
@@ -532,6 +716,12 @@ abstract class Inliners extends SubComponent {
newTermName(s + fresh(s))
}
+ private def isKnownToInlineSafely: Boolean = { tfa.knownSafe(inc.sym) }
+
+ val isInlineForced = hasInline(inc.sym)
+ val isInlineForbidden = hasNoInline(inc.sym)
+ assert(!(isInlineForced && isInlineForbidden), "method ("+inc.m+") marked both @inline and @noinline.")
+
/** Inline 'inc' into 'caller' at the given block and instruction.
* The instruction must be a CALL_METHOD.
*/
@@ -549,7 +739,7 @@ abstract class Inliners extends SubComponent {
def newLocal(baseName: String, kind: TypeKind) =
new Local(caller.sym.newVariable(freshName(baseName), targetPos), kind, false)
- val (hasRETURN, a) = getRecentTFA(inc.m)
+ val (hasRETURN, a) = getRecentTFA(inc.m, isInlineForced)
/* The exception handlers that are active at the current block. */
val activeHandlers = caller.handlers filter (_ covered block)
@@ -709,129 +899,94 @@ abstract class Inliners extends SubComponent {
if (settings.debug.value) icodes.checkValid(caller.m)
}
- def isStampedForInlining(stackLength: Int) =
- !sameSymbols && inc.m.hasCode && shouldInline &&
- isSafeToInline(stackLength) // `isSafeToInline()` must be invoked last in this AND expr bc it mutates the `knownSafe` and `knownUnsafe` maps for good.
-
- def logFailure(stackLength: Int) = log(
- """|inline failed for %s:
- | pair.sameSymbols: %s
- | inc.numInlined < 2: %s
- | inc.m.hasCode: %s
- | isSafeToInline: %s
- | shouldInline: %s
- """.stripMargin.format(
- inc.m, sameSymbols, inlinedMethodCount(inc.sym) < 2,
- inc.m.hasCode, isSafeToInline(stackLength), shouldInline
- )
- )
-
- def failureReason(stackLength: Int) =
- if (!inc.m.hasCode) "bytecode was unavailable"
- else if (inc.m.symbol.hasFlag(Flags.SYNCHRONIZED)) "method is synchronized"
- else if (!isSafeToInline(stackLength)) "it is unsafe (target may reference private fields)"
- else "of a bug (run with -Ylog:inline -Ydebug for more information)"
+ def isStampedForInlining(stackLength: Int): InlineSafetyInfo = {
- def canAccess(level: NonPublicRefs.Value) = level match {
- case Private => caller.owner == inc.owner
- case Protected => caller.owner.tpe <:< inc.owner.tpe
- case Public => true
- }
- private def sameSymbols = caller.sym == inc.sym
- private def sameOwner = caller.owner == inc.owner
+ if(tfa.blackballed(inc.sym)) { return NeverSafeToInline }
- /** A method is safe to inline when:
- * - it does not contain calls to private methods when called from another class
- * - it is not inlined into a position with non-empty stack,
- * while having a top-level finalizer (see liftedTry problem)
- * - it is not recursive
- * Note:
- * - synthetic private members are made public in this pass.
- */
- def isSafeToInline(stackLength: Int): Boolean = {
+ if(!isKnownToInlineSafely) {
- if(tfa.blackballed(inc.sym)) { return false }
- if(tfa.knownSafe(inc.sym)) { return true }
+ if(inc.openBlocks.nonEmpty) {
+ val msg = ("Encountered " + inc.openBlocks.size + " open block(s) in isSafeToInline: this indicates a bug in the optimizer!\n" +
+ " caller = " + caller.m + ", callee = " + inc.m)
+ warn(inc.sym.pos, msg)
+ tfa.knownNever += inc.sym
+ return DontInlineHere("Open blocks in " + inc.m)
+ }
- if(helperIsSafeToInline(stackLength)) {
- tfa.knownSafe += inc.sym; true
- } else {
- tfa.knownUnsafe += inc.sym; false
- }
- }
+ val reasonWhyNever: String = {
+ var rs: List[String] = Nil
+ if(inc.isRecursive) { rs ::= "is recursive" }
+ if(isInlineForbidden) { rs ::= "is annotated @noinline" }
+ if(inc.isSynchronized) { rs ::= "is synchronized method" }
+ if(rs.isEmpty) null else rs.mkString("", ", and ", "")
+ }
- private def helperIsSafeToInline(stackLength: Int): Boolean = {
- def makePublic(f: Symbol): Boolean =
- /*
- * Completely disabling member publifying. This shouldn't have been done in the first place. :|
- */
- false
- // (inc.m.sourceFile ne NoSourceFile) && (f.isSynthetic || f.isParamAccessor) && {
- // debuglog("Making not-private symbol out of synthetic: " + f)
-
- // f setNotFlag Flags.PRIVATE
- // true
- // }
-
- if (!inc.m.hasCode || inc.isRecursive) { return false }
- if (inc.m.symbol.hasFlag(Flags.SYNCHRONIZED)) { return false }
-
- val accessNeeded = usesNonPublics.getOrElseUpdate(inc.m, {
- // Avoiding crashing the compiler if there are open blocks.
- inc.openBlocks foreach { b =>
- warn(inc.sym.pos,
- "Encountered open block in isSafeToInline: this indicates a bug in the optimizer!\n" +
- " caller = " + caller.m + ", callee = " + inc.m
- )
- return false
+ if(reasonWhyNever != null) {
+ tfa.knownNever += inc.sym
+ // next time around NeverSafeToInline is returned, thus skipping the (duplicate) msg; this is intended.
+ return DontInlineHere(inc.m + " " + reasonWhyNever)
}
- def check(sym: Symbol, cond: Boolean) =
- if (cond) Private
- else if (sym.isProtected) Protected
- else Public
-
- def checkField(f: Symbol) = check(f, f.isPrivate && !makePublic(f))
- def checkSuper(m: Symbol) = check(m, m.isPrivate || !m.isClassConstructor)
- def checkMethod(m: Symbol) = check(m, m.isPrivate)
-
- def getAccess(i: Instruction) = i match {
- case CALL_METHOD(m, SuperCall(_)) => checkSuper(m)
- case CALL_METHOD(m, _) => checkMethod(m)
- case LOAD_FIELD(f, _) => checkField(f)
- case STORE_FIELD(f, _) => checkField(f)
- case _ => Public
+
+ if(sameSymbols) { // TODO but this also amounts to recursive, ie should lead to adding to tfa.knownNever, right?
+ tfa.knownUnsafe += inc.sym;
+ return DontInlineHere("sameSymbols (ie caller == callee)")
}
- def iterate(): NonPublicRefs.Value = inc.instructions.foldLeft(Public)((res, inc) => getAccess(inc) match {
- case Private => log("instruction " + inc + " requires private access.") ; return Private
- case Protected => Protected
- case Public => res
- })
- iterate()
- })
+ }
- canAccess(accessNeeded) && {
- val isIllegalStack = (stackLength > inc.minimumStack && inc.hasNonFinalizerHandler)
+ /*
+ * From here on, two main categories of checks remain, (a) and (b) below:
+ * (a.1) either the scoring heuristics give green light; or
+ * (a.2) forced as candidate due to @inline.
+ * After that, safety proper is checked:
+ * (b.1) the callee does not contain calls to private methods when called from another class
+ * (b.2) the callee is not going to be inlined into a position with non-empty stack,
+ * while having a top-level finalizer (see liftedTry problem)
+ * As a result of (b), some synthetic private members can be chosen to become public.
+ */
- !isIllegalStack || {
- debuglog("method " + inc.sym + " is used on a non-empty stack with finalizer.")
- false
- }
+ if(!isInlineForced && !isScoreOK) {
+ // During inlining retry, a previous caller-callee pair that scored low may pass.
+ // Thus, adding the callee to tfa.knownUnsafe isn't warranted.
+ return DontInlineHere("too low score (heuristics)")
+ }
+
+ if(isKnownToInlineSafely) { return InlineableAtThisCaller }
+
+ if(stackLength > inc.minimumStack && inc.hasNonFinalizerHandler) {
+ val msg = "method " + inc.sym + " is used on a non-empty stack with finalizer."
+ debuglog(msg)
+ // FYI: not reason enough to add inc.sym to tfa.knownUnsafe (because at another callsite in this caller, inlining might be ok)
+ return DontInlineHere(msg)
+ }
+
+ val accReq = inc.accessRequirements
+ if(!canAccess(accReq.accessNeeded)) {
+ tfa.knownUnsafe += inc.sym
+ return DontInlineHere("access level required by callee not matched by caller")
}
+
+ FeasibleInline(accReq.accessNeeded, accReq.toBecomePublic)
+
}
- /** Decide whether to inline or not. Heuristics:
+ def canAccess(level: NonPublicRefs.Value) = level match {
+ case Private => caller.owner == inc.owner
+ case Protected => caller.owner.tpe <:< inc.owner.tpe
+ case Public => true
+ }
+ private def sameSymbols = caller.sym == inc.sym
+ private def sameOwner = caller.owner == inc.owner
+
+ /** Gives green light for inlining (which may still be vetoed later). Heuristics:
* - it's bad to make the caller larger (> SMALL_METHOD_SIZE) if it was small
* - it's bad to inline large methods
* - it's good to inline higher order functions
* - it's good to inline closures functions.
* - it's bad (useless) to inline inside bridge methods
*/
- private def neverInline = caller.isBridge || !inc.m.hasCode || inc.noinline
- private def alwaysInline = inc.inline
-
- def shouldInline: Boolean = !neverInline && (alwaysInline || {
- debuglog("shouldInline: " + caller.m + " with " + inc.m)
+ def isScoreOK: Boolean = {
+ debuglog("shouldInline: " + caller.m + " , callee:" + inc.m)
var score = 0
@@ -855,7 +1010,7 @@ abstract class Inliners extends SubComponent {
log("shouldInline(" + inc.m + ") score: " + score)
score > 0
- })
+ }
}
def lookupIMethod(meth: Symbol, receiver: Symbol): Option[IMethod] = {
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 9b223a13ba..aa30a7a45b 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -195,6 +195,7 @@ trait ScalaSettings extends AbsScalaSettings
val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
val Yrepldebug = BooleanSetting("-Yrepl-debug", "Trace all repl activity.") andThen (interpreter.replProps.debug setValue _)
val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
+ val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
/** Groups of Settings.
*/
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 046b177444..65b0ff1e6d 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -613,8 +613,8 @@ abstract class ClassfileParser {
parseAttributes(sym, info)
getScope(jflags).enter(sym)
- // sealed java enums (experimental)
- if (isEnum && opt.experimental) {
+ // sealed java enums
+ if (isEnum) {
val enumClass = sym.owner.linkedClassOfClass
if (!enumClass.isSealed)
enumClass setFlag (SEALED | ABSTRACT)
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 124d350385..ffcb682cf7 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1366,7 +1366,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
override def transform(tree: Tree): Tree =
reportError { transform1(tree) } {_ => tree}
-
+
def transform1(tree: Tree) = {
val symbol = tree.symbol
@@ -1437,7 +1437,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
}
- case None => super.transform(tree)
+ case None =>
+ treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), super.transformTrees(targs))
+ // See pos/exponential-spec.scala - can't call transform on the whole tree again.
+ // super.transform(tree)
}
case Select(Super(_, _), name) if illegalSpecializedInheritance(currentClass) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 60cc9e5fb8..49f5fca19d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -106,7 +106,7 @@ trait ContextErrors {
def errMsg = {
val paramName = param.name
val paramTp = param.tpe
- paramTp.typeSymbol match {
+ paramTp.typeSymbolDirect match {
case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
case _ =>
"could not find implicit value for "+
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 68782379a6..da045e1a48 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1224,7 +1224,9 @@ trait Implicits {
* reflect.Manifest for type 'tp'. An EmptyTree is returned if
* no manifest is found. todo: make this instantiate take type params as well?
*/
- private def manifestOfType(tp: Type, full: Boolean): SearchResult = {
+ private def manifestOfType(tp: Type, flavor: Symbol): SearchResult = {
+ val full = flavor == FullManifestClass
+ val opt = flavor == OptManifestClass
/** Creates a tree that calls the factory method called constructor in object reflect.Manifest */
def manifestFactoryCall(constructor: String, tparg: Type, args: Tree*): Tree =
@@ -1256,7 +1258,7 @@ trait Implicits {
if (containsExistential(tp1)) EmptyTree
else manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp1))
case ConstantType(value) =>
- manifestOfType(tp1.deconst, full)
+ manifestOfType(tp1.deconst, FullManifestClass)
case TypeRef(pre, sym, args) =>
if (isPrimitiveValueClass(sym) || isPhantomClass(sym)) {
findSingletonManifest(sym.name.toString)
@@ -1299,22 +1301,13 @@ trait Implicits {
mot(tp1.skolemizeExistential, from, to)
case _ =>
EmptyTree
-/* !!! the following is almost right, but we have to splice nested manifest
- * !!! types into this type. This requires a substantial extension of
- * !!! reifiers.
- val reifier = new Reifier()
- val rtree = reifier.reifyTopLevel(tp1)
- manifestFactoryCall("apply", tp, rtree)
-*/
}
}
- val tagInScope =
- if (full) resolveTypeTag(pos, NoType, tp, concrete = true, allowMaterialization = false)
- else resolveClassTag(pos, tp, allowMaterialization = false)
- if (tagInScope.isEmpty) mot(tp, Nil, Nil)
- else {
- if (full) {
+ if (full) {
+ val tagInScope = resolveTypeTag(pos, NoType, tp, concrete = true, allowMaterialization = false)
+ if (tagInScope.isEmpty) mot(tp, Nil, Nil)
+ else {
if (ReflectRuntimeUniverse == NoSymbol) {
// todo. write a test for this
context.error(pos, s"""
@@ -1330,44 +1323,62 @@ trait Implicits {
|to proceed add a class tag to the type `$tp` (e.g. by introducing a context bound) and recompile.""".trim.stripMargin)
return SearchFailure
}
+ val cm = typed(Ident(ReflectRuntimeCurrentMirror))
+ val interop = gen.mkMethodCall(ReflectRuntimeUniverse, nme.typeTagToManifest, List(tp), List(cm, tagInScope))
+ wrapResult(interop)
+ }
+ } else {
+ mot(tp, Nil, Nil) match {
+ case SearchFailure if opt => wrapResult(gen.mkAttributedRef(NoManifest))
+ case result => result
}
-
- val interop =
- if (full) {
- val cm = typed(Ident(ReflectRuntimeCurrentMirror))
- gen.mkMethodCall(ReflectRuntimeUniverse, nme.typeTagToManifest, List(tp), List(cm, tagInScope))
- } else gen.mkMethodCall(ReflectBasis, nme.classTagToClassManifest, List(tp), List(tagInScope))
- wrapResult(interop)
}
}
def wrapResult(tree: Tree): SearchResult =
if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter)
- /** The tag corresponding to type `pt`, provided `pt` is a flavor of a tag.
+ /** Materializes implicits of magic types (currently, manifests and tags).
+ * Will be replaced by implicit macros once we fix them.
*/
- private def implicitTagOrOfExpectedType(pt: Type): SearchResult = pt.dealias match {
- case TypeRef(pre, sym, arg :: Nil) if ManifestSymbols(sym) =>
- manifestOfType(arg, sym == FullManifestClass) match {
- case SearchFailure if sym == OptManifestClass => wrapResult(gen.mkAttributedRef(NoManifest))
- case result => result
- }
- case TypeRef(pre, sym, arg :: Nil) if TagSymbols(sym) =>
- tagOfType(pre, arg, sym)
- case tp@TypeRef(_, sym, _) if sym.isAbstractType =>
- implicitTagOrOfExpectedType(tp.bounds.lo) // #3977: use tp (==pt.dealias), not pt (if pt is a type alias, pt.bounds.lo == pt)
- case _ =>
+ private def materializeImplicit(pt: Type): SearchResult = {
+ def fallback = {
searchImplicit(implicitsOfExpectedType, false)
// shouldn't we pass `pt` to `implicitsOfExpectedType`, or is the recursive case
// for an abstract type really only meant for tags?
+ }
+
+ pt match {
+ case TypeRef(_, sym, _) if sym.isAbstractType =>
+ materializeImplicit(pt.dealias.bounds.lo) // #3977: use pt.dealias, not pt (if pt is a type alias, pt.bounds.lo == pt)
+ case pt @ TypeRef(pre, sym, arg :: Nil) =>
+ sym match {
+ case sym if ManifestSymbols(sym) => manifestOfType(arg, sym)
+ case sym if TagSymbols(sym) => tagOfType(pre, arg, sym)
+ // as of late ClassManifest is an alias of ClassTag
+ // hence we need to take extra care when performing dealiasing
+ // because it might destroy the flavor of the manifest requested by the user
+ // when the user wants ClassManifest[T], we should invoke `manifestOfType` not `tagOfType`
+ // hence we don't do `pt.dealias` as we did before, but rather do `pt.betaReduce`
+ // unlike `dealias`, `betaReduce` performs at most one step of dealiasing
+ // while dealias pops all aliases in a single invocation
+ case sym if sym.isAliasType => materializeImplicit(pt.betaReduce)
+ case _ => fallback
+ }
+ case _ =>
+ fallback
+ }
}
/** The result of the implicit search:
* First search implicits visible in current context.
* If that fails, search implicits in expected type `pt`.
- * // [Eugene] the following two lines should be deleted after we migrate delegate tag materialization to implicit macros
+ * // [Eugene] the following lines should be deleted after we migrate delegate tag materialization to implicit macros
* If that fails, and `pt` is an instance of a ClassTag, try to construct a class tag.
* If that fails, and `pt` is an instance of a TypeTag, try to construct a type tag.
+ * If that fails, and `pt` is an instance of a ClassManifest, try to construct a class manifest.
+ * If that fails, and `pt` is an instance of a Manifest, try to construct a manifest.
+ * If that fails, and `pt` is an instance of an OptManifest, try to construct a class manifest and return NoManifest if construction fails.
* If all fails return SearchFailure
*/
def bestImplicit: SearchResult = {
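
The comments in materializeImplicit above distinguish popping a single alias (betaReduce) from popping all aliases at once (dealias), so that a request for ClassManifest[T] keeps its flavor. A small illustration of such an alias chain (not part of the patch; Step1 and Step2 are hypothetical aliases introduced only to show the difference):

    import scala.reflect.ClassTag

    object AliasSteps extends App {
      type Step1[T] = ClassTag[T]   // one alias away from ClassTag (ClassManifest plays this role in 2.10)
      type Step2[T] = Step1[T]      // two aliases away from ClassTag
      // betaReduce(Step2[Int]) ~> Step1[Int]     -- exactly one step; the requested "flavor" is still visible
      // dealias(Step2[Int])    ~> ClassTag[Int]  -- all steps at once; the original alias is gone

      def show[T](implicit ct: Step2[T]): String = ct.toString
      println(show[Int])            // prints: Int  (the implicit resolves through the alias chain)
    }
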
@@ -1387,7 +1398,7 @@ trait Implicits {
val failstart = Statistics.startTimer(oftypeFailNanos)
val succstart = Statistics.startTimer(oftypeSucceedNanos)
- result = implicitTagOrOfExpectedType(pt)
+ result = materializeImplicit(pt)
if (result == SearchFailure) {
context.updateBuffer(previousErrs)
@@ -1434,7 +1445,16 @@ trait Implicits {
}
object ImplicitNotFoundMsg {
- def unapply(sym: Symbol): Option[(Message)] = sym.implicitNotFoundMsg map (m => (new Message(sym, m)))
+ def unapply(sym: Symbol): Option[(Message)] = sym.implicitNotFoundMsg match {
+ case Some(m) => Some(new Message(sym, m))
+ case None if sym.isAliasType =>
+ // perform exactly one step of dealiasing
+ // this is necessary because ClassManifests are now aliased to ClassTags
+ // but we don't want to intimidate users by showing unrelated error messages
+ unapply(sym.info.resultType.betaReduce.typeSymbolDirect)
+ case _ => None
+ }
+
// check the message's syntax: should be a string literal that may contain occurrences of the string "${X}",
// where `X` refers to a type parameter of `sym`
def check(sym: Symbol): Option[String] =
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index e99c31374e..960c210649 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1614,6 +1614,13 @@ trait Infer {
val saved = context.state
var fallback = false
context.setBufferErrors()
+ // We cache the current buffer because it is impossible to
+ // distinguish errors that occurred before entering tryTwice
+ // and our first attempt in 'withImplicitsDisabled'. If the
+ // first attempt fails we try with implicits on *and* clean
+ // buffer but that would also flush any pre-tryTwice valid
+ // errors, hence some manual buffer tweaking is necessary.
+ val errorsToRestore = context.flushAndReturnBuffer()
try {
context.withImplicitsDisabled(infer(false))
if (context.hasErrors) {
@@ -1627,8 +1634,10 @@ trait Infer {
case ex: TypeError => // recoverable cyclic references
context.restoreState(saved)
if (!fallback) infer(true) else ()
+ } finally {
+ context.restoreState(saved)
+ context.updateBuffer(errorsToRestore)
}
- context.restoreState(saved)
}
else infer(true)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index d157666e47..322b9ebb25 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -1221,7 +1221,10 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
case ex: Throwable =>
None
}
- } getOrElse realex.getMessage
+ } getOrElse {
+ val msg = realex.getMessage
+ if (msg != null) msg else realex.getClass.getName
+ }
fail(typer, expandee, msg = "exception during macro expansion: " + message)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index decd18b599..6428173577 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -502,32 +502,29 @@ trait Namers extends MethodSynthesis {
noDuplicates(selectors map (_.rename), AppearsTwice)
}
- def enterCopyMethodOrGetter(tree: Tree, tparams: List[TypeDef]): Symbol = {
- val sym = tree.symbol
- val lazyType = completerOf(tree, tparams)
- def completeCopyFirst = sym.isSynthetic && (!sym.hasDefault || sym.owner.info.member(nme.copy).isSynthetic)
- def completeCopyMethod(clazz: Symbol) {
- // the 'copy' method of case classes needs a special type completer to make
- // bug0054.scala (and others) work. the copy method has to take exactly the same
- // parameter types as the primary constructor.
+ def enterCopyMethod(copyDefDef: Tree, tparams: List[TypeDef]): Symbol = {
+ val sym = copyDefDef.symbol
+ val lazyType = completerOf(copyDefDef, tparams)
+
+ /** Assign the types of the class parameters to the parameters of the
+ * copy method. See comment in `Unapplies.caseClassCopyMeth` */
+ def assignParamTypes() {
+ val clazz = sym.owner
val constructorType = clazz.primaryConstructor.tpe
- val subst = new SubstSymMap(clazz.typeParams, tparams map (_.symbol))
- val vparamss = tree match { case x: DefDef => x.vparamss ; case _ => Nil }
- val cparamss = constructorType.paramss
-
- map2(vparamss, cparamss)((vparams, cparams) =>
- map2(vparams, cparams)((param, cparam) =>
- // need to clone the type cparam.tpe???
- // problem is: we don't have the new owner yet (the new param symbol)
- param.tpt setType subst(cparam.tpe)
+ val subst = new SubstSymMap(clazz.typeParams, tparams map (_.symbol))
+ val classParamss = constructorType.paramss
+ val DefDef(_, _, _, copyParamss, _, _) = copyDefDef
+
+ map2(copyParamss, classParamss)((copyParams, classParams) =>
+ map2(copyParams, classParams)((copyP, classP) =>
+ copyP.tpt setType subst(classP.tpe)
)
)
}
- sym setInfo {
- mkTypeCompleter(tree) { copySym =>
- if (completeCopyFirst)
- completeCopyMethod(copySym.owner)
+ sym setInfo {
+ mkTypeCompleter(copyDefDef) { sym =>
+ assignParamTypes()
lazyType complete sym
}
}
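
assignParamTypes above exists because the synthetic copy method of a case class must take exactly the parameter types of the primary constructor. A trivial user-level illustration of that invariant (not part of the patch):

    case class Person(name: String, age: Int)

    object CopySketch extends App {
      val p = Person("Ada", 36)
      // copy's parameters mirror the constructor: (name: String = this.name, age: Int = this.age)
      println(p.copy(age = 37))   // Person(Ada,37)
    }
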
@@ -604,8 +601,8 @@ trait Namers extends MethodSynthesis {
val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE else 0
val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
- if (name == nme.copy || tree.symbol.name.startsWith(nme.copy + nme.DEFAULT_GETTER_STRING))
- enterCopyMethodOrGetter(tree, tparams)
+ if (name == nme.copy && sym.isSynthetic)
+ enterCopyMethod(tree, tparams)
else
sym setInfo completerOf(tree, tparams)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index c466206192..53c2d16928 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -47,7 +47,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val phaseName: String = "patmat"
- def patmatDebug(msg: String) = println(msg)
+ // TODO: the inliner fails to inline the closures to patmatDebug
+ // private val printPatmat = settings.Ypatmatdebug.value
+ // @inline final def patmatDebug(s: => String) = if (printPatmat) println(s)
def newTransformer(unit: CompilationUnit): Transformer =
if (opt.virtPatmat) new MatchTransformer(unit)
@@ -143,6 +145,15 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val typer: Typer
val matchOwner = typer.context.owner
+ def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
+ def reportMissingCases(pos: Position, counterExamples: List[String]) = {
+ val ceString =
+ if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
+ else "inputs: " + counterExamples.mkString(", ")
+
+ typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
+ }
+
def inMatchMonad(tp: Type): Type
def pureType(tp: Type): Type
final def matchMonadResult(tp: Type): Type =
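
reportMissingCases above formats the counterexamples carried by the exhaustiveness warning. A minimal match of the kind that would trigger it (not part of the patch):

    sealed trait Color
    case object Red   extends Color
    case object Green extends Color
    case object Blue  extends Color

    object ExhaustivitySketch {
      def name(c: Color): String = c match {   // warning: match may not be exhaustive.
        case Red   => "red"                    // It would fail on the following input: Blue
        case Green => "green"
      }
    }
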
@@ -175,7 +186,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// (that would require more sophistication when generating trees,
// and the only place that emits Matches after typers is for exception handling anyway)
if(phase.id >= currentRun.uncurryPhase.id) debugwarn("running translateMatch at "+ phase +" on "+ selector +" match "+ cases)
- // patmatDebug("translating "+ cases.mkString("{", "\n", "}"))
+ // patmatDebug ("translating "+ cases.mkString("{", "\n", "}"))
def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
case TypeRef(_, RepeatedParamClass, arg :: Nil) => seqType(arg)
@@ -300,13 +311,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
// if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
+ // patmatDebug ("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
+
// must use type `tp`, which is provided by extractor's result, not the type expected by binder,
// as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
// (it will later result in a type test when `tp` is not a subtype of `b.info`)
// TODO: can we simplify this, together with the Bound case?
- (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) => b setInfo tp } // patmatDebug("changing "+ b +" : "+ b.info +" -> "+ tp);
+ (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
+ // patmatDebug ("changing "+ b +" : "+ b.info +" -> "+ tp)
+ b setInfo tp
+ }
- // patmatDebug("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
// example check: List[Int] <:< ::[Int]
// TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
val (typeTestTreeMaker, patBinderOrCasted) =
@@ -410,7 +425,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
*/
case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
- // patmatDebug("WARNING: Bind tree with unbound symbol "+ patTree)
+ // patmatDebug ("WARNING: Bind tree with unbound symbol "+ patTree)
noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
// case Star(_) | ArrayValue | This => error("stone age pattern relics encountered!")
@@ -792,7 +807,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, unchecked: Boolean): (List[List[TreeMaker]], List[Tree]) =
(cases, Nil)
- def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] =
+ def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] =
None
// for catch (no need to customize match failure)
@@ -813,7 +828,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
if (currSub ne null) {
- // patmatDebug("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
+ // patmatDebug ("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
Thread.dumpStack()
}
else currSub = outerSubst >> substitution
@@ -979,7 +994,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
**/
case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
import TypeTestTreeMaker._
- // patmatDebug("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
+ // patmatDebug ("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
lazy val outerTestNeeded = (
!((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
@@ -1103,11 +1118,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride)
}
+ // pt is the fully defined type of the cases (either pt or the lub of the types of the cases)
def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
fixerUpper(owner, scrut.pos){
- val ptDefined = if (isFullyDefined(pt)) pt else NoType
def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
- // patmatDebug("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
+ // patmatDebug ("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
val (unchecked, requireSwitch) =
if (settings.XnoPatmatAnalysis.value) (true, false)
@@ -1120,7 +1135,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
(false, false)
}
- emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride).getOrElse{
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, unchecked).getOrElse{
if (requireSwitch) typer.context.unit.warning(scrut.pos, "could not emit switch for @switch annotated match")
if (casesNoSubstOnly nonEmpty) {
@@ -1161,12 +1176,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
t match {
case Function(_, _) if t.symbol == NoSymbol =>
t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
- // patmatDebug("new symbol for "+ (t, t.symbol.ownerChain))
+ // patmatDebug ("new symbol for "+ (t, t.symbol.ownerChain))
case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
- // patmatDebug("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
+ // patmatDebug ("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
t.symbol.owner = currentOwner
case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
- // patmatDebug("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
+ // patmatDebug ("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree??
assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner, d.symbol.lazyAccessor)
d.symbol.lazyAccessor.owner = currentOwner
@@ -1176,7 +1191,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
d.symbol.owner = currentOwner
// case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
- // patmatDebug("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
+ // patmatDebug ("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
case _ =>
}
super.traverse(t)
@@ -1208,6 +1223,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// local / context-free
def _asInstanceOf(b: Symbol, tp: Type): Tree
+ def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree
def _equals(checker: Tree, binder: Symbol): Tree
def _isInstanceOf(b: Symbol, tp: Type): Tree
def and(a: Tree, b: Tree): Tree
@@ -1327,10 +1343,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
var currId = 0
}
case class Test(cond: Cond, treeMaker: TreeMaker) {
- // def <:<(other: Test) = cond <:< other.cond
- // def andThen_: (prev: List[Test]): List[Test] =
- // prev.filterNot(this <:< _) :+ this
-
// private val reusedBy = new collection.mutable.HashSet[Test]
var reuses: Option[Test] = None
def registerReuseBy(later: Test): Unit = {
@@ -1344,38 +1356,20 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
"T"+ id + "C("+ cond +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
}
+ // TODO: remove Cond, replace by Prop from Logic
object Cond {
- // def refines(self: Cond, other: Cond): Boolean = (self, other) match {
- // case (Bottom, _) => true
- // case (Havoc , _) => true
- // case (_ , Top) => true
- // case (_ , _) => false
- // }
var currId = 0
}
abstract class Cond {
- // def testedPath: Tree
- // def <:<(other: Cond) = Cond.refines(this, other)
-
val id = { Cond.currId += 1; Cond.currId}
}
- // does not contribute any knowledge
- case object Top extends Cond {override def toString = "T"}
-
-
- // takes away knowledge. e.g., a user-defined guard
- case object Havoc extends Cond {override def toString = "_|_"}
-
- // we know everything! everything!
- // this either means the case is unreachable,
- // or that it is statically known to be picked -- at this point in the decision tree --> no point in emitting further alternatives
- // case object Bottom extends Cond
-
+ case object TrueCond extends Cond {override def toString = "T"}
+ case object FalseCond extends Cond {override def toString = "F"}
case class AndCond(a: Cond, b: Cond) extends Cond {override def toString = a +"/\\"+ b}
- case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"}
+ case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"}
object EqualityCond {
private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond]
@@ -1383,12 +1377,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def unapply(c: EqualityCond) = Some(c.testedPath, c.rhs)
}
class EqualityCond(val testedPath: Tree, val rhs: Tree) extends Cond {
- // def negation = TopCond // inequality doesn't teach us anything
- // do simplification when we know enough about the tree statically:
- // - collapse equal trees
- // - accumulate tests when (in)equality not known statically
- // - become bottom when we statically know this can never match
-
override def toString = testedPath +" == "+ rhs +"#"+ id
}
@@ -1407,11 +1395,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def unapply(c: TypeCond) = Some(c.testedPath, c.pt)
}
class TypeCond(val testedPath: Tree, val pt: Type) extends Cond {
- // def negation = TopCond // inequality doesn't teach us anything
- // do simplification when we know enough about the tree statically:
- // - collapse equal trees
- // - accumulate tests when (in)equality not known statically
- // - become bottom when we statically know this can never match
override def toString = testedPath +" : "+ pt +"#"+ id
}
@@ -1434,9 +1417,27 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case _ => false
})
+ object IrrefutableExtractorTreeMaker {
+ // will an extractor with an unapply method of method type `tp` always succeed?
+ // note: this assumes the other side-conditions implied by the extractor are met
+ // (argument of the right type, length check succeeds for unapplySeq,...)
+ def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match {
+ case TypeRef(_, SomeClass, _) => true
+ // probably not useful since this type won't be inferred nor can it be written down (yet)
+ case ConstantType(Constant(true)) => true
+ case _ => false
+ }
+
+ def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol, Substitution)] = xtm match {
+ case ExtractorTreeMaker(extractor, None, nextBinder, subst) if irrefutableExtractorType(extractor.tpe) =>
+ Some(extractor, nextBinder, subst)
+ case _ =>
+ None
+ }
+ }
// returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
- abstract class TreeMakersToConds(val root: Symbol) {
+ class TreeMakersToConds(val root: Symbol) {
def discard() = {
pointsToBound.clear()
trees.clear()
@@ -1471,20 +1472,22 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// reverse substitution that would otherwise replace a variable we already encountered by a new variable
// NOTE: this forgets the more precise type we have for these later variables, but that's probably okay
normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_)))
- // patmatDebug("normalize: "+ normalize)
+ // patmatDebug ("normalize subst: "+ normalize)
val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway
pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1
- // patmatDebug("pointsToBound: "+ pointsToBound)
+ // patmatDebug ("pointsToBound: "+ pointsToBound)
accumSubst >>= okSubst
- // patmatDebug("accumSubst: "+ accumSubst)
+ // patmatDebug ("accumSubst: "+ accumSubst)
}
// hashconsing trees (modulo value-equality)
def unique(t: Tree, tpOverride: Type = NoType): Tree =
trees find (a => a.correspondsStructure(t)(sameValue)) match {
- case Some(orig) => orig // patmatDebug("unique: "+ (t eq orig, orig));
+ case Some(orig) =>
+ // patmatDebug("unique: "+ (t eq orig, orig))
+ orig
case _ =>
trees += t
if (tpOverride != NoType) t setType tpOverride
@@ -1504,27 +1507,20 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
final def binderToUniqueTree(b: Symbol) =
unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
- @inline def /\(conds: Iterable[Cond]) = if (conds.isEmpty) Top else conds.reduceLeft(AndCond(_, _))
- @inline def \/(conds: Iterable[Cond]) = if (conds.isEmpty) Havoc else conds.reduceLeft(OrCond(_, _))
+ @inline def /\(conds: Iterable[Cond]) = if (conds.isEmpty) TrueCond else conds.reduceLeft(AndCond(_, _))
+ @inline def \/(conds: Iterable[Cond]) = if (conds.isEmpty) FalseCond else conds.reduceLeft(OrCond(_, _))
// note that the sequencing of operations is important: must visit in same order as match execution
// binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
- final protected def treeMakerToCond(tm: TreeMaker, condMaker: CondMaker): Cond = {
- updateSubstitution(tm.substitution)
- condMaker(tm)(treeMakerToCond(_, condMaker))
- }
+ final def treeMakerToCond(tm: TreeMaker, handleUnknown: TreeMaker => Cond, updateSubst: Boolean, rewriteNil: Boolean = false): Cond = {
+ if (updateSubst) updateSubstitution(tm.substitution)
- final protected def treeMakerToCondNoSubst(tm: TreeMaker, condMaker: CondMaker): Cond =
- condMaker(tm)(treeMakerToCondNoSubst(_, condMaker))
-
- type CondMaker = TreeMaker => (TreeMaker => Cond) => Cond
- final def makeCond(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = {
tm match {
case ttm@TypeTestTreeMaker(prevBinder, testedBinder, pt, _) =>
object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy {
type Result = Cond
def and(a: Result, b: Result) = AndCond(a, b)
- def outerTest(testedBinder: Symbol, expectedTp: Type) = Top // TODO OuterEqCond(testedBinder, expectedType)
+ def outerTest(testedBinder: Symbol, expectedTp: Type) = TrueCond // TODO OuterEqCond(testedBinder, expectedType)
def typeTest(b: Symbol, pt: Type) = { // a type test implies the tested path is non-null (null.isInstanceOf[T] is false for all T)
val p = binderToUniqueTree(b); AndCond(NonNullCond(p), TypeCond(p, uniqueTp(pt)))
}
@@ -1534,34 +1530,56 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
ttm.renderCondition(condStrategy)
case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
- case AlternativesTreeMaker(_, altss, _) => \/(altss map (alts => /\(alts map recurse)))
+ case AlternativesTreeMaker(_, altss, _) => \/(altss map (alts => /\(alts map (treeMakerToCond(_, handleUnknown, updateSubst)))))
case ProductExtractorTreeMaker(testedBinder, None, subst) => NonNullCond(binderToUniqueTree(testedBinder))
- case ExtractorTreeMaker(_, _, _, _)
- | GuardTreeMaker(_)
- | ProductExtractorTreeMaker(_, Some(_), _)
- | BodyTreeMaker(_, _) => Havoc
- case SubstOnlyTreeMaker(_, _) => Top
+ case IrrefutableExtractorTreeMaker(_, _, _) =>
+ // the extra condition is None, the extractor's result indicates it always succeeds,
+ // and the potential type-test for the argument is represented by a separate TypeTestTreeMaker
+ TrueCond
+ case GuardTreeMaker(guard) =>
+ guard.tpe match {
+ case ConstantType(Constant(true)) => TrueCond
+ case ConstantType(Constant(false)) => FalseCond
+ case _ => handleUnknown(tm)
+ }
+ case p @ ExtractorTreeMaker(extractor, Some(lenCheck), testedBinder, _) =>
+ p.checkedLength match {
+ // special-case: interpret pattern `List()` as `Nil`
+ // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil -- not sure this is a good idea...
+ case Some(0) if rewriteNil && testedBinder.tpe.typeSymbol == ListClass => // extractor.symbol.owner == SeqFactory
+ EqualityCond(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe))
+ case _ => handleUnknown(tm)
+ }
+ case SubstOnlyTreeMaker(_, _) => TrueCond
+ case ProductExtractorTreeMaker(_, Some(_), _) |
+ ExtractorTreeMaker(_, _, _, _) | BodyTreeMaker(_, _) => handleUnknown(tm)
}
}
- final def approximateMatch(cases: List[List[TreeMaker]], condMaker: CondMaker = makeCond): List[List[Test]] = cases.map { _ map (tm => Test(treeMakerToCond(tm, condMaker), tm)) }
+ val constFalse = (_: TreeMaker) => FalseCond
+ val constTrue = (_: TreeMaker) => TrueCond
+
+ final def approximateMatch(cases: List[List[TreeMaker]], handleUnknown: TreeMaker => Cond = constFalse, rewriteNil: Boolean = false): List[List[Test]] =
+ cases.map { _ map (tm => Test(treeMakerToCond(tm, handleUnknown, updateSubst = true, rewriteNil), tm)) }
- final def approximateMatchAgain(cases: List[List[TreeMaker]], condMaker: CondMaker = makeCond): List[List[Test]] = cases.map { _ map (tm => Test(treeMakerToCondNoSubst(tm, condMaker), tm)) }
+ final def approximateMatchAgain(cases: List[List[TreeMaker]], handleUnknown: TreeMaker => Cond = constFalse, rewriteNil: Boolean = false): List[List[Test]] =
+ cases.map { _ map (tm => Test(treeMakerToCond(tm, handleUnknown, updateSubst = false, rewriteNil), tm)) }
}
+
def approximateMatch(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] = {
object approximator extends TreeMakersToConds(root)
approximator.approximateMatch(cases)
}
def showTreeMakers(cases: List[List[TreeMaker]]) = {
- patmatDebug("treeMakers:")
- patmatDebug(alignAcrossRows(cases, ">>"))
+ // patmatDebug ("treeMakers:")
+ // patmatDebug (alignAcrossRows(cases, ">>"))
}
def showTests(testss: List[List[Test]]) = {
- patmatDebug("tests: ")
- patmatDebug(alignAcrossRows(testss, "&"))
+ // patmatDebug ("tests: ")
+ // patmatDebug (alignAcrossRows(testss, "&"))
}
}
@@ -1597,24 +1615,38 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case class Eq(p: Var, q: Const) extends Prop
type Const <: AbsConst
-
trait AbsConst {
def implies(other: Const): Boolean
def excludes(other: Const): Boolean
}
+ type TypeConst <: Const
+ def TypeConst: TypeConstExtractor
+ trait TypeConstExtractor {
+ def apply(tp: Type): Const
+ }
+
+ def NullConst: Const
+
type Var <: AbsVar
trait AbsVar {
// indicate we may later require a prop for V = C
def registerEquality(c: Const): Unit
- // indicates null is part of the domain
- def considerNull: Unit
+ // call this to indicate null is part of the domain
+ def registerNull: Unit
+
+ // can this variable be null?
+ def mayBeNull: Boolean
- // compute the domain and return it (call considerNull first!)
+ // compute the domain and return it (call registerNull first!)
def domainSyms: Option[Set[Sym]]
+ // the symbol for this variable being equal to its statically known type
+ // (only available if registerEquality has been called for that type before)
+ def symForStaticTp: Option[Sym]
+
// for this var, call it V, turn V = C into the equivalent proposition in boolean logic
// registerEquality(c) must have been called prior to this call
// in fact, all equalities relevant to this variable must have been registered
@@ -1692,7 +1724,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
//
// TODO: for V1 representing x1 and V2 standing for x1.head, encode that
// V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
- def removeVarEq(props: List[Prop], considerNull: Boolean = false): (Prop, List[Prop]) = {
+ def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = {
val start = Statistics.startTimer(patmatAnaVarEq)
val vars = new collection.mutable.HashSet[Var]
@@ -1714,7 +1746,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
props foreach gatherEqualities.apply
- if (considerNull) vars foreach (_.considerNull)
+ if (modelNull) vars foreach (_.registerNull)
val pure = props map rewriteEqualsToProp.apply
@@ -1730,7 +1762,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
override def hashCode = a.hashCode ^ b.hashCode
}
- // patmatDebug("vars: "+ vars)
+ // patmatDebug ("removeVarEq vars: "+ vars)
vars.foreach { v =>
val excludedPair = new collection.mutable.HashSet[ExcludedPair]
@@ -1741,15 +1773,24 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// coverage is formulated as: A \/ B \/ C and the implications are
v.domainSyms foreach { dsyms => addAxiom(\/(dsyms)) }
+ // when this variable cannot be null the equality corresponding to the type test `(x: T)`, where T is x's static type,
+ // is always true; when the variable may be null we use the implication `(x != null) => (x: T)` for the axiom
+ v.symForStaticTp foreach { symForStaticTp =>
+ if (v.mayBeNull) addAxiom(Or(v.propForEqualsTo(NullConst), symForStaticTp))
+ else addAxiom(symForStaticTp)
+ }
+
val syms = v.equalitySyms
- // patmatDebug("eqSyms "+(v, syms))
+ // patmatDebug ("eqSyms "+(v, syms))
syms foreach { sym =>
// if we've already excluded the pair at some point (-A \/ -B), then don't exclude the symmetric one (-B \/ -A)
// (nor the positive implications -B \/ A, or -A \/ B, which would entail the equality axioms falsifying the whole formula)
val todo = syms filterNot (b => (b.const == sym.const) || excludedPair(ExcludedPair(b.const, sym.const)))
val (excluded, notExcluded) = todo partition (b => sym.const.excludes(b.const))
val implied = notExcluded filter (b => sym.const.implies(b.const))
- // patmatDebug("implications: "+ (sym.const, excluded, implied, syms))
+ // patmatDebug ("eq axioms for: "+ sym.const)
+ // patmatDebug ("excluded: "+ excluded)
+ // patmatDebug ("implied: "+ implied)
// when this symbol is true, what must hold...
implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
@@ -1762,8 +1803,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- // patmatDebug("eqAxioms:\n"+ cnfString(eqFreePropToSolvable(eqAxioms)))
- // patmatDebug("pure:\n"+ cnfString(eqFreePropToSolvable(pure)))
+ // patmatDebug ("eqAxioms:\n"+ cnfString(eqFreePropToSolvable(eqAxioms)))
+ // patmatDebug ("pure:"+ pure.map(p => cnfString(eqFreePropToSolvable(p))).mkString("\n"))
Statistics.stopTimer(patmatAnaVarEq, start)
@@ -1778,7 +1819,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// may throw a CNFBudgetExceeded
def propToSolvable(p: Prop) = {
- val (eqAxioms, pure :: Nil) = removeVarEq(List(p), considerNull = false)
+ val (eqAxioms, pure :: Nil) = removeVarEq(List(p), modelNull = false)
eqFreePropToSolvable(And(eqAxioms, pure))
}
@@ -1905,12 +1946,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 10): List[Model]=
if (recursionDepthAllowed == 0) models
else {
- // patmatDebug("solving\n"+ cnfString(f))
+ // patmatDebug ("find all models for\n"+ cnfString(f))
val model = findModelFor(f)
// if we found a solution, conjoin the formula with the model's negation and recurse
if (model ne NoModel) {
val unassigned = (vars -- model.keySet).toList
- // patmatDebug("unassigned "+ unassigned +" in "+ model)
+ // patmatDebug ("unassigned "+ unassigned +" in "+ model)
def force(lit: Lit) = {
val model = withLit(findModelFor(dropUnit(f, lit)), lit)
if (model ne NoModel) List(model)
@@ -1919,7 +1960,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val forced = unassigned flatMap { s =>
force(Lit(s, true)) ++ force(Lit(s, false))
}
- // patmatDebug("forced "+ forced)
+ // patmatDebug ("forced "+ forced)
val negated = negateModel(model)
findAllModels(f :+ negated, model :: (forced ++ models), recursionDepthAllowed - 1)
}
@@ -1942,7 +1983,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def findModelFor(f: Formula): Model = {
@inline def orElse(a: Model, b: => Model) = if (a ne NoModel) a else b
- // patmatDebug("dpll\n"+ cnfString(f))
+ // patmatDebug ("DPLL\n"+ cnfString(f))
val start = Statistics.startTimer(patmatAnaDPLL)
@@ -2004,7 +2045,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private val uniques = new collection.mutable.HashMap[Tree, Var]
def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
}
- class Var(val path: Tree, fullTp: Type) extends AbsVar {
+ class Var(val path: Tree, staticTp: Type) extends AbsVar {
private[this] val id: Int = Var.nextId
// private[this] var canModify: Option[Array[StackTraceElement]] = None
@@ -2016,10 +2057,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private[this] val symForEqualsTo = new collection.mutable.HashMap[Const, Sym]
// when looking at the domain, we only care about types we can check at run time
- val domainTp: Type = checkableType(fullTp)
+ val staticTpCheckable: Type = checkableType(staticTp)
- private[this] var _considerNull = false
- def considerNull: Unit = { ensureCanModify; if (NullTp <:< domainTp) _considerNull = true }
+ private[this] var _mayBeNull = false
+ def registerNull: Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
+ def mayBeNull: Boolean = _mayBeNull
// case None => domain is unknown,
// case Some(List(tps: _*)) => domain is exactly tps
@@ -2027,7 +2069,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// once we go to run-time checks (on Const's), convert them to checkable types
// TODO: there seems to be bug for singleton domains (variable does not show up in model)
lazy val domain: Option[Set[Const]] = {
- val subConsts = enumerateSubtypes(fullTp).map{ tps =>
+ val subConsts = enumerateSubtypes(staticTp).map{ tps =>
tps.toSet[Type].map{ tp =>
val domainC = TypeConst(tp)
registerEquality(domainC)
@@ -2036,18 +2078,19 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
val allConsts =
- if (! _considerNull) subConsts
- else {
+ if (mayBeNull) {
registerEquality(NullConst)
subConsts map (_ + NullConst)
- }
+ } else
+ subConsts
observed; allConsts
}
- // accessing after calling considerNull will result in inconsistencies
+ // accessing after calling registerNull will result in inconsistencies
lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
+ lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
// populate equalitySyms
// don't care about the result, but want only one fresh symbol per distinct constant c
@@ -2061,8 +2104,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
- // don't call until all equalities have been registered and considerNull has been called (if needed)
- def describe = toString + ": " + fullTp + domain.map(_.mkString(" ::= ", " | ", "// "+ symForEqualsTo.keys)).getOrElse(symForEqualsTo.keys.mkString(" ::= ", " | ", " | ...")) + " // = " + path
+ // don't call until all equalities have been registered and registerNull has been called (if needed)
+ def describe = toString + ": " + staticTp + domain.map(_.mkString(" ::= ", " | ", "// "+ symForEqualsTo.keys)).getOrElse(symForEqualsTo.keys.mkString(" ::= ", " | ", " | ...")) + " // = " + path
override def toString = "V"+ id
}
@@ -2071,7 +2114,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// a literal constant becomes ConstantType(Constant(v)) when the type allows it (roughly, anyval + string + null)
// equality between variables: SingleType(x) (note that pattern variables cannot relate to each other -- it's always patternVar == nonPatternVar)
object Const {
- def resetUniques() = {_nextTypeId = 0; _nextValueId = 0; uniques.clear() ; trees.clear() } // patmatDebug("RESET")
+ def resetUniques() = {_nextTypeId = 0; _nextValueId = 0; uniques.clear() ; trees.clear()}
private var _nextTypeId = 0
def nextTypeId = {_nextTypeId += 1; _nextTypeId}
@@ -2083,9 +2126,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private[SymbolicMatchAnalysis] def unique(tp: Type, mkFresh: => Const): Const =
uniques.get(tp).getOrElse(
uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
- case Some((_, c)) => c
+ case Some((_, c)) =>
+ // patmatDebug ("unique const: "+ (tp, c))
+ c
case _ =>
val fresh = mkFresh
+ // patmatDebug ("uniqued const: "+ (tp, fresh))
uniques(tp) = fresh
fresh
})
@@ -2101,19 +2147,19 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if (!t.symbol.isStable) t.tpe.narrow
else trees find (a => a.correspondsStructure(t)(sameValue)) match {
case Some(orig) =>
- // patmatDebug("unique: "+ (orig, orig.tpe))
+ // patmatDebug ("unique tp for tree: "+ (orig, orig.tpe))
orig.tpe
case _ =>
// duplicate, don't mutate old tree (TODO: use a map tree -> type instead?)
val treeWithNarrowedType = t.duplicate setType t.tpe.narrow
- // patmatDebug("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
+ // patmatDebug ("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
trees += treeWithNarrowedType
treeWithNarrowedType.tpe
}
}
sealed abstract class Const extends AbsConst {
- protected def tp: Type
+ def tp: Type
protected def wideTp: Type
def isAny = wideTp.typeSymbol == AnyClass
@@ -2144,7 +2190,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case (_: TypeConst, _: TypeConst) => !((tp <:< other.tp) || (other.tp <:< tp))
case _ => false
}
- // if(r) patmatDebug("excludes : "+(this, other))
+ // if(r) patmatDebug("excludes : "+(this, this.tp, other, other.tp, (tp <:< other.tp), (tp <:< other.wideTp), (other.tp <:< tp), (other.tp <:< wideTp)))
// else patmatDebug("NOT excludes: "+(this, other))
r
}
@@ -2169,7 +2215,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
clsTp
}
- object TypeConst {
+ object TypeConst extends TypeConstExtractor {
def apply(tp: Type) = {
if (tp =:= NullTp) NullConst
else if (tp.isInstanceOf[SingletonType]) ValueConst.fromType(tp)
@@ -2215,7 +2261,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// for Selects, which are handled by the next case, the prefix of the select varies independently of the symbol (see pos/virtpatmat_unreach_select.scala)
singleType(tp.prefix, p.symbol)
case _ =>
- // patmatDebug("unique type for "+(p, Const.uniqueTpForTree(p)))
Const.uniqueTpForTree(p)
}
@@ -2235,7 +2280,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
lazy val NullTp = ConstantType(Constant(null))
case object NullConst extends Const {
- protected def tp = NullTp
+ def tp = NullTp
protected def wideTp = NullTp
def isValue = true
@@ -2249,8 +2294,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def symbolic(t: Cond): Prop = t match {
case AndCond(a, b) => And(symbolic(a), symbolic(b))
case OrCond(a, b) => Or(symbolic(a), symbolic(b))
- case Top => True
- case Havoc => False
+ case TrueCond => True
+ case FalseCond => False
case TypeCond(p, pt) => Eq(Var(p), TypeConst(checkableType(pt)))
case EqualityCond(p, q) => Eq(Var(p), ValueConst(q))
case NonNullCond(p) => if (!modelNull) True else Not(Eq(Var(p), NullConst))
@@ -2274,46 +2319,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// thus, the case is unreachable if there is no model for -(-P /\ C),
// or, equivalently, P \/ -C, or C => P
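A small hedged example (not from this patch) of what this analysis detects: the second case below can only match values already matched by the first, so it is reported as unreachable.

    def classify(x: Option[Int]): String = x match {
      case Some(_) => "some"
      case Some(2) => "two"     // unreachable: every Some(2) is already covered by Some(_)
      case None    => "none"
    }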
def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = {
- // customize TreeMakersToConds (which turns a tree of tree makers into a more abstract DAG of tests)
- // when approximating the current case (which we hope is reachable), be optimistic about the unknowns
- object reachabilityApproximation extends TreeMakersToConds(prevBinder) {
- def makeCondOptimistic(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = tm match {
- // for unreachability, let's assume a guard always matches (unless we statically determined otherwise)
- // otherwise, a guarded case would be considered unreachable
- case GuardTreeMaker(guard) =>
- guard.tpe match {
- case ConstantType(Constant(false)) => Havoc // not the best name; however, symbolically, 'Havoc' becomes 'False'
- case _ => Top
- }
- // similar to a guard, user-defined extractors should not cause us to freak out
- // if we're not 100% sure it does not match (i.e., its result type is None or Constant(false) -- TODO),
- // let's stay optimistic and assume it does
- case ExtractorTreeMaker(_, _, _, _)
- | ProductExtractorTreeMaker(_, Some(_), _) => Top
- // TODO: consider length-checks
- case _ =>
- makeCond(tm)(recurse)
- }
-
- // be pessimistic when approximating the prefix of the current case
- // we hope the prefix fails so that we might get to the current case, which we hope is not dead
- def makeCondPessimistic(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = makeCond(tm)(recurse)
- }
-
val start = Statistics.startTimer(patmatAnaReach)
// use the same approximator so we share variables,
// but need different conditions depending on whether we're conservatively looking for failure or success
- val testCasesOk = reachabilityApproximation.approximateMatch(cases, reachabilityApproximation.makeCondOptimistic)
- val testCasesFail = reachabilityApproximation.approximateMatchAgain(cases, reachabilityApproximation.makeCondPessimistic)
+ val reachabilityApproximation = new TreeMakersToConds(prevBinder)
+ val testCasesOk = reachabilityApproximation.approximateMatch(cases, reachabilityApproximation.constTrue)
+ val testCasesFail = reachabilityApproximation.approximateMatchAgain(cases, reachabilityApproximation.constFalse)
reachabilityApproximation.discard()
prepareNewAnalysis()
val propsCasesOk = testCasesOk map (t => symbolicCase(t, modelNull = true))
val propsCasesFail = testCasesFail map (t => Not(symbolicCase(t, modelNull = true)))
- val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, considerNull = true)
- val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, considerNull = true)
+ val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
+ val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
try {
// most of the time eqAxiomsFail == eqAxiomsOk, but the different approximations might cause different variables to disappear in general
@@ -2327,8 +2347,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
var reachable = true
var caseIndex = 0
- // patmatDebug("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables) map (_.describe) mkString ("\n")))
- // patmatDebug("equality axioms:\n"+ cnfString(eqAxiomsCNF))
+ // patmatDebug ("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables) map (_.describe) mkString ("\n")))
+ // patmatDebug ("equality axioms:\n"+ cnfString(eqAxiomsCNF))
// invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
// termination: prefixRest.length decreases by 1
@@ -2369,14 +2389,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case UnitClass =>
Some(List(UnitClass.tpe))
case BooleanClass =>
- // patmatDebug("enum bool "+ tp)
Some(List(ConstantType(Constant(true)), ConstantType(Constant(false))))
// TODO case _ if tp.isTupleType => // recurse into component types
case modSym: ModuleClassSymbol =>
Some(List(tp))
// make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
- // patmatDebug("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
+ // patmatDebug ("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
None
case sym =>
val subclasses = (
@@ -2384,7 +2403,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// symbols which are both sealed and abstract need not be covered themselves, because
// all of their children must be and they cannot otherwise be created.
filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
- // patmatDebug("subclasses "+ (sym, subclasses))
+ // patmatDebug ("enum sealed -- subclasses: "+ (sym, subclasses))
val tpApprox = typer.infer.approximateAbstracts(tp)
val pre = tpApprox.prefix
@@ -2400,7 +2419,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
else None
})
- // patmatDebug("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
+ // patmatDebug ("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
Some(validSubTypes)
}
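A short sketch (editorial, assumed shapes, not from this patch) of the sealed hierarchies this enumeration targets: the children of the sealed parent form the domain of the scrutinee, so a match that omits one of them can be reported as non-exhaustive with that child as the counter-example.

    sealed abstract class Tree
    case class Node(left: Tree, right: Tree) extends Tree
    case object Leaf extends Tree

    object ExhaustivitySketch {
      def size(t: Tree): Int = t match {      // warns: match may not be exhaustive, Leaf is missing
        case Node(l, r) => 1 + size(l) + size(r)
      }
    }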
@@ -2420,7 +2439,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
val res = toCheckable(tp)
- // patmatDebug("checkable "+(tp, res))
+ // patmatDebug ("checkable "+(tp, res))
res
}
@@ -2444,37 +2463,15 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// - there are extractor calls (that we can't secretly/soundly rewrite)
val start = Statistics.startTimer(patmatAnaExhaust)
var backoff = false
- object exhaustivityApproximation extends TreeMakersToConds(prevBinder) {
- def makeCondExhaustivity(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = tm match {
- case p @ ExtractorTreeMaker(extractor, Some(lenCheck), testedBinder, _) =>
- p.checkedLength match {
- // pattern: `List()` (interpret as `Nil`)
- // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil
- case Some(0) if testedBinder.tpe.typeSymbol == ListClass => // extractor.symbol.owner == SeqFactory
- EqualityCond(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe))
- case _ =>
- backoff = true
- makeCond(tm)(recurse)
- }
- case ExtractorTreeMaker(_, _, _, _) =>
-// patmatDebug("backing off due to "+ tm)
- backoff = true
- makeCond(tm)(recurse)
- case GuardTreeMaker(guard) =>
- guard.tpe match {
- case ConstantType(Constant(true)) => Top
- case ConstantType(Constant(false)) => Havoc
- case _ =>
-// patmatDebug("can't statically interpret guard: "+(guard, guard.tpe))
- backoff = true
- Havoc
- }
- case _ =>
- makeCond(tm)(recurse)
- }
- }
- val tests = exhaustivityApproximation.approximateMatch(cases, exhaustivityApproximation.makeCondExhaustivity)
+ val exhaustivityApproximation = new TreeMakersToConds(prevBinder)
+ val tests = exhaustivityApproximation.approximateMatch(cases, {
+ case BodyTreeMaker(_, _) => TrueCond // will be discarded by symbolicCase later
+ case tm =>
+ // patmatDebug("backing off due to "+ tm)
+ backoff = true
+ FalseCond
+ }, rewriteNil = true)
if (backoff) Nil else {
val prevBinderTree = exhaustivityApproximation.binderToUniqueTree(prevBinder)
@@ -2497,15 +2494,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// when does the match fail?
val matchFails = Not(\/(symbolicCases))
+ val vars = gatherVariables(matchFails)
// debug output:
- // patmatDebug("analysing:")
- // showTreeMakers(cases)
- // showTests(tests)
- //
- // val vars = gatherVariables(matchFails)
+ // patmatDebug ("analysing:")
+ showTreeMakers(cases)
+ showTests(tests)
+
// patmatDebug("\nvars:\n"+ (vars map (_.describe) mkString ("\n")))
- //
// patmatDebug("\nmatchFails as CNF:\n"+ cnfString(propToSolvable(matchFails)))
try {
@@ -2521,7 +2517,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
pruned
} catch {
case e : CNFBudgetExceeded =>
- // patmatDebug(util.Position.formatMessage(prevBinder.pos, "Cannot check match for exhaustivity", false))
+ // patmatDebug (util.Position.formatMessage(prevBinder.pos, "Cannot check match for exhaustivity", false))
// e.printStackTrace()
Nil // CNF budget exceeded
}
@@ -2542,13 +2538,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
case class ValueExample(c: ValueConst) extends CounterExample { override def toString = c.toString }
case class TypeExample(c: Const) extends CounterExample { override def toString = "(_ : "+ c +")" }
- case class NegativeExample(nonTrivialNonEqualTo: List[Const]) extends CounterExample {
+ case class NegativeExample(eqTo: Const, nonTrivialNonEqualTo: List[Const]) extends CounterExample {
// require(nonTrivialNonEqualTo.nonEmpty, nonTrivialNonEqualTo)
override def toString = {
val negation =
if (nonTrivialNonEqualTo.tail.isEmpty) nonTrivialNonEqualTo.head.toString
- else nonTrivialNonEqualTo.map(_.toString).sorted.mkString("in (", ", ", ")")
- "<not "+ negation +">"
+ else nonTrivialNonEqualTo.map(_.toString).sorted.mkString("(", ", ", ")")
+ "(x: "+ eqTo +" forSome x not in "+ negation +")"
}
}
case class ListExample(ctorArgs: List[CounterExample]) extends CounterExample {
@@ -2594,7 +2590,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def varAssignmentString(varAssignment: Map[Var, (Seq[Const], Seq[Const])]) =
varAssignment.toSeq.sortBy(_._1.toString).map { case (v, (trues, falses)) =>
val assignment = "== "+ (trues mkString("(", ", ", ")")) +" != ("+ (falses mkString(", ")) +")"
- v +"(="+ v.path +": "+ v.domainTp +") "+ assignment
+ v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment
}.mkString("\n")
def modelString(model: Model) = varAssignmentString(modelToVarAssignment(model))
@@ -2611,13 +2607,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// ...
val varAssignment = modelToVarAssignment(model)
- // patmatDebug("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
+ // patmatDebug ("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
// chop a path into a list of symbols
def chop(path: Tree): List[Symbol] = path match {
case Ident(_) => List(path.symbol)
case Select(pre, name) => chop(pre) :+ path.symbol
- case _ => // patmatDebug("don't know how to chop "+ path)
+ case _ =>
+ // patmatDebug("don't know how to chop "+ path)
Nil
}
@@ -2658,8 +2655,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// node in the tree that describes how to construct a counter-example
case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: collection.mutable.Map[Symbol, VariableAssignment]) {
// need to prune since the model now incorporates all super types of a constant (needed for reachability)
- private lazy val prunedEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && (better implies subsumed)))
- private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.domainTp }).typeSymbol.primaryConstructor
+ private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && (better implies subsumed)))
+ private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
+ private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
@@ -2676,7 +2674,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def toCounterExample(beBrief: Boolean = false): CounterExample =
if (!allFieldAssignmentsLegal) NoExample
else {
- // patmatDebug("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
+ // patmatDebug ("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
val res = prunedEqualTo match {
// a definite assignment to a value
case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
@@ -2684,9 +2682,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// constructor call
// or we did not gather any information about equality but we have information about the fields
// --> typical example is when the scrutinee is a tuple and all the cases first unwrap that tuple and only then test something interesting
- case _ if cls != NoSymbol &&
- ( prunedEqualTo.nonEmpty
- || (fields.nonEmpty && !isPrimitiveValueClass(cls) && prunedEqualTo.isEmpty && notEqualTo.isEmpty)) =>
+ case _ if cls != NoSymbol && !isPrimitiveValueClass(cls) &&
+ ( uniqueEqualTo.nonEmpty
+ || (fields.nonEmpty && prunedEqualTo.isEmpty && notEqualTo.isEmpty)) =>
@inline def args(brevity: Boolean = beBrief) = {
// figure out the constructor arguments from the field assignment
@@ -2707,13 +2705,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// negative information
case Nil if nonTrivialNonEqualTo.nonEmpty =>
// negation tends to get pretty verbose
- if (beBrief) WildcardExample else NegativeExample(nonTrivialNonEqualTo)
+ if (beBrief) WildcardExample
+ else {
+ val eqTo = equalTo.headOption getOrElse TypeConst(variable.staticTpCheckable)
+ NegativeExample(eqTo, nonTrivialNonEqualTo)
+ }
// not a valid counter-example, possibly since we have a definite type but there was a field mismatch
// TODO: improve reasoning -- in the meantime, a false negative is better than an annoying false positive
case _ => NoExample
}
- // patmatDebug("described as: "+ res)
+ // patmatDebug ("described as: "+ res)
res
}
@@ -2748,16 +2750,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val cond = test.cond
def simplify(c: Cond): Set[Cond] = c match {
- case AndCond(a, b) => simplify(a) ++ simplify(b)
- case OrCond(_, _) => Set(Havoc) // TODO: supremum?
- case NonNullCond(_) => Set(Top) // not worth remembering
- case _ => Set(c)
+ case AndCond(a, b) => simplify(a) ++ simplify(b)
+ case OrCond(_, _) => Set(FalseCond) // TODO: make more precise
+ case NonNullCond(_) => Set(TrueCond) // not worth remembering
+ case _ => Set(c)
}
val conds = simplify(cond)
- if (conds(Havoc)) false // stop when we encounter a havoc
+ if (conds(FalseCond)) false // stop when we encounter a definite "no" or a "not sure"
else {
- val nonTrivial = conds filterNot (_ == Top)
+ val nonTrivial = conds filterNot (_ == TrueCond)
if (nonTrivial nonEmpty) {
tested ++= nonTrivial
@@ -2784,7 +2786,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
tested.clear()
tests dropWhile storeDependencies
}
- // patmatDebug("dependencies: "+ dependencies)
+ // patmatDebug ("dependencies: "+ dependencies)
// find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase
// then, collapse these contiguous sequences of reusing tests
@@ -2802,7 +2804,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// if there's no sharing, simply map to the tree makers corresponding to the tests
var currDeps = Set[Cond]()
val (sharedPrefix, suffix) = tests span { test =>
- (test.cond eq Top) || (for(
+ (test.cond == TrueCond) || (for(
reusedTest <- test.reuses;
nextDeps <- dependencies.get(reusedTest);
diff <- (nextDeps -- currDeps).headOption;
@@ -2819,23 +2821,23 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
// patmatDebug("sharedPrefix: "+ sharedPrefix)
- // if the shared prefix contains interesting conditions (!= Top)
+ // if the shared prefix contains interesting conditions (!= TrueCond)
// and the last of such interesting shared conditions reuses another treemaker's test
// replace the whole sharedPrefix by a ReusingCondTreeMaker
- for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond eq Top).headOption;
+ for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond == TrueCond).headOption;
lastReused <- lastShared.reuses)
yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
}
- collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains Top-tests, which are dropped above)
+ collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains TrueCond-tests, which are dropped above)
}
okToCall = true // TODO: remove (debugging)
// replace original treemakers that are reused (as determined when computing collapsed),
// by ReusedCondTreeMakers
val reusedMakers = collapsed mapConserve (_ mapConserve reusedOrOrig)
-// patmatDebug("after CSE:")
-// showTreeMakers(reusedMakers)
+ // patmatDebug ("after CSE:")
+ showTreeMakers(reusedMakers)
reusedMakers
}
@@ -2918,55 +2920,197 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def alternativesSupported: Boolean
+ // when collapsing guarded switch cases we may sometimes need to jump to the default case
+ // however, that's not supported in exception handlers, so when we would need to jump but can't, don't emit a switch
+ // TODO: make more fine-grained, as we don't always need to jump
+ def canJump: Boolean
+
+ def unchecked: Boolean
+
+
def isDefault(x: CaseDef): Boolean
def defaultSym: Symbol
def defaultBody: Tree
- def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef
private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
if (xs exists (_.isEmpty)) None else Some(xs.flatten)
+ object GuardAndBodyTreeMakers {
+ def unapply(tms: List[TreeMaker]): Option[(Tree, Tree)] = {
+ tms match {
+ case (btm@BodyTreeMaker(body, _)) :: Nil => Some((EmptyTree, btm.substitution(body)))
+ case (gtm@GuardTreeMaker(guard)) :: (btm@BodyTreeMaker(body, _)) :: Nil => Some((gtm.substitution(guard), btm.substitution(body)))
+ case _ => None
+ }
+ }
+ }
+
+ private val defaultLabel: Symbol = NoSymbol.newLabel(freshName("default"), NoPosition) setFlag SYNTH_CASE
+
+ /** Collapse guarded cases that switch on the same constant (the last case may be unguarded).
+ *
+ * `{case C if(G_i) => B_i | case C'_j if G'_j => B'_j}*` is rewritten to
+ * `case C => {if(G_i) B_i}*` ++ rewrite({case C'_j if G'_j => B'_j}*)
+ *
+ */
+ private def collapseGuardedCases(cases: List[CaseDef]) = {
+ // requires(switchesOnSameConst.forall(caseChecksSameConst(switchesOnSameConst.head)))
+ def collapse(switchesOnSameConst: List[CaseDef]): List[CaseDef] =
+ if (switchesOnSameConst.tail.isEmpty && (switchesOnSameConst.head.guard == EmptyTree)) switchesOnSameConst
+ else {
+ val commonPattern = switchesOnSameConst.head.pat
+
+ // jump to default case (either the user-supplied one or the synthetic one)
+ // unless we're collapsing the default case, in which case we re-use the same body as the synthetic catchall (throwing a MatchError, or rethrowing the exception)
+ val jumpToDefault: Tree =
+ if (!canJump || isDefault(CaseDef(commonPattern, EmptyTree, EmptyTree))) defaultBody
+ else Apply(Ident(defaultLabel), Nil)
+
+ val guardedBody = switchesOnSameConst.foldRight(jumpToDefault){
+ // the last case may be un-guarded (we know it's the last one since fold's accum == jumpToDefault)
+ // --> replace jumpToDefault by the un-guarded case's body
+ case (CaseDef(_, EmptyTree, b), `jumpToDefault`) => b
+ case (CaseDef(_, g, b), els) if g != EmptyTree => If(g, b, els)
+ // error: the un-guarded case did not come last
+ case _ =>
+ return switchesOnSameConst
+ }
+
+ // if the cases that we're going to collapse bind variables,
+ // must replace them by the single binder introduced by the collapsed case
+ val binders = switchesOnSameConst.collect{case CaseDef(x@Bind(_, _), _, _) if x.symbol != NoSymbol => x.symbol}
+ val (pat, guardedBodySubst) =
+ if (binders.isEmpty) (commonPattern, guardedBody)
+ else {
+ // create a single fresh binder to subsume the old binders (and their types)
+ // TODO: I don't think the binder's types can actually be different (due to checks in caseChecksSameConst)
+ // if they do somehow manage to diverge, the lub might not be precise enough and we could get a type error
+ val binder = freshSym(binders.head.pos, lub(binders.map(_.tpe)))
+
+ // the patterns in switchesOnSameConst are equal (according to caseChecksSameConst), modulo variable binding
+ // we can thus safely pick the first one arbitrarily, provided we correct binding
+ val origPatWithoutBind = commonPattern match {
+ case Bind(b, orig) => orig
+ case o => o
+ }
+ // need to replace `defaultSym` as well -- it's used in `defaultBody` (see `jumpToDefault` above)
+ val unifiedBody = guardedBody.substituteSymbols(defaultSym :: binders, binder :: binders.map(_ => binder))
+ (Bind(binder, origPatWithoutBind), unifiedBody)
+ }
+
+ List(CaseDef(pat, EmptyTree, guardedBodySubst))
+ }
+
+ @annotation.tailrec def partitionAndCollapse(cases: List[CaseDef], accum: List[CaseDef] = Nil): List[CaseDef] =
+ if (cases.isEmpty) accum
+ else {
+ val (same, others) = cases.tail partition (caseChecksSameConst(cases.head))
+ partitionAndCollapse(others, accum ++ collapse(cases.head :: same))
+ }
+
+ // common case: no rewrite needed when there are no guards
+ if (cases.forall(c => c.guard == EmptyTree)) cases
+ else partitionAndCollapse(cases)
+ }
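A hedged source-level illustration (not from this patch) of the match shapes this rewrite makes switchable: the two guarded alternatives for the constant 1 are collapsed into a single `case 1` whose body tries the guards in order and jumps to the default case when both fail.

    def tag(x: Int, p: Boolean, q: Boolean): String = x match {
      case 1 if p => "one, guarded by p"
      case 1 if q => "one, guarded by q"
      case 2      => "two"
      case _      => "other"
    }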
+
+ private def caseChecksSameConst(x: CaseDef)(y: CaseDef) = (x, y) match {
+ // regular switch
+ case (CaseDef(Literal(Constant(cx)), _, _), CaseDef(Literal(Constant(cy)), _, _)) => cx == cy
+ case (CaseDef(Ident(nme.WILDCARD), _, _), CaseDef(Ident(nme.WILDCARD), _, _)) => true
+ // type-switch for catch
+ case (CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpX)), _, _), CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpY)), _, _)) => tpX.tpe =:= tpY.tpe
+ case _ => false
+ }
+
+ private def checkNoGuards(cs: List[CaseDef]) =
+ if (cs.exists{case CaseDef(_, g, _) => g != EmptyTree case _ => false}) None
+ else Some(cs)
+
+ // requires(cs.forall(_.guard == EmptyTree))
+ private def unreachableCase(cs: List[CaseDef]): Option[CaseDef] = {
+ var cases = cs
+ var unreachable: Option[CaseDef] = None
+
+ while (cases.nonEmpty && unreachable.isEmpty) {
+ if (isDefault(cases.head) && cases.tail.nonEmpty) unreachable = Some(cases.tail.head)
+ else unreachable = cases.tail.find(caseChecksSameConst(cases.head))
+
+ cases = cases.tail
+ }
+
+ unreachable
+ }
+
// empty list ==> failure
def apply(cases: List[(Symbol, List[TreeMaker])], pt: Type): List[CaseDef] = {
val caseDefs = cases map { case (scrutSym, makers) =>
makers match {
// default case
- case (btm@BodyTreeMaker(body, _)) :: Nil =>
- Some(defaultCase(scrutSym, btm.substitution(body)))
+ case GuardAndBodyTreeMakers(guard, body) =>
+ Some(defaultCase(scrutSym, guard, body))
// constant (or typetest for typeSwitch)
- case SwitchableTreeMaker(pattern) :: (btm@BodyTreeMaker(body, _)) :: Nil =>
- Some(CaseDef(pattern, EmptyTree, btm.substitution(body)))
+ case SwitchableTreeMaker(pattern) :: GuardAndBodyTreeMakers(guard, body) =>
+ Some(CaseDef(pattern, guard, body))
// alternatives
- case AlternativesTreeMaker(_, altss, _) :: (btm@BodyTreeMaker(body, _)) :: Nil if alternativesSupported =>
- val casePatterns = altss map {
+ case AlternativesTreeMaker(_, altss, _) :: GuardAndBodyTreeMakers(guard, body) if alternativesSupported =>
+ val switchableAlts = altss map {
case SwitchableTreeMaker(pattern) :: Nil =>
Some(pattern)
case _ =>
None
}
- sequence(casePatterns) map { patterns =>
- val substedBody = btm.substitution(body)
- CaseDef(Alternative(patterns), EmptyTree, substedBody)
+ // succeed if they were all switchable
+ sequence(switchableAlts) map { switchableAlts =>
+ CaseDef(Alternative(switchableAlts), guard, body)
}
- case _ => // patmatDebug("can't emit switch for "+ makers)
+ case _ =>
+ // patmatDebug("can't emit switch for "+ makers)
None //failure (can't translate pattern to a switch)
}
}
(for(
- caseDefs <- sequence(caseDefs)) yield
- if (caseDefs exists isDefault) caseDefs
- else {
- caseDefs :+ defaultCase()
+ caseDefsWithGuards <- sequence(caseDefs);
+ collapsed = collapseGuardedCases(caseDefsWithGuards);
+ caseDefs <- checkNoGuards(collapsed)) yield {
+ if (!unchecked)
+ unreachableCase(caseDefs) foreach (cd => reportUnreachable(cd.body.pos))
+
+ // if we rewrote, we may need the default label to jump there (but then again, we may not)
+ // TODO: make more precise; we don't need the default label if
+ // - all collapsed cases included an un-guarded case (some of the guards of each case will always be true)
+ // - or: there was no default case (if all the guards of a case fail, it's a matcherror for sure)
+ val needDefaultLabel = (collapsed != caseDefsWithGuards)
+
+ def wrapInDefaultLabelDef(cd: CaseDef): CaseDef =
+ if (needDefaultLabel && canJump) deriveCaseDef(cd){ b =>
+ // TODO: can b.tpe ever be null? can't really use pt, see e.g. pos/t2683 or cps/match1.scala
+ defaultLabel setInfo MethodType(Nil, if (b.tpe != null) b.tpe else pt)
+ LabelDef(defaultLabel, Nil, b)
+ } else cd
+
+ (caseDefs partition isDefault) match {
+ case (Nil, caseDefs) => caseDefs :+ wrapInDefaultLabelDef(defaultCase())
+ case (default :: Nil, caseDefs) if canJump || !needDefaultLabel =>
+ // we either didn't collapse (and thus definitely didn't have to emit a jump),
+ // or we canJump (and then the potential jumps in collapsed are ok)
+ caseDefs :+ wrapInDefaultLabelDef(default)
+ case _ => Nil
+ // TODO: if (canJump) error message (but multiple defaults should be caught by unreachability)
+ // if (!canJump) we got ourselves in the situation where we might need to emit a jump when we can't (in exception handler)
+ // --> TODO: refine the condition to detect whether we actually really needed to jump, but this seems relatively rare
}
+ }
) getOrElse Nil
}
}
- class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree]) extends SwitchMaker {
+ class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker {
val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
val alternativesSupported = true
+ val canJump = true
object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat match {
case Literal(const@Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) =>
@@ -2988,13 +3132,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def defaultSym: Symbol = scrutSym
def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
- def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- DEFAULT ==> body
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ (DEFAULT IF guard) ==> body
}}
}
- override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] = { import CODE._
- val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride)
+ override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] = { import CODE._
+ val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride, unchecked)
// TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) {
val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
@@ -3014,8 +3158,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// for the catch-cases in a try/catch
private object typeSwitchMaker extends SwitchMaker {
+ val unchecked = false
def switchableTpe(tp: Type) = true
val alternativesSupported = false // TODO: needs either back-end support of flattening of alternatives during typers
+ val canJump = false
// TODO: there are more treemaker-sequences that can be handled by type tests
// analyze the result of approximateTreeMaker rather than the TreeMaker itself
@@ -3037,8 +3183,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
def defaultBody: Tree = Throw(CODE.REF(defaultSym))
- def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) ==> body
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) IF guard) ==> body
}}
}
@@ -3082,34 +3228,34 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
matchEnd setInfo MethodType(List(matchRes), restpe)
def newCaseSym = NoSymbol.newLabel(freshName("case"), NoPosition) setInfo MethodType(Nil, restpe) setFlag SYNTH_CASE
- var nextCase = newCaseSym
- def caseDef(mkCase: Casegen => Tree): Tree = {
- val currCase = nextCase
- nextCase = newCaseSym
- val casegen = new OptimizedCasegen(matchEnd, nextCase, restpe)
- LabelDef(currCase, Nil, mkCase(casegen))
+ var _currCase = newCaseSym
+
+ val caseDefs = cases map { (mkCase: Casegen => Tree) =>
+ val currCase = _currCase
+ val nextCase = newCaseSym
+ _currCase = nextCase
+
+ LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase, restpe)))
}
- def catchAll = matchFailGen map { matchFailGen =>
- val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
- // must jump to matchEnd, use result generated by matchFailGen (could be `FALSE` for isDefinedAt)
- LabelDef(nextCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
- // don't cast the arg to matchEnd when using PartialFun synth in uncurry, since it won't detect the throw (see gen.withDefaultCase)
- // the cast is necessary when using typedMatchAnonFun-style PartialFun synth:
- // (_asInstanceOf(matchFailGen(scrutRef), restpe))
- } toList
+ // must compute catchAll after caseLabels (side-effects nextCase)
// catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
// if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
+ val catchAllDef = matchFailGen map { matchFailGen =>
+ val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+
+ LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
+ } toList // at most 1 element
+
+ // scrutSym == NoSymbol when generating an alternatives matcher
+ val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
// the generated block is taken apart in TailCalls under the following assumptions
// the assumption is once we encounter a case, the remainder of the block will consist of cases
// the prologue may be empty, usually it is the valdef that stores the scrut
// val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
-
- // scrutSym == NoSymbol when generating an alternatives matcher
- val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
Block(
- scrutDef ++ (cases map caseDef) ++ catchAll,
+ scrutDef ++ caseDefs ++ catchAllDef,
LabelDef(matchEnd, List(matchRes), REF(matchRes))
)
}
@@ -3179,16 +3325,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, unchecked: Boolean): (List[List[TreeMaker]], List[Tree]) = {
if (!unchecked) {
unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
- typer.context.unit.warning(cases(caseIndex).last.pos, "unreachable code")
+ reportUnreachable(cases(caseIndex).last.pos)
}
- }
- val counterExamples = if (unchecked) Nil else exhaustive(prevBinder, cases, pt)
- if (counterExamples.nonEmpty) {
- val ceString =
- if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
- else "inputs: " + counterExamples.mkString(", ")
-
- typer.context.unit.warning(prevBinder.pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
+ val counterExamples = exhaustive(prevBinder, cases, pt)
+ if (counterExamples.nonEmpty)
+ reportMissingCases(prevBinder.pos, counterExamples)
}
val optCases = doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt)
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 44fd4e9afd..7318538de7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -536,7 +536,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
def javaErasedOverridingSym(sym: Symbol): Symbol =
clazz.tpe.nonPrivateMemberAdmitting(sym.name, BRIDGE).filter(other =>
- !other.isDeferred && other.isJavaDefined && {
+ !other.isDeferred && other.isJavaDefined && !sym.enclClass.isSubClass(other.enclClass) && {
// #3622: erasure operates on uncurried types --
// note on passing sym in both cases: only sym.isType is relevant for uncurry.transformInfo
// !!! erasure.erasure(sym, uncurry.transformInfo(sym, tp)) gives erroneous or inaccessible type - check whether that's still the case!
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 52cce11deb..ccd346e72d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1753,6 +1753,12 @@ trait Typers extends Modes with Adaptations with Tags {
if (clazz.info.firstParent.typeSymbol == AnyValClass)
validateDerivedValueClass(clazz, body1)
+ if (clazz.isTrait) {
+ for (decl <- clazz.info.decls if decl.isTerm && decl.isEarlyInitialized) {
+ unit.warning(decl.pos, "Implementation restriction: early definitions in traits are not initialized before the super class is initialized.")
+ }
+ }
+
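A minimal example (assuming Scala 2.10-era early-definition syntax; not part of this patch) of a declaration that triggers the new warning: the early-initialized val on a trait is not initialized before the super class, which is exactly what the warning reports.

    trait Logging extends { val prefix: String = "log: " } with AnyRef {
      def log(msg: String): Unit = println(prefix + msg)
    }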
treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe
}
@@ -4614,10 +4620,33 @@ trait Typers extends Modes with Adaptations with Tags {
if (impSym.exists) {
var impSym1: Symbol = NoSymbol
var imports1 = imports.tail
+
+ /** It's possible that seemingly conflicting identifiers are
+ * identifiably the same after type normalization. In such cases,
+ * allow compilation to proceed. A typical example is:
+ * package object foo { type InputStream = java.io.InputStream }
+ * import foo._, java.io._
+ */
def ambiguousImport() = {
- if (!(imports.head.qual.tpe =:= imports1.head.qual.tpe && impSym == impSym1))
- ambiguousError(
- "it is imported twice in the same scope by\n"+imports.head + "\nand "+imports1.head)
+ // The types of the qualifiers from which the ambiguous imports come.
+ // If the ambiguous name is a value, these must be the same.
+ def t1 = imports.head.qual.tpe
+ def t2 = imports1.head.qual.tpe
+ // The types of the ambiguous symbols, seen as members of their qualifiers.
+ // If the ambiguous name is a monomorphic type, we can relax this far.
+ def mt1 = t1 memberType impSym
+ def mt2 = t2 memberType impSym1
+ // Monomorphism restriction on types is in part because type aliases could have the
+ // same target type but attach different variance to the parameters. Maybe it can be
+ // relaxed, but doesn't seem worth it at present.
+ if (t1 =:= t2 && impSym == impSym1)
+ log(s"Suppressing ambiguous import: $t1 =:= $t2 && $impSym == $impSym1")
+ else if (mt1 =:= mt2 && name.isTypeName && impSym.isMonomorphicType && impSym1.isMonomorphicType)
+ log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $impSym and $impSym1 are equivalent")
+ else {
+ log(s"Import is genuinely ambiguous: !($t1 =:= $t2)")
+ ambiguousError(s"it is imported twice in the same scope by\n${imports.head}\nand ${imports1.head}")
+ }
}
while (errorContainer == null && !imports1.isEmpty &&
(!imports.head.isExplicitImport(name) ||
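A sketch of the situation the relaxed check above now accepts, following the example given in the comment (only the member name `firstByte` is invented):

    package object foo { type InputStream = java.io.InputStream }

    object Test {
      import foo._, java.io._
      // Both imports supply an `InputStream`, but they are aliases of the same
      // monomorphic type, so the ambiguity is suppressed instead of producing
      // "it is imported twice in the same scope by ..."
      def firstByte(in: InputStream): Int = in.read()
    }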
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 4c20d14406..1b89f3db44 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -197,42 +197,61 @@ trait Unapplies extends ast.TreeDSL
)
}
+ /**
+ * Generates copy methods for case classes. Copy only has defaults on the first
+ * parameter list, as of SI-5009.
+ *
+ * The parameter types of the copy method need to be exactly the same as the parameter
+ * types of the primary constructor. Just copying the TypeTree is not enough: a type `C`
+ * might refer to one thing *inside* the class (i.e. as a parameter type of `copy`) and to
+ * something else *outside* the class (i.e. in the class parameter list).
+ *
+ * One such example is t0054.scala:
+ * class A {
+ * case class B(x: C) extends A { def copy(x: C = x) = ... }
+ * class C {} ^ ^
+ * } (1) (2)
+ *
+ * The reference (1) to C is `A.this.C`. The reference (2) is `B.this.C` - not the same.
+ *
+ * This is fixed with a hack currently. `Unapplies.caseClassCopyMeth`, which creates the
+ * copy method, uses empty `TypeTree()` nodes for parameter types.
+ *
+ * In `Namers.enterDefDef`, the copy method gets a special type completer (`enterCopyMethod`).
+ * Before computing the body type of `copy`, the class parameter types are assigned to the copy
+ * method's parameters.
+ *
+ * This attachment class stores the copy method parameter ValDefs as an attachment in the
+ * ClassDef of the case class.
+ */
def caseClassCopyMeth(cdef: ClassDef): Option[DefDef] = {
def isDisallowed(vd: ValDef) = isRepeatedParamType(vd.tpt) || isByNameParamType(vd.tpt)
- val cparamss = constrParamss(cdef)
- val flat = cparamss.flatten
+ val classParamss = constrParamss(cdef)
- if (cdef.symbol.hasAbstractFlag || (flat exists isDisallowed)) None
+ if (cdef.symbol.hasAbstractFlag || mexists(classParamss)(isDisallowed)) None
else {
+ def makeCopyParam(vd: ValDef, putDefault: Boolean) = {
+ val rhs = if (putDefault) toIdent(vd) else EmptyTree
+ val flags = PARAM | (vd.mods.flags & IMPLICIT) | (if (putDefault) DEFAULTPARAM else 0)
+ // empty tpt: see comment above
+ val tpt = atPos(vd.pos.focus)(TypeTree() setOriginal vd.tpt)
+ treeCopy.ValDef(vd, Modifiers(flags), vd.name, tpt, rhs)
+ }
+
val tparams = cdef.tparams map copyUntypedInvariant
- // the parameter types have to be exactly the same as the constructor's parameter types; so it's
- // not good enough to just duplicated the (untyped) tpt tree; the parameter types are removed here
- // and re-added in ``finishWith'' in the namer.
- def paramWithDefault(vd: ValDef) =
- treeCopy.ValDef(vd, vd.mods | DEFAULTPARAM, vd.name, atPos(vd.pos.focus)(TypeTree() setOriginal vd.tpt), toIdent(vd))
-
- val (copyParamss, funParamss) = cparamss match {
- case Nil => (Nil, Nil)
+ val paramss = classParamss match {
+ case Nil => Nil
case ps :: pss =>
- (List(ps.map(paramWithDefault)), mmap(pss)(p => copyUntyped[ValDef](p).copy(rhs = EmptyTree)))
+ ps.map(makeCopyParam(_, putDefault = true)) :: mmap(pss)(makeCopyParam(_, putDefault = false))
}
val classTpe = classType(cdef, tparams)
- val bodyTpe = funParamss.foldRight(classTpe)((params, restp) => gen.scalaFunctionConstr(params.map(_.tpt), restp))
-
- val argss = copyParamss match {
- case Nil => Nil
- case ps :: _ => mmap(ps :: funParamss)(toIdent)
- }
- def mkFunction(vparams: List[ValDef], body: Tree) = Function(vparams, body)
- val body = funParamss.foldRight(New(classTpe, argss): Tree)(mkFunction)
- // [Eugene++] no longer compiles after I moved the `Function` case class into scala.reflect.internal
- // val body = funParamss.foldRight(New(classTpe, argss): Tree)(Function)
-
- Some(atPos(cdef.pos.focus)(
- DefDef(Modifiers(SYNTHETIC), nme.copy, tparams, copyParamss, bodyTpe,
- body)
- ))
+ val argss = mmap(paramss)(toIdent)
+ val body: Tree = New(classTpe, argss)
+ val copyDefDef = atPos(cdef.pos.focus)(
+ DefDef(Modifiers(SYNTHETIC), nme.copy, tparams, paramss, TypeTree(), body)
+ )
+ Some(copyDefDef)
}
}
}
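For illustration, a hedged sketch of roughly what `caseClassCopyMeth` now synthesizes for a case class with two parameter lists (the class is hypothetical); per SI-5009, defaults go only on the first list, and the implicit flag is preserved on the second:

    case class Conf(host: String, port: Int = 80)(implicit timeout: Int)

    // Approximately the generated method (parameter and result types start out as
    // empty TypeTree()s and are filled in later by the namer, as described above):
    //
    //   def copy(host: String = host, port: Int = port)(implicit timeout: Int) =
    //     new Conf(host, port)(timeout)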
diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala
index c782181f21..18cbf9c4b7 100644
--- a/src/compiler/scala/tools/reflect/StdTags.scala
+++ b/src/compiler/scala/tools/reflect/StdTags.scala
@@ -12,7 +12,14 @@ import scala.reflect.runtime.{universe => ru}
object StdTags {
// root mirror is fine for these guys, since scala-library.jar is guaranteed to be reachable from the root mirror
- lazy val tagOfString = ru.TypeTag.String
+ lazy val tagOfString = ru.TypeTag[String](
+ ru.rootMirror,
+ new TypeCreator {
+ def apply[U <: BaseUniverse with Singleton](m: MirrorOf[U]): U # Type = {
+ val u = m.universe
+ u.definitions.StringClass.asTypeConstructor
+ }
+ })
lazy val tagOfListOfString = ru.TypeTag[List[String]](
ru.rootMirror,
new TypeCreator {
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index b4178102b9..589c5c7eb0 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -216,7 +216,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
(singleton, jmeth)
}
- def runExpr(expr: Tree, freeTypes: Map[TypeName, Type] = Map[TypeName, Type]()): Any = {
+ def runExpr(expr: Tree): Any = {
val freeTerms = expr.freeTerms // need to calculate them here, because later on they will be erased
val thunks = freeTerms map (fte => () => fte.value) // need to be lazy in order not to distort evaluation order
diff --git a/src/eclipse/README b/src/eclipse/README
deleted file mode 100644
index 58dbd83815..0000000000
--- a/src/eclipse/README
+++ /dev/null
@@ -1,23 +0,0 @@
-Eclipse project files
-=====================
-
-Import all projects inside Eclipse by choosing File/Import Existing Projects
-and navigate to src/eclipse. Check all projects and click ok.
-
-IMPORTANT
-=========
-
-You need to define a `path variable` inside Eclipse. Define SCALA_BASEDIR in
-Preferences/General/Workspace/Linked Resources. The value should be the absolute
-path to your scala checkout. All paths in project files are relative to this one,
-so nothing will work before you do so.
-
-DETAILS
-=======
-
-The compiler project depends on the library, reflect, asm and fjbg projects. The
-builder will take care of the correct ordering, and changes in one project will
-be picked up by the dependent projects.
-
-The output directory is set to be build/quick, so the runner scripts in quick
-work as they are (run an ant build to have the generated once) \ No newline at end of file
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
new file mode 100644
index 0000000000..7ef775218b
--- /dev/null
+++ b/src/eclipse/README.md
@@ -0,0 +1,52 @@
+Eclipse project files
+=====================
+
+Import all projects inside Eclipse by choosing File/Import Existing Projects
+and navigate to src/eclipse. Check all projects and click ok.
+
+IMPORTANT
+=========
+
+1. You need to define a `path variable` inside Eclipse. Define SCALA_BASEDIR in
+Preferences/General/Workspace/Linked Resources. The value should be the absolute
+path to your scala checkout. All paths in project files are relative to this one,
+so nothing will work before you do so.
+
+2. The Eclipse Java compiler does not allow certain calls to restricted APIs in the
+JDK. The Scala library uses such APIs, so you'd see this error:
+
+ Access restriction: The method compareAndSwapObject(Object, long, Object, Object)
+ from the type Unsafe is not accessible due to restriction on required library.
+You can *fix* it by allowing calls to restricted APIs in `Java=>Compiler=>Errors/Warnings=>Deprecated and Restricted API`
+settings.
+
+3. The IDE guesses the Scala library version by looking for `library.properties` inside
+the library jar. The `scala-library` project does not have such a file, so you will see
+an error about incompatible libraries. You can work around it by adding a `library.properties`
+inside `src/library` with the following contents:
+
+ #Mon, 04 Jun 2012 02:08:56 +0200
+ version.number=2.10.0-20120603-141530-b34313db72
+ maven.version.number=2.10.0-SNAPSHOT
+ osgi.version.number=2.10.0.v20120603-141530-b34313db72
+ copyright.string=Copyright 2002-2011, LAMP/EPFL
+
+4. Project files are tracked by Git, so adding them to `.gitignore` won't prevent them
+from being shown as dirty in `git status`. You can still ignore them by telling Git to
+consider them unchanged:
+
+ git update-index --assume-unchanged `find src/eclipse -iname .classpath -or -iname .project`
+
+If you want to go back to normal (for instance, to commit your changes to project files), run:
+
+ git update-index --no-assume-unchanged `find src/eclipse -iname .classpath -or -iname .project`
+
+DETAILS
+=======
+
+The compiler project depends on the library, reflect, asm and fjbg projects. The
+builder will take care of the correct ordering, and changes in one project will
+be picked up by the dependent projects.
+
+The output directory is set to be build/quick, so the runner scripts in quick
+work as they are (run an ant build to have them generated once) \ No newline at end of file
diff --git a/src/eclipse/reflect/.classpath b/src/eclipse/reflect/.classpath
index 2a764d5142..3fb1d08d4d 100644
--- a/src/eclipse/reflect/.classpath
+++ b/src/eclipse/reflect/.classpath
@@ -1,6 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="output" path="build-quick-reflect"/>
diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath
index ff3b63f3ca..e0264b9856 100644
--- a/src/eclipse/scala-compiler/.classpath
+++ b/src/eclipse/scala-compiler/.classpath
@@ -1,6 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry combineaccessrules="false" kind="src" path="/fjbg"/>
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
@@ -9,6 +10,5 @@
<classpathentry kind="lib" path="lib/ant/ant.jar"/>
<classpathentry kind="lib" path="lib/jline.jar"/>
<classpathentry kind="lib" path="lib/msil.jar"/>
- <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
<classpathentry kind="output" path="build-quick-compiler"/>
</classpath>
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 99bd7f0736..44025d5358 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -100,11 +100,19 @@ object Predef extends LowPriorityImplicits {
// def AnyRef = scala.AnyRef
// Manifest types, companions, and incantations for summoning
+ @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.")
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
type ClassManifest[T] = scala.reflect.ClassManifest[T]
+ @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
type OptManifest[T] = scala.reflect.OptManifest[T]
+ @annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
+ @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
type Manifest[T] = scala.reflect.Manifest[T]
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
val ClassManifest = scala.reflect.ClassManifest
+ @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
val Manifest = scala.reflect.Manifest
+ @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
val NoManifest = scala.reflect.NoManifest
def manifest[T](implicit m: Manifest[T]) = m
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 2a52d0ac0b..2de0c57253 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -18,8 +18,9 @@ import java.{ lang => jl }
import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean }
import scala.concurrent.util.Duration
-import scala.concurrent.impl.NonFatal
+import scala.util.control.NonFatal
import scala.Option
+import scala.util.{Try, Success, Failure}
import scala.annotation.tailrec
import scala.collection.mutable.Stack
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 7549bf8314..4c6347dce0 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -15,6 +15,7 @@ import java.util.Collection
import scala.concurrent.forkjoin._
import scala.concurrent.{ ExecutionContext, Awaitable }
import scala.concurrent.util.Duration
+import scala.util.control.NonFatal
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 6a3487adde..8012ea6a93 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -13,6 +13,7 @@ package scala.concurrent.impl
import scala.concurrent.util.Duration
import scala.concurrent.{Awaitable, ExecutionContext, CanAwait}
import scala.collection.mutable.Stack
+import scala.util.control.NonFatal
private[concurrent] trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index af2ab04576..4471e417d9 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -87,6 +87,11 @@ object BigInt {
def apply(x: String, radix: Int): BigInt =
new BigInt(new BigInteger(x, radix))
+ /** Translates a `java.math.BigInteger` into a BigInt.
+ */
+ def apply(x: BigInteger): BigInt =
+ new BigInt(x)
+
/** Returns a positive BigInt that is probably prime, with the specified bitLength.
*/
def probablePrime(bitLength: Int, rnd: scala.util.Random): BigInt =
@@ -96,9 +101,13 @@ object BigInt {
*/
implicit def int2bigInt(i: Int): BigInt = apply(i)
- /** Implicit conversion from long to BigInt
+ /** Implicit conversion from `Long` to `BigInt`.
*/
implicit def long2bigInt(l: Long): BigInt = apply(l)
+
+ /** Implicit conversion from `java.math.BigInteger` to `scala.BigInt`.
+ */
+ implicit def javaBigInteger2bigInt(x: BigInteger): BigInt = apply(x)
}
/**
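A couple of REPL-style lines showing the additions above in use (the value names are made up):

    import java.math.BigInteger

    val javaBig = new BigInteger("123456789012345678901234567890")
    val viaApply: BigInt = BigInt(javaBig)   // the new BigInt(x: BigInteger) factory
    val viaConv:  BigInt = javaBig           // via the implicit javaBigInteger2bigInt
    val sum = viaApply + viaConv + 1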
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 6460db534d..5f90c32e22 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -106,6 +106,15 @@ package object scala {
type PartialOrdering[T] = scala.math.PartialOrdering[T]
type PartiallyOrdered[T] = scala.math.PartiallyOrdered[T]
+ type Either[+A, +B] = scala.util.Either[A, B]
+ val Either = scala.util.Either
+
+ type Left[+A, +B] = scala.util.Left[A, B]
+ val Left = scala.util.Left
+
+ type Right[+A, +B] = scala.util.Right[A, B]
+ val Right = scala.util.Right
+
// Annotations which we might move to annotation.*
/*
type SerialVersionUID = annotation.SerialVersionUID
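A small sketch of the effect of the aliases above: `Either`, `Left` and `Right` keep resolving without an import even though the classes now live in `scala.util` (the method here is invented):

    // Resolves through the scala package object aliases added above.
    def parsePort(s: String): Either[String, Int] =
      try Right(s.toInt)
      catch { case _: NumberFormatException => Left("not a number: " + s) }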
diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala
index d89d31f689..f143bf8712 100644
--- a/src/library/scala/reflect/ClassManifest.scala
+++ b/src/library/scala/reflect/ClassManifest.scala
@@ -11,23 +11,12 @@ package scala.reflect
import scala.collection.mutable.{ WrappedArray, ArrayBuilder }
import java.lang.{ Class => jClass }
-/** A `ClassManifest[T]` is an opaque descriptor for type `T`.
- * It is used by the compiler to preserve information necessary
- * for instantiating `Arrays` in those cases where the element type
- * is unknown at compile time.
- *
- * The type-relation operators make an effort to present a more accurate
- * picture than can be realized with erased types, but they should not be
- * relied upon to give correct answers. In particular they are likely to
- * be wrong when variance is involved or when a subtype has a different
- * number of type arguments than a supertype.
- */
-@deprecated("Use `@scala.reflect.ClassTag` instead", "2.10.0")
-trait ClassManifest[T] extends OptManifest[T] with ClassTag[T] with Equals with Serializable {
- /** A class representing the type `U` to which `T` would be erased. Note
- * that there is no subtyping relationship between `T` and `U`. */
- def erasure: jClass[_]
- override def runtimeClass: jClass[_] = erasure
+@deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
+ self: ClassManifest[T] =>
+
+ @deprecated("Use runtimeClass instead", "2.10.0")
+ def erasure: jClass[_] = runtimeClass
private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = {
def loop(left: Set[jClass[_]], seen: Set[jClass[_]]): Boolean = {
@@ -53,6 +42,7 @@ trait ClassManifest[T] extends OptManifest[T] with ClassTag[T] with Equals with
* of the type represented by `that` manifest, subject to the limitations
* described in the header.
*/
+ @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
def <:<(that: ClassManifest[_]): Boolean = {
// All types which could conform to these types will override <:<.
def cannotMatch = {
@@ -86,6 +76,7 @@ trait ClassManifest[T] extends OptManifest[T] with ClassTag[T] with Equals with
* of the type represented by `that` manifest, subject to the limitations
* described in the header.
*/
+ @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
def >:>(that: ClassManifest[_]): Boolean =
that <:< this
@@ -94,49 +85,47 @@ trait ClassManifest[T] extends OptManifest[T] with ClassTag[T] with Equals with
case _ => false
}
- /** Tests whether the type represented by this manifest is equal to
- * the type represented by `that` manifest, subject to the limitations
- * described in the header.
- */
- override def equals(that: Any): Boolean = that match {
- case m: ClassManifest[_] => (m canEqual this) && (this.erasure == m.erasure)
- case _ => false
- }
- override def hashCode = this.erasure.##
-
protected def arrayClass[T](tp: jClass[_]): jClass[Array[T]] =
java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]]
+ @deprecated("Use wrap instead", "2.10.0")
def arrayManifest: ClassManifest[Array[T]] =
ClassManifest.classType[Array[T]](arrayClass[T](erasure), this)
override def newArray(len: Int): Array[T] =
java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]]
+ @deprecated("Use wrap.newArray instead", "2.10.0")
def newArray2(len: Int): Array[Array[T]] =
java.lang.reflect.Array.newInstance(arrayClass[T](erasure), len)
.asInstanceOf[Array[Array[T]]]
+ @deprecated("Use wrap.wrap.newArray instead", "2.10.0")
def newArray3(len: Int): Array[Array[Array[T]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](erasure)), len)
.asInstanceOf[Array[Array[Array[T]]]]
+ @deprecated("Use wrap.wrap.wrap.newArray instead", "2.10.0")
def newArray4(len: Int): Array[Array[Array[Array[T]]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure))), len)
.asInstanceOf[Array[Array[Array[Array[T]]]]]
+ @deprecated("Use wrap.wrap.wrap.wrap.newArray instead", "2.10.0")
def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure)))), len)
.asInstanceOf[Array[Array[Array[Array[Array[T]]]]]]
+ @deprecated("Create WrappedArray directly instead", "2.10.0")
def newWrappedArray(len: Int): WrappedArray[T] =
// it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]]
+ @deprecated("Use ArrayBuilder.make(this) instead", "2.10.0")
def newArrayBuilder(): ArrayBuilder[T] =
// it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
new ArrayBuilder.ofRef[T with AnyRef]()(this.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]]
+ @deprecated("Use scala.reflect.runtime.universe.TypeTag to capture type structure instead", "2.10.0")
def typeArguments: List[OptManifest[_]] = List()
protected def argString =
@@ -145,25 +134,33 @@ trait ClassManifest[T] extends OptManifest[T] with ClassTag[T] with Equals with
else ""
}
-/** The object `ClassManifest` defines factory methods for manifests.
+/** `ClassManifestFactory` defines factory methods for manifests.
* It is intended for use by the compiler and should not be used in client code.
+ *
+ * Unlike `ClassManifest`, this factory isn't annotated with a deprecation warning.
+ * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests.
+ *
+ * In a perfect world, we would just remove the @deprecated annotation from the `ClassManifest` object
+ * and then delete it in 2.11. After all, that object is explicitly marked as internal, so no one should use it.
+ * However, a lot of existing libraries disregarded the scaladoc that comes with `ClassManifest`,
+ * so we need to nudge them into migrating before removing it out of the blue.
+ * Hence we've introduced this design decision as the lesser of two evils.
*/
-@deprecated("Use `@scala.reflect.ClassTag` instead", "2.10.0")
-object ClassManifest {
- val Byte = Manifest.Byte
- val Short = Manifest.Short
- val Char = Manifest.Char
- val Int = Manifest.Int
- val Long = Manifest.Long
- val Float = Manifest.Float
- val Double = Manifest.Double
- val Boolean = Manifest.Boolean
- val Unit = Manifest.Unit
- val Any = Manifest.Any
- val Object = Manifest.Object
- val AnyVal = Manifest.AnyVal
- val Nothing = Manifest.Nothing
- val Null = Manifest.Null
+object ClassManifestFactory {
+ val Byte = ManifestFactory.Byte
+ val Short = ManifestFactory.Short
+ val Char = ManifestFactory.Char
+ val Int = ManifestFactory.Int
+ val Long = ManifestFactory.Long
+ val Float = ManifestFactory.Float
+ val Double = ManifestFactory.Double
+ val Boolean = ManifestFactory.Boolean
+ val Unit = ManifestFactory.Unit
+ val Any = ManifestFactory.Any
+ val Object = ManifestFactory.Object
+ val AnyVal = ManifestFactory.AnyVal
+ val Nothing = ManifestFactory.Nothing
+ val Null = ManifestFactory.Null
def fromClass[T](clazz: jClass[T]): ClassManifest[T] = clazz match {
case java.lang.Byte.TYPE => Byte.asInstanceOf[ClassManifest[T]]
@@ -211,7 +208,7 @@ object ClassManifest {
* added so that erasure can be calculated without reflection. */
def abstractType[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] =
new ClassManifest[T] {
- def erasure = clazz
+ override def runtimeClass = clazz
override val typeArguments = args.toList
override def toString = prefix.toString+"#"+name+argString
}
@@ -223,7 +220,7 @@ object ClassManifest {
*/
def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] =
new ClassManifest[T] {
- def erasure = upperbound.erasure
+ override def runtimeClass = upperbound.erasure
override val typeArguments = args.toList
override def toString = prefix.toString+"#"+name+argString
}
@@ -233,7 +230,7 @@ object ClassManifest {
* a top-level or static class */
private class ClassTypeManifest[T <: AnyRef](
prefix: Option[OptManifest[_]],
- val erasure: jClass[_],
+ val runtimeClass: jClass[_],
override val typeArguments: List[OptManifest[_]]) extends ClassManifest[T]
{
override def toString =
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index 860e7bac28..5255c44f10 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -19,18 +19,20 @@ import scala.runtime.ScalaRunTime.arrayClass
* @see [[scala.reflect.base.TypeTags]]
*/
@annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
-trait ClassTag[T] extends Equals with Serializable {
+trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable {
// please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder`
// class tags, and all tags in general, should be as minimalistic as possible
- /** Returns a runtime class of type `T` */
+ /** A class representing the type `U` to which `T` would be erased.
+ * Note that there is no subtyping relationship between `T` and `U`.
+ */
def runtimeClass: jClass[_]
/** Produces a `ClassTag` that knows how to build `Array[Array[T]]` */
def wrap: ClassTag[Array[T]] = ClassTag[Array[T]](arrayClass(runtimeClass))
/** Produces a new array with element type `T` and length `len` */
- def newArray(len: Int): Array[T] =
+ override def newArray(len: Int): Array[T] =
runtimeClass match {
case java.lang.Byte.TYPE => new Array[Byte](len).asInstanceOf[Array[T]]
case java.lang.Short.TYPE => new Array[Short](len).asInstanceOf[Array[T]]
@@ -65,7 +67,6 @@ object ClassTag {
private val NothingTYPE = classOf[scala.runtime.Nothing$]
private val NullTYPE = classOf[scala.runtime.Null$]
private val ObjectTYPE = classOf[java.lang.Object]
- private val StringTYPE = classOf[java.lang.String]
val Byte : ClassTag[scala.Byte] = new ClassTag[scala.Byte]{ def runtimeClass = java.lang.Byte.TYPE; private def readResolve() = ClassTag.Byte }
val Short : ClassTag[scala.Short] = new ClassTag[scala.Short]{ def runtimeClass = java.lang.Short.TYPE; private def readResolve() = ClassTag.Short }
@@ -78,11 +79,10 @@ object ClassTag {
val Unit : ClassTag[scala.Unit] = new ClassTag[scala.Unit]{ def runtimeClass = java.lang.Void.TYPE; private def readResolve() = ClassTag.Unit }
val Any : ClassTag[scala.Any] = new ClassTag[scala.Any]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.Any }
val Object : ClassTag[java.lang.Object] = new ClassTag[java.lang.Object]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.Object }
- val AnyVal : ClassTag[scala.AnyVal] = new ClassTag[scala.AnyVal]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.AnyVal }
- val AnyRef : ClassTag[scala.AnyRef] = new ClassTag[scala.AnyRef]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.AnyRef }
+ val AnyVal : ClassTag[scala.AnyVal] = ClassTag.Object.asInstanceOf[ClassTag[scala.AnyVal]]
+ val AnyRef : ClassTag[scala.AnyRef] = ClassTag.Object.asInstanceOf[ClassTag[scala.AnyRef]]
val Nothing : ClassTag[scala.Nothing] = new ClassTag[scala.Nothing]{ def runtimeClass = NothingTYPE; private def readResolve() = ClassTag.Nothing }
val Null : ClassTag[scala.Null] = new ClassTag[scala.Null]{ def runtimeClass = NullTYPE; private def readResolve() = ClassTag.Null }
- val String : ClassTag[java.lang.String] = new ClassTag[java.lang.String]{ def runtimeClass = StringTYPE; private def readResolve() = ClassTag.String }
def apply[T](runtimeClass1: jClass[_]): ClassTag[T] =
runtimeClass1 match {
@@ -96,7 +96,6 @@ object ClassTag {
case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]]
case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]]
case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]]
- case StringTYPE => ClassTag.String.asInstanceOf[ClassTag[T]]
case _ => new ClassTag[T]{ def runtimeClass = runtimeClass1 }
}
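A brief usage sketch of the `ClassTag` API as it stands after the trimming above (the helper method is invented); `classTag` is the summoner defined in the `scala.reflect` package object:

    import scala.reflect.{ClassTag, classTag}

    def threeCopies[T: ClassTag](x: T): Array[T] = {
      val arr = classTag[T].newArray(3)   // dispatches on runtimeClass, per newArray above
      var i = 0
      while (i < arr.length) { arr(i) = x; i += 1 }
      arr
    }

    // threeCopies(42) builds an Array[Int]; threeCopies("ab") builds an Array[String]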
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index 7e320b42eb..9347f5b6bb 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -39,7 +39,7 @@ import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
*
*/
@annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
-@deprecated("Use TypeTag instead", "2.10.0")
+@deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = Nil
@@ -72,17 +72,19 @@ abstract class AnyValManifest[T <: AnyVal](override val toString: String) extend
override val hashCode = System.identityHashCode(this)
}
-/** The object `Manifest` defines factory methods for manifests.
- * It is intended for use by the compiler and should not be used
- * in client code.
+/** `ManifestFactory` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used in client code.
+ *
+ * Unlike `Manifest`, this factory isn't annotated with a deprecation warning.
+ * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests.
+ * Why so complicated? See the comments on `ClassManifestFactory`.
*/
-@deprecated("Use TypeTag instead", "2.10.0")
-object Manifest {
+object ManifestFactory {
def valueManifests: List[AnyValManifest[_]] =
List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit)
val Byte: AnyValManifest[Byte] = new AnyValManifest[scala.Byte]("Byte") {
- def erasure = java.lang.Byte.TYPE
+ def runtimeClass = java.lang.Byte.TYPE
override def newArray(len: Int): Array[Byte] = new Array[Byte](len)
override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len))
override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte()
@@ -90,7 +92,7 @@ object Manifest {
}
val Short: AnyValManifest[Short] = new AnyValManifest[scala.Short]("Short") {
- def erasure = java.lang.Short.TYPE
+ def runtimeClass = java.lang.Short.TYPE
override def newArray(len: Int): Array[Short] = new Array[Short](len)
override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len))
override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort()
@@ -98,7 +100,7 @@ object Manifest {
}
val Char: AnyValManifest[Char] = new AnyValManifest[scala.Char]("Char") {
- def erasure = java.lang.Character.TYPE
+ def runtimeClass = java.lang.Character.TYPE
override def newArray(len: Int): Array[Char] = new Array[Char](len)
override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len))
override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar()
@@ -106,7 +108,7 @@ object Manifest {
}
val Int: AnyValManifest[Int] = new AnyValManifest[scala.Int]("Int") {
- def erasure = java.lang.Integer.TYPE
+ def runtimeClass = java.lang.Integer.TYPE
override def newArray(len: Int): Array[Int] = new Array[Int](len)
override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len))
override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt()
@@ -114,7 +116,7 @@ object Manifest {
}
val Long: AnyValManifest[Long] = new AnyValManifest[scala.Long]("Long") {
- def erasure = java.lang.Long.TYPE
+ def runtimeClass = java.lang.Long.TYPE
override def newArray(len: Int): Array[Long] = new Array[Long](len)
override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len))
override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong()
@@ -122,7 +124,7 @@ object Manifest {
}
val Float: AnyValManifest[Float] = new AnyValManifest[scala.Float]("Float") {
- def erasure = java.lang.Float.TYPE
+ def runtimeClass = java.lang.Float.TYPE
override def newArray(len: Int): Array[Float] = new Array[Float](len)
override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len))
override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat()
@@ -130,7 +132,7 @@ object Manifest {
}
val Double: AnyValManifest[Double] = new AnyValManifest[scala.Double]("Double") {
- def erasure = java.lang.Double.TYPE
+ def runtimeClass = java.lang.Double.TYPE
override def newArray(len: Int): Array[Double] = new Array[Double](len)
override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len))
override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble()
@@ -138,7 +140,7 @@ object Manifest {
}
val Boolean: AnyValManifest[Boolean] = new AnyValManifest[scala.Boolean]("Boolean") {
- def erasure = java.lang.Boolean.TYPE
+ def runtimeClass = java.lang.Boolean.TYPE
override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len)
override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len))
override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean()
@@ -146,7 +148,7 @@ object Manifest {
}
val Unit: AnyValManifest[Unit] = new AnyValManifest[scala.Unit]("Unit") {
- def erasure = java.lang.Void.TYPE
+ def runtimeClass = java.lang.Void.TYPE
override def newArray(len: Int): Array[Unit] = new Array[Unit](len)
override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len))
override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit()
@@ -180,7 +182,7 @@ object Manifest {
}
private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] {
- lazy val erasure = value.getClass
+ lazy val runtimeClass = value.getClass
override lazy val toString = value.toString + ".type"
}
@@ -217,7 +219,7 @@ object Manifest {
/** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class. */
private class ClassTypeManifest[T](prefix: Option[Manifest[_]],
- val erasure: Predef.Class[_],
+ val runtimeClass: Predef.Class[_],
override val typeArguments: List[Manifest[_]]) extends Manifest[T] {
override def toString =
(if (prefix.isEmpty) "" else prefix.get.toString+"#") +
@@ -233,7 +235,7 @@ object Manifest {
* added so that erasure can be calculated without reflection. */
def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new Manifest[T] {
- def erasure = upperBound
+ def runtimeClass = upperBound
override val typeArguments = args.toList
override def toString = prefix.toString+"#"+name+argString
}
@@ -242,7 +244,7 @@ object Manifest {
*/
def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] =
new Manifest[T] {
- def erasure = upperBound.erasure
+ def runtimeClass = upperBound.erasure
override def toString =
"_" +
(if (lowerBound eq Nothing) "" else " >: "+lowerBound) +
@@ -252,7 +254,7 @@ object Manifest {
/** Manifest for the intersection type `parents_0 with ... with parents_n'. */
def intersectionType[T](parents: Manifest[_]*): Manifest[T] =
new Manifest[T] {
- def erasure = parents.head.erasure
+ def runtimeClass = parents.head.erasure
override def toString = parents.mkString(" with ")
}
} \ No newline at end of file
diff --git a/src/library/scala/reflect/NoManifest.scala b/src/library/scala/reflect/NoManifest.scala
index 7b8037272c..95b4ddca1c 100644
--- a/src/library/scala/reflect/NoManifest.scala
+++ b/src/library/scala/reflect/NoManifest.scala
@@ -10,7 +10,7 @@ package scala.reflect
/** One of the branches of an [[scala.reflect.OptManifest]].
*/
-@deprecated("Use `@scala.reflect.TypeTag` instead", "2.10.0")
+@deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
object NoManifest extends OptManifest[Nothing] with Serializable {
override def toString = "<?>"
} \ No newline at end of file
diff --git a/src/library/scala/reflect/OptManifest.scala b/src/library/scala/reflect/OptManifest.scala
index 46f23c4e22..0ea66cb53d 100644
--- a/src/library/scala/reflect/OptManifest.scala
+++ b/src/library/scala/reflect/OptManifest.scala
@@ -14,5 +14,5 @@ package scala.reflect
*
* @author Martin Odersky
*/
-@deprecated("Use `@scala.reflect.TypeTag` instead", "2.10.0")
+@deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
trait OptManifest[+T] extends Serializable \ No newline at end of file
diff --git a/src/library/scala/reflect/base/Base.scala b/src/library/scala/reflect/base/Base.scala
index 490a9e8c03..a4e6256f4d 100644
--- a/src/library/scala/reflect/base/Base.scala
+++ b/src/library/scala/reflect/base/Base.scala
@@ -23,7 +23,7 @@ class Base extends Universe { self =>
new TermSymbol(this, name, flags)
def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = {
- val c = newClassSymbol(name.toTypeName, pos, flags)
+ val c = new ModuleClassSymbol(this, name.toTypeName, flags)
val m = new ModuleSymbol(this, name.toTermName, flags, c)
(m, c)
}
@@ -77,6 +77,8 @@ class Base extends Universe { self =>
class ClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet)
extends TypeSymbol(owner, name, flags) with ClassSymbolBase
+ class ModuleClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet)
+ extends ClassSymbol(owner, name, flags) { override def isModuleClass = true }
implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol])
class FreeTermSymbol(owner: Symbol, name: TermName, flags: FlagSet)
@@ -93,11 +95,14 @@ class Base extends Universe { self =>
}
// todo. write a decent toString that doesn't crash on recursive types
- class Type extends TypeBase { def typeSymbol: Symbol = NoSymbol }
+ class Type extends TypeBase {
+ def typeSymbol: Symbol = NoSymbol
+ def termSymbol: Symbol = NoSymbol
+ }
implicit val TypeTagg = ClassTag[Type](classOf[Type])
- val NoType = new Type
- val NoPrefix = new Type
+ val NoType = new Type { override def toString = "NoType" }
+ val NoPrefix = new Type { override def toString = "NoPrefix" }
class SingletonType extends Type
implicit val SingletonTypeTag = ClassTag[SingletonType](classOf[SingletonType])
@@ -106,7 +111,7 @@ class Base extends Universe { self =>
object ThisType extends ThisTypeExtractor
implicit val ThisTypeTag = ClassTag[ThisType](classOf[ThisType])
- case class SingleType(pre: Type, sym: Symbol) extends SingletonType
+ case class SingleType(pre: Type, sym: Symbol) extends SingletonType { override val termSymbol = sym }
object SingleType extends SingleTypeExtractor
implicit val SingleTypeTag = ClassTag[SingleType](classOf[SingleType])
@@ -341,9 +346,9 @@ class Base extends Universe { self =>
class Mirror extends MirrorOf[self.type] {
val universe: self.type = self
- lazy val RootClass = new ClassSymbol(NoSymbol, tpnme.ROOT, NoFlags)
+ lazy val RootClass = new ClassSymbol(NoSymbol, tpnme.ROOT, NoFlags) { override def isModuleClass = true }
lazy val RootPackage = new ModuleSymbol(NoSymbol, nme.ROOT, NoFlags, RootClass)
- lazy val EmptyPackageClass = new ClassSymbol(RootClass, tpnme.EMPTY_PACKAGE_NAME, NoFlags)
+ lazy val EmptyPackageClass = new ClassSymbol(RootClass, tpnme.EMPTY_PACKAGE_NAME, NoFlags) { override def isModuleClass = true }
lazy val EmptyPackage = new ModuleSymbol(RootClass, nme.EMPTY_PACKAGE_NAME, NoFlags, EmptyPackageClass)
def staticClass(fullName: String): ClassSymbol =
@@ -420,7 +425,6 @@ class Base extends Universe { self =>
lazy val NullTpe = TypeRef(ScalaPrefix, NullClass, Nil)
lazy val ObjectTpe = TypeRef(JavaLangPrefix, ObjectClass, Nil)
lazy val AnyRefTpe = ObjectTpe
- lazy val StringTpe = TypeRef(JavaLangPrefix, StringClass, Nil)
private var nodeCount = 0 // not synchronized
diff --git a/src/library/scala/reflect/base/StandardDefinitions.scala b/src/library/scala/reflect/base/StandardDefinitions.scala
index eff23b539e..2f270a5911 100644
--- a/src/library/scala/reflect/base/StandardDefinitions.scala
+++ b/src/library/scala/reflect/base/StandardDefinitions.scala
@@ -27,7 +27,6 @@ trait StandardTypes {
val NothingTpe: Type
val NullTpe: Type
- val StringTpe: Type
}
trait StandardDefinitions extends StandardTypes {
diff --git a/src/library/scala/reflect/base/Symbols.scala b/src/library/scala/reflect/base/Symbols.scala
index 9404520073..ced1f33395 100644
--- a/src/library/scala/reflect/base/Symbols.scala
+++ b/src/library/scala/reflect/base/Symbols.scala
@@ -174,6 +174,12 @@ trait Symbols { self: Universe =>
*/
def isClass: Boolean = false
+ /** Does this symbol represent the definition of a class implicitly associated
+ * with an object definition (module class in scala compiler parlance).
+ * If yes, `isType` is also guaranteed to be true.
+ */
+ def isModuleClass: Boolean = false
+
/** This symbol cast to a ClassSymbol representing a class or trait.
* Returns ClassCastException if `isClass` is false.
*/
@@ -244,6 +250,8 @@ trait Symbols { self: Universe =>
/** The base API that all module symbols support */
trait ModuleSymbolBase extends TermSymbolBase { this: ModuleSymbol =>
/** The class implicitly associated with the object definition.
+ * One can go back from a module class to the associated module symbol
+ * by inspecting its `selfType.termSymbol`.
*/
def moduleClass: Symbol // needed for tree traversals
// [Eugene++] when this becomes `moduleClass: ClassSymbol`, it will be the happiest day in my life
diff --git a/src/library/scala/reflect/base/TagInterop.scala b/src/library/scala/reflect/base/TagInterop.scala
index 158d1979e5..a9f0b60fd2 100644
--- a/src/library/scala/reflect/base/TagInterop.scala
+++ b/src/library/scala/reflect/base/TagInterop.scala
@@ -4,17 +4,6 @@ package base
import scala.runtime.ScalaRunTime._
trait TagInterop { self: Universe =>
- def classTagToClassManifest[T](tag: ClassTag[T]): ClassManifest[T] = {
- val runtimeClass = tag.runtimeClass
- if (runtimeClass.isArray) {
- val elementClass = arrayElementClass(runtimeClass)
- val elementManifest = classTagToClassManifest(ClassTag(elementClass))
- ClassManifest.arrayType(elementManifest).asInstanceOf[ClassManifest[T]]
- } else {
- ClassManifest.fromClass(runtimeClass.asInstanceOf[Class[T]])
- }
- }
-
// [Eugene++] `mirror` parameters are now of type `Any`, because I can't make these path-dependent types work
// if you're brave enough, replace `Any` with `Mirror`, recompile and run interop_typetags_are_manifests.scala
diff --git a/src/library/scala/reflect/base/TypeTags.scala b/src/library/scala/reflect/base/TypeTags.scala
index 774bc6ebea..05b1a079d7 100644
--- a/src/library/scala/reflect/base/TypeTags.scala
+++ b/src/library/scala/reflect/base/TypeTags.scala
@@ -134,10 +134,11 @@ trait TypeTags { self: Universe =>
val Boolean : AbsTypeTag[scala.Boolean] = TypeTag.Boolean
val Unit : AbsTypeTag[scala.Unit] = TypeTag.Unit
val Any : AbsTypeTag[scala.Any] = TypeTag.Any
+ val AnyVal : AbsTypeTag[scala.AnyVal] = TypeTag.AnyVal
+ val AnyRef : AbsTypeTag[scala.AnyRef] = TypeTag.AnyRef
val Object : AbsTypeTag[java.lang.Object] = TypeTag.Object
val Nothing : AbsTypeTag[scala.Nothing] = TypeTag.Nothing
val Null : AbsTypeTag[scala.Null] = TypeTag.Null
- val String : AbsTypeTag[java.lang.String] = TypeTag.String
def apply[T](mirror1: MirrorOf[self.type], tpec1: TypeCreator): AbsTypeTag[T] =
tpec1(mirror1) match {
@@ -151,10 +152,11 @@ trait TypeTags { self: Universe =>
case BooleanTpe => AbsTypeTag.Boolean.asInstanceOf[AbsTypeTag[T]]
case UnitTpe => AbsTypeTag.Unit.asInstanceOf[AbsTypeTag[T]]
case AnyTpe => AbsTypeTag.Any.asInstanceOf[AbsTypeTag[T]]
+ case AnyValTpe => AbsTypeTag.AnyVal.asInstanceOf[AbsTypeTag[T]]
+ case AnyRefTpe => AbsTypeTag.AnyRef.asInstanceOf[AbsTypeTag[T]]
case ObjectTpe => AbsTypeTag.Object.asInstanceOf[AbsTypeTag[T]]
case NothingTpe => AbsTypeTag.Nothing.asInstanceOf[AbsTypeTag[T]]
case NullTpe => AbsTypeTag.Null.asInstanceOf[AbsTypeTag[T]]
- case StringTpe => AbsTypeTag.String.asInstanceOf[AbsTypeTag[T]]
case _ => new AbsTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
}
@@ -197,10 +199,11 @@ trait TypeTags { self: Universe =>
val Boolean: TypeTag[scala.Boolean] = new PredefTypeTag[scala.Boolean] (BooleanTpe, _.TypeTag.Boolean)
val Unit: TypeTag[scala.Unit] = new PredefTypeTag[scala.Unit] (UnitTpe, _.TypeTag.Unit)
val Any: TypeTag[scala.Any] = new PredefTypeTag[scala.Any] (AnyTpe, _.TypeTag.Any)
+ val AnyVal: TypeTag[scala.AnyVal] = new PredefTypeTag[scala.AnyVal] (AnyValTpe, _.TypeTag.AnyVal)
+ val AnyRef: TypeTag[scala.AnyRef] = new PredefTypeTag[scala.AnyRef] (AnyRefTpe, _.TypeTag.AnyRef)
val Object: TypeTag[java.lang.Object] = new PredefTypeTag[java.lang.Object] (ObjectTpe, _.TypeTag.Object)
val Nothing: TypeTag[scala.Nothing] = new PredefTypeTag[scala.Nothing] (NothingTpe, _.TypeTag.Nothing)
val Null: TypeTag[scala.Null] = new PredefTypeTag[scala.Null] (NullTpe, _.TypeTag.Null)
- val String: TypeTag[java.lang.String] = new PredefTypeTag[java.lang.String] (StringTpe, _.TypeTag.String)
def apply[T](mirror1: MirrorOf[self.type], tpec1: TypeCreator): TypeTag[T] =
tpec1(mirror1) match {
@@ -214,10 +217,11 @@ trait TypeTags { self: Universe =>
case BooleanTpe => TypeTag.Boolean.asInstanceOf[TypeTag[T]]
case UnitTpe => TypeTag.Unit.asInstanceOf[TypeTag[T]]
case AnyTpe => TypeTag.Any.asInstanceOf[TypeTag[T]]
+ case AnyValTpe => TypeTag.AnyVal.asInstanceOf[TypeTag[T]]
+ case AnyRefTpe => TypeTag.AnyRef.asInstanceOf[TypeTag[T]]
case ObjectTpe => TypeTag.Object.asInstanceOf[TypeTag[T]]
case NothingTpe => TypeTag.Nothing.asInstanceOf[TypeTag[T]]
case NullTpe => TypeTag.Null.asInstanceOf[TypeTag[T]]
- case StringTpe => TypeTag.String.asInstanceOf[TypeTag[T]]
case _ => new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
}
diff --git a/src/library/scala/reflect/base/Types.scala b/src/library/scala/reflect/base/Types.scala
index 6106e3fde7..6e8ffc7984 100644
--- a/src/library/scala/reflect/base/Types.scala
+++ b/src/library/scala/reflect/base/Types.scala
@@ -6,6 +6,11 @@ trait Types { self: Universe =>
/** The base API that all types support */
abstract class TypeBase {
+ /** The term symbol associated with the type, or `NoSymbol` for types
+ * that do not refer to a term symbol.
+ */
+ def termSymbol: Symbol
+
/** The type symbol associated with the type, or `NoSymbol` for types
* that do not refer to a type symbol.
*/
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index 2ebc82875e..9f9d4089c4 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -4,14 +4,51 @@ package object reflect {
lazy val basis: base.Universe = new base.Base
+ // in the new scheme of things ClassManifests are aliased to ClassTags
+ // this is done because we want `toArray` in collections to work with ClassTags
+ // but changing it to use the ClassTag context bound without aliasing ClassManifest
+ // will break everyone who subclasses and overrides `toArray`
+ // luckily for us, aliasing doesn't hamper backward compatibility, so it's ideal in this situation
+ // I wish we could do the same for Manifests and TypeTags though
+
+ // note, by the way, that we don't touch ClassManifest the object
+ // because its Byte, Short and so on factory fields are incompatible with ClassTag's
+
+ /** A `ClassManifest[T]` is an opaque descriptor for type `T`.
+ * It is used by the compiler to preserve information necessary
+ * for instantiating `Arrays` in those cases where the element type
+ * is unknown at compile time.
+ *
+ * The type-relation operators make an effort to present a more accurate
+ * picture than can be realized with erased types, but they should not be
+ * relied upon to give correct answers. In particular they are likely to
+ * be wrong when variance is involved or when a subtype has a different
+ * number of type arguments than a supertype.
+ */
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.")
+ type ClassManifest[T] = scala.reflect.ClassTag[T]
+
+ /** The object `ClassManifest` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used in client code.
+ */
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ val ClassManifest = ClassManifestFactory
+
+ /** The object `Manifest` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used in client code.
+ */
+ @deprecated("Use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+ val Manifest = ManifestFactory
+
def classTag[T](implicit ctag: ClassTag[T]) = ctag
// typeTag incantation is defined inside scala.reflect.basis and scala.reflect.runtime.universe
// ClassTag class is defined in ClassTag.scala
- type TypeTag[T] = scala.reflect.basis.TypeTag[T]
+ type TypeTag[T] = scala.reflect.basis.TypeTag[T]
// ClassTag object is defined in ClassTag.scala
- lazy val TypeTag = scala.reflect.basis.TypeTag
+ lazy val TypeTag = scala.reflect.basis.TypeTag
@deprecated("Use `@scala.beans.BeanDescription` instead", "2.10.0")
type BeanDescription = scala.beans.BeanDescription
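To make the aliasing comment above concrete, a sketch (the method is invented) of why pre-2.10 signatures keep compiling: a `ClassManifest` context bound now dealiases to `ClassTag`, which the compiler can materialize:

    import scala.reflect.ClassManifest   // deprecated alias for ClassTag, per above

    def singletonArray[T: ClassManifest](x: T): Array[T] = Array(x)

    // singletonArray(1) yields an Array[Int], with only a deprecation warning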
diff --git a/src/library/scala/Either.scala b/src/library/scala/util/Either.scala
index b35d8a7c8a..1a2e2d48d5 100644
--- a/src/library/scala/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -8,7 +8,7 @@
-package scala
+package scala.util
import language.implicitConversions
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index 8faba236f0..988f68bc18 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -11,36 +11,77 @@ package scala.util
import collection.Seq
-
+import scala.util.control.NonFatal
/**
- * The `Try` type represents a computation that may either result in an exception,
- * or return a success value. It's analagous to the `Either` type.
+ * The `Try` type represents a computation that may either result in an exception, or return a
+ * successfully computed value. It's similar to, but semantically different from, the [[scala.Either]] type.
+ *
+ * An instance of `Try[T]` is either an instance of [[scala.util.Success]][T] or [[scala.util.Failure]][T].
+ *
+ * For example, `Try` can be used to perform division on a user-defined input, without the need to do explicit
+ * exception-handling in all of the places that an exception might occur.
+ *
+ * Example:
+ * {{{
+ * import scala.util.{Try, Success, Failure}
+ *
+ * def divide: Try[Int] = {
+ * val dividend = Try(Console.readLine("Enter an Int that you'd like to divide:\n").toInt)
+ * val divisor = Try(Console.readLine("Enter an Int that you'd like to divide by:\n").toInt)
+ * val problem = dividend.flatMap(x => divisor.map(y => x/y))
+ * problem match {
+ * case Success(v) =>
+ * println("Result of " + dividend.get + "/"+ divisor.get +" is: " + v)
+ * Success(v)
+ * case Failure(e) =>
+ * println("You must've divided by zero or entered something that's not an Int. Try again!")
+ * println("Info from the exception: " + e.getMessage)
+ * divide
+ * }
+ * }
+ *
+ * }}}
+ *
+ * An important property of `Try` shown in the above example is its ability to ''pipeline'', or chain, operations,
+ * catching exceptions along the way. Each `flatMap` and `map` combinator in the example either passes its
+ * successfully computed value, wrapped in `Success`, on to the next combinator in the chain, or passes the
+ * exception, wrapped in `Failure`, down the chain unchanged. Combinators such as `rescue` and `recover` are
+ * designed to provide some type of default behavior in the case of failure.
+ *
+ * ''Note'': only non-fatal exceptions are caught by the combinators on `Try` (see [[scala.util.control.NonFatal]]).
+ * Serious system errors, on the other hand, will be thrown.
+ *
+ * `Try` comes to the Scala standard library after years of use as an integral part of Twitter's stack.
+ *
+ * @since 2.10
*/
sealed abstract class Try[+T] {
- /**
- * Returns true if the `Try` is a `Failure`, false otherwise.
+
+ /** Returns `true` if the `Try` is a `Failure`, `false` otherwise.
*/
def isFailure: Boolean
- /**
- * Returns true if the `Try` is a `Success`, false otherwise.
+ /** Returns `true` if the `Try` is a `Success`, `false` otherwise.
*/
def isSuccess: Boolean
- /**
- * Returns the value from this `Success` or the given argument if this is a `Failure`.
+ /** Returns the value from this `Success` or the given `default` argument if this is a `Failure`.
*/
def getOrElse[U >: T](default: => U) = if (isSuccess) get else default
- /**
- * Returns the value from this `Success` or throws the exception if this is a `Failure`.
+ /** Returns this `Try` if it's a `Success` or the given `default` argument if this is a `Failure`.
+ */
+ def orElse[U >: T](default: => Try[U]) = if (isSuccess) this else default
+
+ /** Returns the value from this `Success` or throws the exception if this is a `Failure`.
*/
def get: T
/**
- * Applies the given function f if this is a Result.
+ * Applies the given function `f` if this is a `Success`; does nothing if this is a `Failure`.
*/
def foreach[U](f: T => U): Unit
@@ -54,39 +95,35 @@ sealed abstract class Try[+T] {
*/
def map[U](f: T => U): Try[U]
- def collect[U](pf: PartialFunction[T, U]): Try[U]
-
- def exists(p: T => Boolean): Boolean
-
/**
* Converts this to a `Failure` if the predicate is not satisfied.
*/
def filter(p: T => Boolean): Try[T]
/**
- * Converts this to a `Failure` if the predicate is not satisfied.
- */
- def filterNot(p: T => Boolean): Try[T] = filter(x => !p(x))
-
- /**
- * Calls the exceptionHandler with the exception if this is a `Failure`. This is like `flatMap` for the exception.
+ * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
+ * This is like `flatMap` for the exception.
*/
- def rescue[U >: T](rescueException: PartialFunction[Throwable, Try[U]]): Try[U]
+ def rescue[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U]
/**
- * Calls the exceptionHandler with the exception if this is a `Failure`. This is like map for the exception.
+ * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
+ * This is like `map` for the exception.
*/
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U]
+ def recover[U >: T](f: PartialFunction[Throwable, U]): Try[U]
/**
* Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`.
*/
def toOption = if (isSuccess) Some(get) else None
+ /**
+ * Returns an empty `Seq` (usually a `List`) if this is a `Failure` or a `Seq` containing the value if this is a `Success`.
+ */
def toSeq = if (isSuccess) Seq(get) else Seq()
/**
- * Returns the given function applied to the value from this Success or returns this if this is a `Failure`.
+ * Returns the given function applied to the value from this `Success` or returns this if this is a `Failure`.
* Alias for `flatMap`.
*/
def andThen[U](f: T => Try[U]): Try[U] = flatMap(f)
@@ -97,42 +134,76 @@ sealed abstract class Try[+T] {
*/
def flatten[U](implicit ev: T <:< Try[U]): Try[U]
+ /**
+ * Inverts this `Try`: if this is a `Failure`, returns its exception wrapped in a `Success`; if this is a
+ * `Success`, returns a `Failure` containing an `UnsupportedOperationException`.
+ */
def failed: Try[Throwable]
+
+ /** Completes this `Try` by applying the function `f` to the exception if this is a `Failure`, or the function `s`
+ * to the value if this is a `Success`.
+ */
+ def transform[U](f: Throwable => Try[U], s: T => Try[U]): Try[U] = this match {
+ case Success(v) => s(v)
+ case Failure(e) => f(e)
+ }
+
}
+object Try {
+
+ implicit def try2either[T](tr: Try[T]): Either[Throwable, T] = {
+ tr match {
+ case Success(v) => Right(v)
+ case Failure(t) => Left(t)
+ }
+ }
-final class Failure[+T](val exception: Throwable) extends Try[T] {
- def isFailure: Boolean = true
- def isSuccess: Boolean = false
- def rescue[U >: T](rescueException: PartialFunction[Throwable, Try[U]]): Try[U] = {
- try {
- if (rescueException.isDefinedAt(exception)) rescueException(exception) else this
- } catch {
- case e2 => Failure(e2)
+ implicit def either2try[T](ei: Either[Throwable, T]): Try[T] = {
+ ei match {
+ case Right(v) => Success(v)
+ case Left(t) => Failure(t)
+ }
+ }
+
+ def apply[T](r: => T): Try[T] = {
+ try { Success(r) } catch {
+ case NonFatal(e) => Failure(e)
}
}
+
+}
+
+final case class Failure[+T](val exception: Throwable) extends Try[T] {
+ def isFailure: Boolean = true
+ def isSuccess: Boolean = false
+ def rescue[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] =
+ if (f.isDefinedAt(exception)) f(exception) else this
def get: T = throw exception
def flatMap[U](f: T => Try[U]): Try[U] = Failure[U](exception)
def flatten[U](implicit ev: T <:< Try[U]): Try[U] = Failure[U](exception)
def foreach[U](f: T => U): Unit = {}
def map[U](f: T => U): Try[U] = Failure[U](exception)
- def collect[U](pf: PartialFunction[T, U]): Try[U] = Failure[U](exception)
def filter(p: T => Boolean): Try[T] = this
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
- if (rescueException.isDefinedAt(exception)) {
- Try(rescueException(exception))
- } else {
- this
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = {
+ try {
+ if (rescueException.isDefinedAt(exception)) {
+ Try(rescueException(exception))
+ } else {
+ this
+ }
+ } catch {
+ case NonFatal(e) => Failure(e)
}
- def exists(p: T => Boolean): Boolean = false
+ }
def failed: Try[Throwable] = Success(exception)
}
-final class Success[+T](value: T) extends Try[T] {
+final case class Success[+T](value: T) extends Try[T] {
def isFailure: Boolean = false
def isSuccess: Boolean = true
- def rescue[U >: T](rescueException: PartialFunction[Throwable, Try[U]]): Try[U] = Success(value)
+ def rescue[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = Success(value)
def get = value
def flatMap[U](f: T => Try[U]): Try[U] =
try f(value)
@@ -142,43 +213,14 @@ final class Success[+T](value: T) extends Try[T] {
def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value
def foreach[U](f: T => U): Unit = f(value)
def map[U](f: T => U): Try[U] = Try[U](f(value))
- def collect[U](pf: PartialFunction[T, U]): Try[U] =
- if (pf isDefinedAt value) Success(pf(value))
- else Failure[U](new NoSuchElementException("Partial function not defined at " + value))
- def filter(p: T => Boolean): Try[T] =
- if (p(value)) this
- else Failure(new NoSuchElementException("Predicate does not hold for " + value))
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this
- def exists(p: T => Boolean): Boolean = p(value)
- def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
-}
-
-object Failure {
- def apply[T](e: Throwable): Failure[T] = new Failure(e)
- def unapply(scrutinizee: Any): Option[Throwable] = scrutinizee match {
- case Right(_) => None
- case Left(e) => Some(e.asInstanceOf[Throwable])
- case s: Success[_] => None
- case f: Failure[_] => Some(f.exception)
- }
-}
-
-object Success {
- def apply[T](value: T): Success[T] = new Success(value)
- def unapply[T](scrutinizee: Any): Option[T] = scrutinizee match {
- case Right(v) => Some(v.asInstanceOf[T])
- case Left(_) => None
- case s: Success[_] => Some(s.get.asInstanceOf[T])
- case f: Failure[Throwable] => None
- }
-}
-
-object Try {
-
- def apply[T](r: => T): Try[T] = {
- try { Success(r) } catch {
- case e => Failure(e)
+ def filter(p: T => Boolean): Try[T] = {
+ try {
+ if (p(value)) this
+ else Failure(new NoSuchElementException("Predicate does not hold for " + value))
+ } catch {
+ case NonFatal(e) => Failure(e)
}
}
-
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this
+ def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
}
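
A rough usage sketch of the `Try` API defined above; the `parsePort` helper and its inputs are illustrative only, not part of this change:

    import scala.util.{ Try, Success, Failure }

    // Try.apply catches only non-fatal exceptions (via NonFatal) and turns them into Failure
    def parsePort(s: String): Try[Int] = Try(s.toInt)

    parsePort("8080") getOrElse 80                                      // 8080
    parsePort("oops") recover { case _: NumberFormatException => 80 }   // Success(80)

    parsePort("oops") match {
      case Success(n) => println("port: " + n)
      case Failure(e) => println("could not parse: " + e.getMessage)
    }
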
diff --git a/src/library/scala/concurrent/impl/NonFatal.scala b/src/library/scala/util/control/NonFatal.scala
index bc509e664c..9da2f63307 100644
--- a/src/library/scala/concurrent/impl/NonFatal.scala
+++ b/src/library/scala/util/control/NonFatal.scala
@@ -6,32 +6,33 @@
** |/ **
\* */
-package scala.concurrent
-package impl
+package scala.util.control
/**
- * Extractor of non-fatal Throwables. Will not match fatal errors
- * like VirtualMachineError (OutOfMemoryError)
- * ThreadDeath, LinkageError and InterruptedException.
- * StackOverflowError is matched, i.e. considered non-fatal.
+ * Extractor of non-fatal Throwables. Will not match fatal errors like `VirtualMachineError`
+ * (for example, `OutOfMemoryError`, a subclass of `VirtualMachineError`), `ThreadDeath`,
+ * `LinkageError`, `InterruptedException`, `ControlThrowable`, or `NotImplementedError`.
+ * However, `StackOverflowError` is matched, i.e. considered non-fatal.
*
- * Usage to catch all harmless throwables:
+ * Note that [[scala.util.control.ControlThrowable]], an internal Throwable, is not matched by
+ * `NonFatal` (and would therefore be thrown).
+ *
+ * For example, all harmless Throwables can be caught by:
* {{{
* try {
* // dangerous stuff
* } catch {
- * case NonFatal(e) => log.error(e, "Something not that bad")
+ * case NonFatal(e) => log.error(e, "Something not that bad.")
* }
* }}}
*/
-private[concurrent] object NonFatal {
+object NonFatal {
def unapply(t: Throwable): Option[Throwable] = t match {
case e: StackOverflowError ⇒ Some(e) // StackOverflowError ok even though it is a VirtualMachineError
// VirtualMachineError includes OutOfMemoryError and other fatal errors
- case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError ⇒ None
+ case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable | _: NotImplementedError => None
case e ⇒ Some(e)
}
}
-
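
A small sketch of how the extractor classifies a few concrete throwables, following the cases listed above:

    import scala.util.control.NonFatal

    def classify(t: Throwable): String = t match {
      case NonFatal(e) => "non-fatal: " + e.getClass.getSimpleName
      case _           => "fatal"
    }

    classify(new IllegalArgumentException)  // non-fatal
    classify(new StackOverflowError)        // non-fatal: explicitly allowed above
    classify(new OutOfMemoryError)          // fatal: a VirtualMachineError
    classify(new InterruptedException)      // fatal
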
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 7f4ff8a7fb..27d3b8ba7d 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -10,12 +10,15 @@ trait Printers { self: Universe =>
protected var printTypes = false
protected var printIds = false
protected var printKinds = false
+ protected var printMirrors = false
def withTypes: this.type = { printTypes = true; this }
def withoutTypes: this.type = { printTypes = false; this }
def withIds: this.type = { printIds = true; this }
def withoutIds: this.type = { printIds = false; this }
def withKinds: this.type = { printKinds = true; this }
def withoutKinds: this.type = { printKinds = false; this }
+ def withMirrors: this.type = { printMirrors = true; this }
+ def withoutMirrors: this.type = { printMirrors = false; this }
}
case class BooleanFlag(val value: Option[Boolean])
@@ -25,13 +28,14 @@ trait Printers { self: Universe =>
implicit def optionToBooleanFlag(value: Option[Boolean]): BooleanFlag = BooleanFlag(value)
}
- protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String = {
+ protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String = {
val buffer = new StringWriter()
val writer = new PrintWriter(buffer)
var printer = mkPrinter(writer)
printTypes.value.map(printTypes => if (printTypes) printer.withTypes else printer.withoutTypes)
printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds)
printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds)
+ printMirrors.value.map(printMirrors => if (printMirrors) printer.withMirrors else printer.withoutMirrors)
printer.print(what)
writer.flush()
buffer.toString
@@ -40,42 +44,25 @@ trait Printers { self: Universe =>
/** By default trees are printed with `show` */
override protected def treeToString(tree: Tree) = show(tree)
- /** Renders a prettified representation of a tree.
+ /** Renders a prettified representation of a reflection artifact.
* Typically it looks very close to the Scala code it represents.
- * This function is used in Tree.toString.
*/
- def show(tree: Tree, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String =
- render(tree, newTreePrinter(_), printTypes, printIds, printKinds)
+ def show(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
+ render(any, newTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
- /** Hook to define what `show(tree)` means.
+ /** Hook to define what `show(...)` means.
*/
def newTreePrinter(out: PrintWriter): TreePrinter
- /** Renders internal structure of a tree.
+ /** Renders internal structure of a reflection artifact.
*/
- def showRaw(tree: Tree, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String =
- render(tree, newRawTreePrinter(_), printTypes, printIds, printKinds)
+ def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
+ render(any, newRawTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
- /** Hook to define what `showRaw(tree)` means.
+ /** Hook to define what `showRaw(...)` means.
*/
def newRawTreePrinter(out: PrintWriter): TreePrinter
- /** Renders a prettified representation of a symbol.
- */
- def show(sym: Symbol): String = sym.toString
-
- /** Renders internal structure of a symbol.
- */
- def showRaw(sym: Symbol): String = render(sym, newRawTreePrinter(_))
-
- /** Renders a prettified representation of a type.
- */
- def show(tpe: Type): String = tpe.toString
-
- /** Renders internal structure of a type.
- */
- def showRaw(tpe: Type): String = render(tpe, newRawTreePrinter(_))
-
/** Renders a prettified representation of a name.
*/
def show(name: Name): String
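
A sketch of how the generalized `show`/`showRaw` entry points can be invoked, assuming the runtime universe is in scope; the `reify` expression merely supplies an example tree:

    import scala.reflect.runtime.universe._

    val tree = reify { List(1, 2, 3) map (_ + 1) }.tree

    show(tree)                          // prettified output, close to the original source
    showRaw(tree, printIds = true)      // raw tree structure, with symbol ids
    showRaw(tree, printMirrors = true)  // raw structure, with mirrors rendered as footnotes
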
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index b797c71f6d..231b9b248b 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -62,6 +62,10 @@ trait Types extends base.Types { self: Universe =>
* the empty list for all other types */
def typeParams: List[Symbol]
+ /** For a (nullary) method or poly type, its direct result type,
+ * the type itself for all other types. */
+ def resultType: Type
+
/** Is this type a type constructor that is missing its type arguments?
*/
def isHigherKinded: Boolean // !!! This should be called "isTypeConstructor", no?
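
A brief illustration of the new `resultType` member, using runtime reflection as an assumed vehicle; the class `C` and method `f` are hypothetical:

    import scala.reflect.runtime.universe._

    class C { def f(x: Int): String = x.toString }

    val fSig = typeOf[C].member(newTermName("f")).typeSignature
    fSig                    // (x: Int)String -- a method type
    fSig.resultType         // String
    typeOf[Int].resultType  // Int: for non-method types, resultType is the type itself
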
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 3bde57ded8..0bf5aa4b69 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -7,19 +7,21 @@ trait BuildUtils extends base.BuildUtils { self: SymbolTable =>
class BuildImpl extends BuildBase {
- def selectType(owner: Symbol, name: String): TypeSymbol = {
- val result = owner.info.decl(newTypeName(name))
- if (result ne NoSymbol) result.asTypeSymbol
- else MissingRequirementError.notFound("type %s in %s".format(name, owner.fullName))
- }
+ def selectType(owner: Symbol, name: String): TypeSymbol =
+ select(owner, newTypeName(name)).asTypeSymbol
def selectTerm(owner: Symbol, name: String): TermSymbol = {
- val sym = owner.info.decl(newTermName(name))
- val result =
- if (sym.isOverloaded) sym.suchThat(!_.isMethod)
- else sym
- if (result ne NoSymbol) result.asTermSymbol
- else MissingRequirementError.notFound("term %s in %s".format(name, owner.fullName))
+ val result = select(owner, newTermName(name)).asTermSymbol
+ if (result.isOverloaded) result.suchThat(!_.isMethod).asTermSymbol
+ else result
+ }
+
+ private def select(owner: Symbol, name: Name): Symbol = {
+ val result = owner.info decl name
+ if (result ne NoSymbol) result
+ else
+ mirrorThatLoaded(owner).tryMissingHooks(owner, name) orElse
+ MissingRequirementError.notFound("%s %s in %s".format(if (name.isTermName) "term" else "type", name, owner.fullName))
}
def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol = {
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 320cd3ddae..7891433b4f 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -35,7 +35,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val AnyRefTpe = definitions.AnyRefClass.asType
lazy val NothingTpe = definitions.NothingClass.asType
lazy val NullTpe = definitions.NullClass.asType
- lazy val StringTpe = definitions.StringClass.asType
/** Since both the value parameter types and the result type may
* require access to the type parameter symbols, we model polymorphic
@@ -454,10 +453,10 @@ trait Definitions extends api.StandardDefinitions {
def ReflectRuntimeUniverse = if (ReflectRuntimePackage != NoSymbol) getMemberValue(ReflectRuntimePackage, nme.universe) else NoSymbol
def ReflectRuntimeCurrentMirror = if (ReflectRuntimePackage != NoSymbol) getMemberMethod(ReflectRuntimePackage, nme.currentMirror) else NoSymbol
- lazy val PartialManifestClass = requiredClass[scala.reflect.ClassManifest[_]]
- lazy val PartialManifestModule = requiredModule[scala.reflect.ClassManifest.type]
+ lazy val PartialManifestClass = getMemberType(ReflectPackage, tpnme.ClassManifest)
+ lazy val PartialManifestModule = requiredModule[scala.reflect.ClassManifestFactory.type]
lazy val FullManifestClass = requiredClass[scala.reflect.Manifest[_]]
- lazy val FullManifestModule = requiredModule[scala.reflect.Manifest.type]
+ lazy val FullManifestModule = requiredModule[scala.reflect.ManifestFactory.type]
lazy val OptManifestClass = requiredClass[scala.reflect.OptManifest[_]]
lazy val NoManifest = requiredModule[scala.reflect.NoManifest.type]
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index e3680b14d5..dedfd41e83 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -41,7 +41,7 @@ trait Mirrors extends api.Mirrors {
if (result != NoSymbol) result
else {
if (settings.debug.value) { log(sym.info); log(sym.info.members) }//debug
- mirrorMissingHook(owner, name) orElse symbolTableMissingHook(owner, name) orElse {
+ tryMissingHooks(owner, name) orElse {
MissingRequirementError.notFound((if (path.isTermName) "object " else "class ")+path+" in "+thisMirror)
}
}
@@ -51,6 +51,8 @@ trait Mirrors extends api.Mirrors {
protected def symbolTableMissingHook(owner: Symbol, name: Name): Symbol = self.missingHook(owner, name)
+ private[reflect] def tryMissingHooks(owner: Symbol, name: Name): Symbol = mirrorMissingHook(owner, name) orElse symbolTableMissingHook(owner, name)
+
/** If you're looking for a class, pass a type name.
* If a module, a term name.
*/
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 82a8c42f7c..c018ddc88e 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -10,6 +10,7 @@ package internal
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import Flags._
+import compat.Platform.EOL
trait Printers extends api.Printers { self: SymbolTable =>
@@ -65,6 +66,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
printTypes = settings.printtypes.value
printIds = settings.uniqid.value
printKinds = settings.Yshowsymkinds.value
+ printMirrors = false // typically there's no point in printing mirrors inside the compiler, as there is only one mirror there
protected def doPrintPositions = settings.Xprintpos.value
def indent() = indentMargin += indentStep
@@ -477,22 +479,61 @@ trait Printers extends api.Printers { self: SymbolTable =>
def flush = { /* do nothing */ }
}
- // provides footnotes for types
- private var typeCounter = 0
- private val typeMap = collection.mutable.WeakHashMap[Type, Int]()
-
def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer)
def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream))
def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter))
+ // provides footnotes for types and mirrors
+ import scala.collection.mutable.{Map, WeakHashMap, SortedSet}
+ private val footnoteIndex = new FootnoteIndex
+ private class FootnoteIndex {
+ private val index = Map[Class[_], WeakHashMap[Any, Int]]()
+ private def classIndex[T: ClassTag] = index.getOrElseUpdate(classTag[T].runtimeClass, WeakHashMap[Any, Int]())
+ private val counters = Map[Class[_], Int]()
+ private def nextCounter[T: ClassTag] = {
+ val clazz = classTag[T].runtimeClass
+ counters.getOrElseUpdate(clazz, 0)
+ counters(clazz) = counters(clazz) + 1
+ counters(clazz)
+ }
+
+ def mkFootnotes() = new Footnotes
+ class Footnotes {
+ private val footnotes = Map[Class[_], SortedSet[Int]]()
+ private def classFootnotes[T: ClassTag] = footnotes.getOrElseUpdate(classTag[T].runtimeClass, SortedSet[Int]())
+
+ def put[T: ClassTag](any: T): Int = {
+ val index = classIndex[T].getOrElseUpdate(any, nextCounter[T])
+ classFootnotes[T] += index
+ index
+ }
+
+ def get[T: ClassTag]: List[(Int, Any)] =
+ classFootnotes[T].toList map (fi => (fi, classIndex[T].find{ case (any, ii) => ii == fi }.get._1))
+
+ def print[T: ClassTag](printer: Printers.super.TreePrinter): Unit = {
+ val footnotes = get[T]
+ if (footnotes.nonEmpty) {
+ printer.print(EOL)
+ footnotes.zipWithIndex foreach {
+ case ((fi, any), ii) =>
+ printer.print("[", fi, "] ", any)
+ if (ii < footnotes.length - 1) printer.print(EOL)
+ }
+ }
+ }
+ }
+ }
+
// emits more or less verbatim representation of the provided tree
class RawTreePrinter(out: PrintWriter) extends super.TreePrinter {
private var depth = 0
- private var footnotes = collection.mutable.Map[Int, Type]()
- private var printingFootnotes = false
private var printTypesInFootnotes = true
+ private var printingFootnotes = false
+ private var footnotes = footnoteIndex.mkFootnotes()
def print(args: Any*): Unit = {
+ // don't print type footnotes if the argument is a mere type
if (depth == 0 && args.length == 1 && args(0) != null && args(0).isInstanceOf[Type])
printTypesInFootnotes = false
@@ -544,14 +585,15 @@ trait Printers extends api.Printers { self: SymbolTable =>
else print(sym.name)
if (printIds) print("#", sym.id)
if (printKinds) print("#", sym.abbreviatedKindString)
+ if (printMirrors) print("%M", footnotes.put[MirrorOf[_]](mirrorThatLoaded(sym)))
case NoType =>
print("NoType")
case NoPrefix =>
print("NoPrefix")
- case tpe: Type if printTypesInFootnotes && !printingFootnotes =>
- val index = typeMap.getOrElseUpdate(tpe, { typeCounter += 1; typeCounter })
- footnotes(index) = tpe
- print("[", index, "]")
+ case tpe: Type =>
+ val defer = printTypesInFootnotes && !printingFootnotes
+ if (defer) print("[", footnotes.put(tpe), "]")
+ else printProduct(tpe.asInstanceOf[Product])
case mods: Modifiers =>
print("Modifiers(")
if (mods.flags != NoFlags || mods.privateWithin != tpnme.EMPTY || mods.annotations.nonEmpty) print(show(mods.flags))
@@ -569,16 +611,11 @@ trait Printers extends api.Printers { self: SymbolTable =>
out.print(arg)
}
depth -= 1
- if (depth == 0 && footnotes.nonEmpty && !printingFootnotes) {
+ if (depth == 0 && !printingFootnotes) {
printingFootnotes = true
- out.println()
- val typeIndices = footnotes.keys.toList.sorted
- typeIndices.zipWithIndex foreach {
- case (typeIndex, i) =>
- print("[" + typeIndex + "] ")
- print(footnotes(typeIndex))
- if (i < typeIndices.length - 1) out.println()
- }
+ footnotes.print[Type](this)
+ footnotes.print[MirrorOf[_]](this)
+ printingFootnotes = false
}
}
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index bd02013037..72a99589d5 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -232,6 +232,7 @@ trait StdNames {
final val Annotation: NameType = "Annotation"
final val ClassfileAnnotation: NameType = "ClassfileAnnotation"
+ final val ClassManifest: NameType = "ClassManifest"
final val Enum: NameType = "Enum"
final val Group: NameType = "Group"
final val Tree: NameType = "Tree"
@@ -639,8 +640,8 @@ trait StdNames {
val bytes: NameType = "bytes"
val canEqual_ : NameType = "canEqual"
val checkInitialized: NameType = "checkInitialized"
+ val ClassManifestFactory: NameType = "ClassManifestFactory"
val classOf: NameType = "classOf"
- val classTagToClassManifest: NameType = "classTagToClassManifest"
val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
val conforms: NameType = "conforms"
val copy: NameType = "copy"
@@ -696,6 +697,7 @@ trait StdNames {
val macroContext : NameType = "c"
val main: NameType = "main"
val manifest: NameType = "manifest"
+ val ManifestFactory: NameType = "ManifestFactory"
val manifestToTypeTag: NameType = "manifestToTypeTag"
val map: NameType = "map"
val materializeClassTag: NameType = "materializeClassTag"
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 79041924a8..119c3d42fd 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -539,7 +539,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isConcreteClass = false
def isImplClass = false // the implementation class of a trait
def isJavaInterface = false
- def isModuleClass = false
def isNumericValueClass = false
def isPrimitiveValueClass = false
def isRefinementClass = false
@@ -900,15 +899,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (owner.isTerm) return false
if (isLocalDummy) return false
+ if (isAliasType) return true
if (isType && isNonClassType) return false
if (isRefinementClass) return false
return true
}
- // [Eugene] is it a good idea to add ``dealias'' to Symbol?
- /** Expands type aliases */
- def dealias: Symbol = this
-
/** The variance of this symbol as an integer */
final def variance: Int =
if (isCovariant) 1
@@ -2564,7 +2560,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
extends TypeSymbol(initOwner, initPos, initName) {
type TypeOfClonedSymbol = TypeSymbol
final override def isAliasType = true
- final override def dealias = info.typeSymbol.dealias
override def cloneSymbolImpl(owner: Symbol, newFlags: Long): TypeSymbol =
owner.newNonClassSymbol(name, pos, newFlags)
}
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 4cf2cceb81..56cc265e48 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -568,6 +568,24 @@ trait Types extends api.Types { self: SymbolTable =>
/** Expands type aliases. */
def dealias = this
+ def etaExpand: Type = this
+
+ /** Performs a single step of beta-reduction on types.
+ * Given:
+ *
+ * type C[T] = B[T]
+ * type B[T] = A
+ * class A
+ *
+ * The following will happen after `betaReduce` is invoked:
+ * TypeRef(pre, <C>, List(Int)) is replaced by
+ * TypeRef(pre, <B>, List(Int))
+ *
+ * Unlike `dealias`, which recursively applies beta reduction until no further reduction is possible,
+ * `betaReduce` performs exactly one step and then returns.
+ */
+ def betaReduce: Type = this
+
/** For a classtype or refined type, its defined or declared members;
* inherited by subtypes and typerefs.
* The empty scope for all other types.
@@ -2110,7 +2128,7 @@ trait Types extends api.Types { self: SymbolTable =>
//
// this crashes pos/depmet_implicit_tpbetareduce.scala
// appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
- def betaReduce = transform(sym.info.resultType)
+ override def betaReduce = transform(sym.info.resultType)
// #3731: return sym1 for which holds: pre bound sym.name to sym and
// pre1 now binds sym.name to sym1, conceptually exactly the same
@@ -2221,7 +2239,7 @@ trait Types extends api.Types { self: SymbolTable =>
|| pre.isGround && args.forall(_.isGround)
)
- def etaExpand: Type = {
+ override def etaExpand: Type = {
// must initialise symbol, see test/files/pos/ticket0137.scala
val tpars = initializedTypeParams
if (tpars.isEmpty) this
@@ -4275,18 +4293,6 @@ trait Types extends api.Types { self: SymbolTable =>
qvar
}).tpe
- /** Return `pre.baseType(clazz)`, or if that's `NoType` and `clazz` is a refinement, `pre` itself.
- * See bug397.scala for an example where the second alternative is needed.
- * The problem is that when forming the base type sequence of an abstract type,
- * any refinements in the base type list might be regenerated, and thus acquire
- * new class symbols. However, since refinements always have non-interesting prefixes
- * it looks OK to me to just take the prefix directly. */
- def base(pre: Type, clazz: Symbol) = {
- val b = pre.baseType(clazz)
- if (b == NoType && clazz.isRefinementClass) pre
- else b
- }
-
def apply(tp: Type): Type =
if ((pre eq NoType) || (pre eq NoPrefix) || !clazz.isClass) tp
else tp match {
@@ -4307,7 +4313,7 @@ trait Types extends api.Types { self: SymbolTable =>
pre1
}
} else {
- toPrefix(base(pre, clazz).prefix, clazz.owner)
+ toPrefix(pre.baseType(clazz).prefix, clazz.owner)
}
toPrefix(pre, clazz)
case SingleType(pre, sym) =>
@@ -4387,7 +4393,7 @@ trait Types extends api.Types { self: SymbolTable =>
case t =>
throwError
}
- } else toInstance(base(pre, clazz).prefix, clazz.owner)
+ } else toInstance(pre.baseType(clazz).prefix, clazz.owner)
}
toInstance(pre, clazz)
case _ =>
@@ -5107,7 +5113,7 @@ trait Types extends api.Types { self: SymbolTable =>
false
private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
- if (sym1 == sym2) sym1.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
+ if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
/** Do `tp1` and `tp2` denote equivalent types? */
@@ -5594,7 +5600,7 @@ trait Types extends api.Types { self: SymbolTable =>
val sym2 = tr2.sym
val pre1 = tr1.pre
val pre2 = tr2.pre
- (((if (sym1 == sym2) phase.erasedTypes || isSubType(pre1, pre2, depth)
+ (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
(isUnifiable(pre1, pre2) ||
isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
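
To make the `dealias`/`betaReduce` distinction concrete, a conceptual sketch in terms of the aliases used in the doc comment above (not literal compiler output):

    object AliasExample {
      type C[T] = B[T]
      type B[T] = A
      class A
    }

    // For the type C[Int]:
    //   betaReduce performs a single reduction step:    C[Int]  becomes  B[Int]
    //   dealias keeps reducing until no alias remains:  C[Int]  becomes  A
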
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 38d280ec73..41955170bd 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -972,6 +972,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
case ExistentialType(_, rtpe) => typeToJavaClass(rtpe)
case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe))
case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClassSymbol)
+ case tpe @ TypeRef(_, sym: AliasTypeSymbol, _) => typeToJavaClass(tpe.dealias)
case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found")
}
}
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index 4e82fe8ad2..7839850529 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -37,12 +37,17 @@ object ReflectionUtils {
systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
)
- def show(cl: ClassLoader) = {
+ def show(cl: ClassLoader): String = {
+ def isAbstractFileClassLoader(clazz: Class[_]): Boolean = {
+ if (clazz == null) return false
+ if (clazz.getName == "scala.tools.nsc.interpreter.AbstractFileClassLoader") return true
+ return isAbstractFileClassLoader(clazz.getSuperclass)
+ }
def inferClasspath(cl: ClassLoader): String = cl match {
case cl: java.net.URLClassLoader =>
- "[" + (cl.getURLs mkString ",") + "]"
- case cl if cl != null && cl.getClass.getName == "scala.tools.nsc.interpreter.AbstractFileClassLoader" =>
- "[" + cl.asInstanceOf[{val root: scala.reflect.internal.AbstractFileApi}].root + "] and " + inferClasspath(cl.getParent)
+ (cl.getURLs mkString ",")
+ case cl if cl != null && isAbstractFileClassLoader(cl.getClass) =>
+ cl.asInstanceOf[{val root: scala.reflect.internal.AbstractFileApi}].root.canonicalPath
case null =>
inferBootClasspath
case _ =>
@@ -50,7 +55,7 @@ object ReflectionUtils {
}
cl match {
case cl if cl != null =>
- "%s of type %s with classpath %s".format(cl, cl.getClass, inferClasspath(cl))
+ "%s of type %s with classpath [%s] and parent being %s".format(cl, cl.getClass, inferClasspath(cl), show(cl.getParent))
case null =>
"primordial classloader with boot classpath [%s]".format(inferClasspath(cl))
}