-rw-r--r--  META-INF/MANIFEST.MF | 4
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 172
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 85
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 31
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 23
-rw-r--r--  src/library/scala/collection/immutable/IntMap.scala | 289
-rw-r--r--  src/library/scala/collection/immutable/LongMap.scala | 318
-rw-r--r--  src/library/scala/util/control/Breaks.scala | 8
-rw-r--r--  src/library/scala/util/control/ControlThrowable.scala | 5
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala | 1
-rw-r--r--  src/reflect/scala/reflect/api/Types.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 14
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 39
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 61
-rw-r--r--  src/reflect/scala/reflect/internal/util/Statistics.scala | 93
-rw-r--r--  test/files/neg/t4842.check | 7
-rw-r--r--  test/files/neg/t4842.scala (renamed from test/files/neg/t4842b.scala) | 4
-rw-r--r--  test/files/neg/t4842a.check | 4
-rw-r--r--  test/files/neg/t4842a.scala | 3
-rw-r--r--  test/files/neg/t4842b.check | 4
-rw-r--r--  test/files/neg/t4989.check | 7
-rw-r--r--  test/files/neg/t4989.scala | 68
-rw-r--r--  test/files/neg/t5761.scala | 16
-rw-r--r--  test/files/pos/spec-params-new.scala | 2
-rw-r--r--  test/files/pos/t5968.flags | 1
-rw-r--r--  test/files/pos/t5968.scala | 8
-rw-r--r--  test/files/run/t4809.scala | 34
-rw-r--r--  test/files/run/t4935.check | 1
-rw-r--r--  test/files/run/t4935.flags | 1
-rw-r--r--  test/files/run/t4935.scala | 9
-rw-r--r--  test/files/run/t5284.check | 1
-rw-r--r--  test/files/run/t5284.scala | 25
-rw-r--r--  test/files/run/t5284b.check | 1
-rw-r--r--  test/files/run/t5284b.scala | 28
-rw-r--r--  test/files/run/t5284c.check | 1
-rw-r--r--  test/files/run/t5284c.scala | 30
-rw-r--r--  test/files/run/t5966.check | 3
-rw-r--r--  test/files/run/t5966.scala | 9
45 files changed, 956 insertions, 518 deletions
diff --git a/META-INF/MANIFEST.MF b/META-INF/MANIFEST.MF
index 9cbe99ab23..28a70d2879 100644
--- a/META-INF/MANIFEST.MF
+++ b/META-INF/MANIFEST.MF
@@ -40,9 +40,13 @@ Export-Package:
scala.tools.nsc.util,
scala.tools.util,
scala.reflect.internal,
+ scala.reflect.internal.pickling,
scala.reflect.internal.settings,
+ scala.reflect.internal.util,
+ scala.reflect.makro,
scala.reflect.runtime,
scala.reflect.internal.transform,
+ scala.reflect.api,
ch.epfl.lamp.compiler.msil,
ch.epfl.lamp.compiler.msil.emit,
ch.epfl.lamp.compiler.msil.util,
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index bce9f28847..3232bde3b4 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -253,7 +253,9 @@ self =>
final val InBlock = 1
final val InTemplate = 2
- lazy val ScalaValueClassNames = Seq(tpnme.AnyVal,
+ // These symbols may not yet be loaded (e.g. in the ide) so don't go
+ // through definitions to obtain the names.
+ lazy val ScalaValueClassNames = Seq(tpnme.AnyVal,
tpnme.Unit,
tpnme.Boolean,
tpnme.Byte,
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 0c527fbaf4..59adcc637a 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -458,6 +458,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
val CLASS_CONSTRUCTOR_NAME = "<clinit>"
val INSTANCE_CONSTRUCTOR_NAME = "<init>"
+ val INNER_CLASSES_FLAGS =
+ (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
+
// -----------------------------------------------------------------------------------------
// factory methods
// -----------------------------------------------------------------------------------------
@@ -644,6 +648,86 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
+ def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
+ /** The outer name for this inner class. Note that it returns null
+ * when the inner class should not get an index in the constant pool.
+ * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+ */
+ def outerName(innerSym: Symbol): String = {
+ if (innerSym.originalEnclosingMethod != NoSymbol)
+ null
+ else {
+ val outerName = javaName(innerSym.rawowner)
+ if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
+ else outerName
+ }
+ }
+
+ def innerName(innerSym: Symbol): String =
+ if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
+ null
+ else
+ innerSym.rawname + innerSym.moduleSuffix
+
+ // add inner classes which might not have been referenced yet
+ afterErasure {
+ for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
+ innerClassBuffer += m
+ }
+
+ val allInners: List[Symbol] = innerClassBuffer.toList
+ if (allInners.nonEmpty) {
+ debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
+
+ // entries ready to be serialized into the classfile, used to detect duplicates.
+ val entries = mutable.Map.empty[String, String]
+
+ // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
+ for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
+ val flags = mkFlags(
+ if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
+ javaFlags(innerSym),
+ if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
+ ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ val jname = javaName(innerSym) // never null
+ val oname = outerName(innerSym) // null when method-enclosed
+ val iname = innerName(innerSym) // null for anonymous inner class
+
+ // Mimicking javap inner class output
+ debuglog(
+ if (oname == null || iname == null) "//class " + jname
+ else "//%s=class %s of class %s".format(iname, jname, oname)
+ )
+
+ assert(jname != null, "javaName is broken.") // documentation
+ val doAdd = entries.get(jname) match {
+ // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
+ case Some(prevOName) =>
+ // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
+ // i.e. for them it must be the case that oname == java/lang/Thread
+ assert(prevOName == oname, "duplicate")
+ false
+ case None => true
+ }
+
+ if(doAdd) {
+ entries += (jname -> oname)
+ jclass.visitInnerClass(jname, oname, iname, flags)
+ }
+
+ /*
+ * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
+ * If a class file has a version number that is greater than or equal to 51.0, and
+ * has an InnerClasses attribute in its attributes table, then for all entries in the
+ * classes array of the InnerClasses attribute, the value of the
+ * outer_class_info_index item must be zero if the value of the
+ * inner_name_index item is zero.
+ */
+
+ }
+ }
+ }
+
} // end of class JBuilder
@@ -654,10 +738,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// more constants
// -----------------------------------------------------------------------------------------
- val INNER_CLASSES_FLAGS =
- (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
- asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
-
val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
@@ -969,86 +1049,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
- def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
- /** The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
- */
- def outerName(innerSym: Symbol): String = {
- if (innerSym.originalEnclosingMethod != NoSymbol)
- null
- else {
- val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
- else outerName
- }
- }
-
- def innerName(innerSym: Symbol): String =
- if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
- null
- else
- innerSym.rawname + innerSym.moduleSuffix
-
- // add inner classes which might not have been referenced yet
- afterErasure {
- for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
- innerClassBuffer += m
- }
-
- val allInners: List[Symbol] = innerClassBuffer.toList
- if (allInners.nonEmpty) {
- debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
-
- // entries ready to be serialized into the classfile, used to detect duplicates.
- val entries = mutable.Map.empty[String, String]
-
- // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
- for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
- val flags = mkFlags(
- if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
- javaFlags(innerSym),
- if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
- ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
- val jname = javaName(innerSym) // never null
- val oname = outerName(innerSym) // null when method-enclosed
- val iname = innerName(innerSym) // null for anonymous inner class
-
- // Mimicking javap inner class output
- debuglog(
- if (oname == null || iname == null) "//class " + jname
- else "//%s=class %s of class %s".format(iname, jname, oname)
- )
-
- assert(jname != null, "javaName is broken.") // documentation
- val doAdd = entries.get(jname) match {
- // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
- case Some(prevOName) =>
- // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
- // i.e. for them it must be the case that oname == java/lang/Thread
- assert(prevOName == oname, "duplicate")
- false
- case None => true
- }
-
- if(doAdd) {
- entries += (jname -> oname)
- jclass.visitInnerClass(jname, oname, iname, flags)
- }
-
- /*
- * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
- * If a class file has a version number that is greater than or equal to 51.0, and
- * has an InnerClasses attribute in its attributes table, then for all entries in the
- * classes array of the InnerClasses attribute, the value of the
- * outer_class_info_index item must be zero if the value of the
- * inner_name_index item is zero.
- */
-
- }
- }
- }
-
/** Adds a @remote annotation, actual use unknown.
*
* Invoked from genMethod() and addForwarder().
@@ -3033,9 +3033,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
constructor.visitEnd()
- // TODO no inner classes attribute is written. Confirm intent.
- assert(innerClassBuffer.isEmpty, innerClassBuffer)
-
+ addInnerClasses(clasz.symbol, beanInfoClass)
beanInfoClass.visitEnd()
writeIfNotTooBig("BeanInfo ", beanInfoName, beanInfoClass, clasz.symbol)
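Note on the ordering used above: after flatten, a nested class's flattened name extends its enclosing class's name, so sorting the buffered symbols by name length puts every enclosing class before the classes it encloses, which is what the "inner classes succeed their enclosing class" comment relies on. A minimal, self-contained sketch of that invariant (hypothetical names, not part of the patch):

    object InnerClassOrdering {
      def main(args: Array[String]): Unit = {
        // Flattened binary names as the backend sees them after the flatten phase.
        val inners = List("Outer$Inner$Deepest", "Outer$Inner", "Outer")
        // Shorter names sort first, so each enclosing class precedes its nested classes,
        // the ordering the Eclipse Java compiler expects in the InnerClasses attribute.
        println(inners.sortBy(_.length)) // List(Outer, Outer$Inner, Outer$Inner$Deepest)
      }
    }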
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index d4ee9b6b48..5cc6e78e9d 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -100,9 +100,29 @@ abstract class DeadCodeElimination extends SubComponent {
var rd = rdef.in(bb);
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
i match {
+
case LOAD_LOCAL(l) =>
defs = defs + Pair(((bb, idx)), rd.vars)
-// Console.println(i + ": " + (bb, idx) + " rd: " + rd + " and having: " + defs)
+
+ case STORE_LOCAL(_) =>
+          /* SI-4935 Check whether a module is on the stack top; if so, mark the instruction that loaded it
+           * (otherwise any side effects of the module's constructor are lost).
+ * (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM)
+ * are already marked (case clause below).
+ * (b) A CALL_METHOD targeting a method `m1` where the receiver is potentially a module (case clause below)
+ * will have the module's load marked provided `isSideEffecting(m1)`.
+ * TODO check for purity (the ICode?) of the module's constructor (besides m1's purity).
+ * See also https://github.com/paulp/scala/blob/topic/purity-analysis/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+ */
+ val necessary = rdef.findDefs(bb, idx, 1) exists { p =>
+ val (bb1, idx1) = p
+ bb1(idx1) match {
+ case LOAD_MODULE(module) => isLoadNeeded(module)
+ case _ => false
+ }
+ }
+ if (necessary) worklist += ((bb, idx))
+
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() => worklist += ((bb, idx))
@@ -129,6 +149,10 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
+ private def isLoadNeeded(module: Symbol): Boolean = {
+ module.info.member(nme.CONSTRUCTOR).filter(isSideEffecting) != NoSymbol
+ }
+
/** Mark useful instructions. Instructions in the worklist are each inspected and their
* dependencies are marked useful too, and added to the worklist.
*/
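The SI-4935 case is easiest to see at the source level: a module reference stored into an otherwise dead local must keep its LOAD_MODULE alive, because loading the module may run its constructor (and the constructor's side effects) for the first time. A minimal sketch in the spirit of the regression test, with hypothetical names rather than the actual test/files/run/t4935.scala:

    object Init {
      // Running the module constructor is the observable side effect.
      println("Init constructor ran")
      val ready = true
    }

    object Demo {
      def main(args: Array[String]): Unit = {
        // `m` itself is never read, but under the optimizer the load of `Init` must
        // survive dead-code elimination so the println above still executes.
        val m = Init
        println("done")
      }
    }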
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index bc4483923a..e5119eac71 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -323,7 +323,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// statements coming from the original class need retyping in the current context
debuglog("retyping " + stat2)
- val d = new specializeTypes.Duplicator
+ val d = new specializeTypes.Duplicator(Map[Symbol, Type]())
d.retyped(localTyper.context1.asInstanceOf[d.Context],
stat2,
genericClazz,
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index c4c769d7cf..1d820afe11 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -450,7 +450,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Type parameters that survive when specializing in the specified environment. */
def survivingParams(params: List[Symbol], env: TypeEnv) =
- params.filter(p => !p.isSpecialized || !isPrimitiveValueType(env(p)))
+ params filter {
+ p =>
+ !p.isSpecialized ||
+ !env.contains(p) ||
+ !isPrimitiveValueType(env(p))
+ }
/** Produces the symbols from type parameters `syms` of the original owner,
* in the given type environment `env`. The new owner is `nowner`.
@@ -1176,7 +1181,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
|| specializedTypeVars(t1).nonEmpty
|| specializedTypeVars(t2).nonEmpty)
}
-
+
env forall { case (tvar, tpe) =>
matches(tvar.info.bounds.lo, tpe) && matches(tpe, tvar.info.bounds.hi) || {
if (warnings)
@@ -1192,10 +1197,58 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
}
+
+ def satisfiabilityConstraints(env: TypeEnv): Option[TypeEnv] = {
+ val noconstraints = Some(emptyEnv)
+ def matches(tpe1: Type, tpe2: Type): Option[TypeEnv] = {
+ val t1 = subst(env, tpe1)
+ val t2 = subst(env, tpe2)
+ // log("---------> " + tpe1 + " matches " + tpe2)
+ // log(t1 + ", " + specializedTypeVars(t1))
+ // log(t2 + ", " + specializedTypeVars(t2))
+ // log("unify: " + unify(t1, t2, env, false, false) + " in " + env)
+ if (t1 <:< t2) noconstraints
+ else if (specializedTypeVars(t1).nonEmpty) Some(unify(t1, t2, env, false, false) -- env.keys)
+ else if (specializedTypeVars(t2).nonEmpty) Some(unify(t2, t1, env, false, false) -- env.keys)
+ else None
+ }
+
+ env.foldLeft[Option[TypeEnv]](noconstraints) {
+ case (constraints, (tvar, tpe)) =>
+ val loconstraints = matches(tvar.info.bounds.lo, tpe)
+ val hiconstraints = matches(tpe, tvar.info.bounds.hi)
+ val allconstraints = for (c <- constraints; l <- loconstraints; h <- hiconstraints) yield c ++ l ++ h
+ allconstraints
+ }
+ }
- class Duplicator extends {
+ /** This duplicator additionally performs casts of expressions if that is allowed by the `casts` map. */
+ class Duplicator(casts: Map[Symbol, Type]) extends {
val global: SpecializeTypes.this.global.type = SpecializeTypes.this.global
- } with typechecker.Duplicators
+ } with typechecker.Duplicators {
+ private val (castfrom, castto) = casts.unzip
+ private object CastMap extends SubstTypeMap(castfrom.toList, castto.toList)
+
+ class BodyDuplicator(_context: Context) extends super.BodyDuplicator(_context) {
+ override def castType(tree: Tree, pt: Type): Tree = {
+ // log(" expected type: " + pt)
+ // log(" tree type: " + tree.tpe)
+ tree.tpe = if (tree.tpe != null) fixType(tree.tpe) else null
+ // log(" tree type: " + tree.tpe)
+ val ntree = if (tree.tpe != null && !(tree.tpe <:< pt)) {
+ val casttpe = CastMap(tree.tpe)
+ if (casttpe <:< pt) gen.mkCast(tree, casttpe)
+ else if (casttpe <:< CastMap(pt)) gen.mkCast(tree, pt)
+ else tree
+ } else tree
+ ntree.tpe = null
+ ntree
+ }
+ }
+
+ protected override def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
+
+ }
/** A tree symbol substituter that substitutes on type skolems.
* If a type parameter is a skolem, it looks for the original
@@ -1475,14 +1528,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
deriveDefDef(tree1)(transform)
case NormalizedMember(target) =>
- debuglog("Normalized member: " + symbol + ", target: " + target)
- if (target.isDeferred || conflicting(typeEnv(symbol))) {
+ val constraints = satisfiabilityConstraints(typeEnv(symbol))
+ log("constraints: " + constraints)
+ if (target.isDeferred || constraints == None) {
deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
- }
- else {
+ } else {
// we have an rhs, specialize it
val tree1 = reportTypeError {
- duplicateBody(ddef, target)
+ duplicateBody(ddef, target, constraints.get)
}
debuglog("implementation: " + tree1)
deriveDefDef(tree1)(transform)
@@ -1546,7 +1599,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val tree1 = deriveValDef(tree)(_ => body(symbol.alias).duplicate)
debuglog("now typing: " + tree1 + " in " + tree.symbol.owner.fullName)
- val d = new Duplicator
+ val d = new Duplicator(emptyEnv)
val newValDef = d.retyped(
localTyper.context1.asInstanceOf[d.Context],
tree1,
@@ -1571,12 +1624,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
super.transform(tree)
}
}
-
- private def duplicateBody(tree: DefDef, source: Symbol) = {
+
+ /** Duplicate the body of the given method `tree` to the new symbol `source`.
+ *
+ * Knowing that the method can be invoked only in the `castmap` type environment,
+ * this method will insert casts for all the expressions of types mappend in the
+ * `castmap`.
+ */
+ private def duplicateBody(tree: DefDef, source: Symbol, castmap: TypeEnv = emptyEnv) = {
val symbol = tree.symbol
val meth = addBody(tree, source)
- val d = new Duplicator
+ val d = new Duplicator(castmap)
debuglog("-->d DUPLICATING: " + meth)
d.retyped(
localTyper.context1.asInstanceOf[d.Context],
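The satisfiability check and the cast-inserting Duplicator are exercised when specialized and generic type parameters meet, as in the SI-5284 family of tests. A rough, hypothetical illustration of the shape of code involved (not the actual test files):

    class Wrap[@specialized(Int) A](a: A) {
      // When the Int-specialized subclass is generated, the body is retyped with a
      // Duplicator that may insert casts so expressions conform to the specialized
      // signature, instead of bailing out with a sys.error stub.
      def get: A = a
    }

    object Demo {
      def main(args: Array[String]): Unit = {
        println(new Wrap(42).get)    // dispatches to the Int-specialized variant
        println(new Wrap("s").get)   // dispatches to the generic variant
      }
    }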
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 6386273c9d..63d1bd0e9f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -21,7 +21,7 @@ abstract class Duplicators extends Analyzer {
def retyped(context: Context, tree: Tree): Tree = {
resetClassOwners
- (new BodyDuplicator(context)).typed(tree)
+ (newBodyDuplicator(context)).typed(tree)
}
/** Retype the given tree in the given context. Use this method when retyping
@@ -37,15 +37,17 @@ abstract class Duplicators extends Analyzer {
envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList)
debuglog("retyped with env: " + env)
- (new BodyDuplicator(context)).typed(tree)
+ newBodyDuplicator(context).typed(tree)
}
+ protected def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
+
def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree =
- (new BodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis)
+ (newBodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis)
/** Return the special typer for duplicate method bodies. */
override def newTyper(context: Context): Typer =
- new BodyDuplicator(context)
+ newBodyDuplicator(context)
private def resetClassOwners() {
oldClassOwner = null
@@ -209,6 +211,11 @@ abstract class Duplicators extends Analyzer {
}
}
+ /** Optionally cast this tree into some other type, if required.
+ * Unless overridden, just returns the tree.
+ */
+ def castType(tree: Tree, pt: Type): Tree = tree
+
/** Special typer method for re-type checking trees. It expects a typed tree.
* Returns a typed tree that has fresh symbols for all definitions in the original tree.
*
@@ -319,10 +326,10 @@ abstract class Duplicators extends Analyzer {
super.typed(atPos(tree.pos)(tree1), mode, pt)
case This(_) =>
- // log("selection on this, plain: " + tree)
+ debuglog("selection on this, plain: " + tree)
tree.symbol = updateSym(tree.symbol)
- tree.tpe = null
- val tree1 = super.typed(tree, mode, pt)
+ val ntree = castType(tree, pt)
+ val tree1 = super.typed(ntree, mode, pt)
// log("plain this typed to: " + tree1)
tree1
/* no longer needed, because Super now contains a This(...)
@@ -358,16 +365,18 @@ abstract class Duplicators extends Analyzer {
case EmptyTree =>
// no need to do anything, in particular, don't set the type to null, EmptyTree.tpe_= asserts
tree
-
+
case _ =>
- // log("Duplicators default case: " + tree.summaryString + " -> " + tree)
+ debuglog("Duplicators default case: " + tree.summaryString)
+ debuglog(" ---> " + tree)
if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
- tree.tpe = null
- super.typed(tree, mode, pt)
+ val ntree = castType(tree, pt)
+ super.typed(ntree, mode, pt)
}
}
+
}
}
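Taken together, `newBodyDuplicator` and `castType` form a small template-method extension point: the base typer never names the subclass, it only asks the factory for a BodyDuplicator and calls the hook. A stripped-down sketch of the pattern, purely illustrative and outside the compiler:

    abstract class Duplicating {
      class Body {
        // Default hook: leave the "tree" untouched.
        def castType(tree: String, pt: String): String = tree
      }
      // Subclasses override the factory to swap in a richer Body.
      protected def newBody(): Body = new Body
      def retype(tree: String, pt: String): String = newBody().castType(tree, pt)
    }

    class CastingDuplicating extends Duplicating {
      class CastingBody extends Body {
        override def castType(tree: String, pt: String): String = s"($tree: $pt)"
      }
      override protected def newBody(): Body = new CastingBody
    }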
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index e1fb683aa9..177d1ddf19 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -88,9 +88,11 @@ trait EtaExpansion { self: Analyzer =>
defs ++= stats
liftoutPrefix(fun)
case Apply(fn, args) =>
- val byName = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe))
- // zipAll: with repeated params, there might be more args than params
- val newArgs = args.zipAll(byName, EmptyTree, false) map { case (arg, byN) => liftout(arg, byN) }
+ val byName: Int => Option[Boolean] = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe)).lift
+ val newArgs = mapWithIndex(args) { (arg, i) =>
+ // with repeated params, there might be more or fewer args than params
+ liftout(arg, byName(i).getOrElse(false))
+ }
treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null
case TypeApply(fn, args) =>
treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
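The replacement of `zipAll` with a lifted lookup makes the by-name test total over any argument index: `params.map(...).lift` yields an `Int => Option[Boolean]` that returns None for indices beyond the parameter list, which is exactly what happens when a repeated parameter absorbs extra arguments (or none at all). A standalone sketch of the idiom, with made-up data:

    object LiftDemo {
      def main(args: Array[String]): Unit = {
        val byNameFlags = List(false, true)     // one flag per declared parameter
        val byName = byNameFlags.lift           // Int => Option[Boolean], total on any index
        val callArgs = List("a", "b", "c", "d") // more args than params (repeated parameter)
        val tagged = callArgs.zipWithIndex.map { case (arg, i) =>
          // Out-of-range indices fall back to by-value, mirroring the compiler change.
          (arg, byName(i).getOrElse(false))
        }
        println(tagged) // List((a,false), (b,true), (c,false), (d,false))
      }
    }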
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index f7e00109ae..68782379a6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1125,7 +1125,7 @@ trait Implicits {
* such that some part of `tp` has C as one of its superclasses.
*/
private def implicitsOfExpectedType: Infoss = {
- Statistics.incCounter(implicitCacheHits)
+ Statistics.incCounter(implicitCacheAccs)
implicitsCache get pt match {
case Some(implicitInfoss) =>
Statistics.incCounter(implicitCacheHits)
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 9b8ddffb49..f99d0e733b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -2362,6 +2362,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// patmatDebug("enum bool "+ tp)
Some(List(ConstantType(Constant(true)), ConstantType(Constant(false))))
// TODO case _ if tp.isTupleType => // recurse into component types
+ case modSym: ModuleClassSymbol =>
+ Some(List(tp))
case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
// patmatDebug("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
None
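The new ModuleClassSymbol case lets the exhaustiveness checker enumerate an object's type: its only inhabitant is the object itself, so a match on it with a single case is complete. A hypothetical example in the spirit of the t5968 test (not the actual file):

    object Config

    object Demo {
      def describe(c: Config.type): String = c match {
        case Config => "the one and only Config"  // exhaustive: Config.type has one value
      }
      def main(args: Array[String]): Unit =
        println(describe(Config))
    }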
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index daae69590f..f67cec730b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -124,7 +124,15 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
!(member.isAbstractOverride && member.isIncompleteIn(clazz)))
unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+
"unless it is overridden by a member declared `abstract' and `override'");
+ } else if (mix == tpnme.EMPTY && !sym.owner.isTrait){
+ // SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract.
+ val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)
+ intermediateClasses.map(sym.overridingSymbol).find(s => s.isDeferred && !s.isAbstractOverride && !s.owner.isTrait).foreach {
+ absSym =>
+ unit.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from ${clazz} because ${absSym.owner} redeclares it as abstract")
+ }
}
+
if (name.isTermName && mix == tpnme.EMPTY && (clazz.isTrait || clazz != currentClass || !validCurrentOwner))
ensureAccessor(sel)
else sel
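At the source level, the SI-4989 check rejects a super call that would resolve to a method an intermediate class has re-abstracted, because there is no concrete implementation at that point in the linearization for invokespecial to hit. A minimal sketch of the now-rejected shape (hypothetical names, not the actual t4989 test):

    class A                    { def show: String = "A" }
    abstract class B extends A { override def show: String }  // re-abstracts show
    class C extends B {
      override def show: String = "C"
      // With this change the line below is rejected at compile time, because B
      // redeclares show as abstract, instead of compiling into bytecode that
      // fails at run time:
      // def callSuper = super.show
    }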
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 69d3fd7f47..acf1b3dc59 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1833,7 +1833,7 @@ trait Typers extends Modes with Adaptations with Tags {
val params = fn.tpe.params
val args2 = if (params.isEmpty || !isRepeatedParamType(params.last.tpe)) args
else args.take(params.length - 1) :+ EmptyTree
- assert(sameLength(args2, params), "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug
+ assert(sameLength(args2, params) || call.isErrorTyped, "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug
(superConstr, args1 ::: args2)
case Block(stats, expr) if !stats.isEmpty =>
decompose(stats.last)
@@ -2036,7 +2036,7 @@ trait Typers extends Modes with Adaptations with Tags {
transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe)
}
- if (meth.isClassConstructor && !isPastTyper && !reporter.hasErrors && !meth.owner.isSubClass(AnyValClass)) {
+ if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass)) {
// At this point in AnyVal there is no supercall, which will blow up
// in computeParamAliases; there's nothing to be computed for Anyval anyway.
if (meth.isPrimaryConstructor)
@@ -4025,7 +4025,7 @@ trait Typers extends Modes with Adaptations with Tags {
} else {
context.enclMethod.returnsSeen = true
val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe)
-
+
// Warn about returning a value if no value can be returned.
if (restpt.tpe.typeSymbol == UnitClass) {
// The typing in expr1 says expr is Unit (it has already been coerced if
@@ -5131,7 +5131,7 @@ trait Typers extends Modes with Adaptations with Tags {
indentTyping()
var alreadyTyped = false
- val startByType = Statistics.pushTimerClass(byTypeNanos, tree.getClass)
+ val startByType = Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass))
Statistics.incCounter(visitsByType, tree.getClass)
try {
if (context.retyping &&
@@ -5187,7 +5187,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
finally {
deindentTyping()
- Statistics.popTimerClass(byTypeNanos, startByType)
+ Statistics.popTimer(byTypeStack, startByType)
}
}
@@ -5375,10 +5375,11 @@ object TypersStats {
val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
- val failedSilentNanos = Statistics.newSubTimer ("time spent in failed", typerNanos)
- val failedApplyNanos = Statistics.newSubTimer (" failed apply", typerNanos)
- val failedOpEqNanos = Statistics.newSubTimer (" failed op=", typerNanos)
- val isReferencedNanos = Statistics.newSubTimer ("time spent ref scanning", typerNanos)
- val visitsByType = Statistics.newByClass ("#visits by tree node", "typer")(Statistics.newCounter(""))
- val byTypeNanos = Statistics.newByClassTimerStack("time spent by tree node", typerNanos)
+ val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos)
+ val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos)
+ val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos)
+ val isReferencedNanos = Statistics.newSubTimer("time spent ref scanning", typerNanos)
+ val visitsByType = Statistics.newByClass("#visits by tree node", "typer")(Statistics.newCounter(""))
+ val byTypeNanos = Statistics.newByClass("time spent by tree node", "typer")(Statistics.newStackableTimer("", typerNanos))
+ val byTypeStack = Statistics.newTimerStack()
}
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index 039a57041c..e895c94599 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -18,17 +18,17 @@ import scala.collection.mutable.{ Builder, MapBuilder }
private[immutable] object IntMapUtils extends BitOperations.Int {
def branchMask(i: Int, j: Int) = highestOneBit(i ^ j)
- def join[T](p1 : Int, t1 : IntMap[T], p2 : Int, t2 : IntMap[T]) : IntMap[T] = {
- val m = branchMask(p1, p2);
- val p = mask(p1, m);
+ def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = {
+ val m = branchMask(p1, p2)
+ val p = mask(p1, m)
if (zero(p1, m)) IntMap.Bin(p, m, t1, t2)
- else IntMap.Bin(p, m, t2, t1);
+ else IntMap.Bin(p, m, t2, t1)
}
- def bin[T](prefix : Int, mask : Int, left : IntMap[T], right : IntMap[T]) : IntMap[T] = (left, right) match {
- case (left, IntMap.Nil) => left;
- case (IntMap.Nil, right) => right;
- case (left, right) => IntMap.Bin(prefix, mask, left, right);
+ def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match {
+ case (left, IntMap.Nil) => left
+ case (IntMap.Nil, right) => right
+ case (left, right) => IntMap.Bin(prefix, mask, left, right)
}
}
@@ -50,9 +50,9 @@ object IntMap {
}
def empty[T] : IntMap[T] = IntMap.Nil;
- def singleton[T](key : Int, value : T) : IntMap[T] = IntMap.Tip(key, value);
- def apply[T](elems : (Int, T)*) : IntMap[T] =
- elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2));
+ def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value);
+ def apply[T](elems: (Int, T)*): IntMap[T] =
+ elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
private[immutable] case object Nil extends IntMap[Nothing] {
// Important! Without this equals method in place, an infinite
@@ -66,15 +66,15 @@ object IntMap {
}
}
- private[immutable] case class Tip[+T](key : Int, value : T) extends IntMap[T]{
+ private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{
def withValue[S](s: S) =
- if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]];
- else IntMap.Tip(key, s);
+ if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]]
+ else IntMap.Tip(key, s)
}
- private[immutable] case class Bin[+T](prefix : Int, mask : Int, left : IntMap[T], right : IntMap[T]) extends IntMap[T]{
- def bin[S](left : IntMap[S], right : IntMap[S]) : IntMap[S] = {
- if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]];
- else IntMap.Bin[S](prefix, mask, left, right);
+ private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] {
+ def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = {
+ if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]]
+ else IntMap.Bin[S](prefix, mask, left, right)
}
}
@@ -83,60 +83,60 @@ object IntMap {
import IntMap._
// Iterator over a non-empty IntMap.
-private[immutable] abstract class IntMapIterator[V, T](it : IntMap[V]) extends AbstractIterator[T] {
+private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] {
// Basically this uses a simple stack to emulate conversion over the tree. However
// because we know that Ints are at least 32 bits we can have at most 32 IntMap.Bins and
// one IntMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
// depth is 33 and
- var index = 0;
- var buffer = new Array[AnyRef](33);
+ var index = 0
+ var buffer = new Array[AnyRef](33)
def pop = {
- index -= 1;
- buffer(index).asInstanceOf[IntMap[V]];
+ index -= 1
+ buffer(index).asInstanceOf[IntMap[V]]
}
- def push(x : IntMap[V]) {
- buffer(index) = x.asInstanceOf[AnyRef];
- index += 1;
+ def push(x: IntMap[V]) {
+ buffer(index) = x.asInstanceOf[AnyRef]
+ index += 1
}
- push(it);
+ push(it)
/**
* What value do we assign to a tip?
*/
- def valueOf(tip : IntMap.Tip[V]) : T;
+ def valueOf(tip: IntMap.Tip[V]): T
- def hasNext = index != 0;
- final def next : T =
+ def hasNext = index != 0
+ final def next: T =
pop match {
case IntMap.Bin(_,_, t@IntMap.Tip(_, _), right) => {
- push(right);
- valueOf(t);
+ push(right)
+ valueOf(t)
}
case IntMap.Bin(_, _, left, right) => {
- push(right);
- push(left);
- next;
+ push(right)
+ push(left)
+ next
}
- case t@IntMap.Tip(_, _) => valueOf(t);
+ case t@IntMap.Tip(_, _) => valueOf(t)
// This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap
// and don't return an IntMapIterator for IntMap.Nil.
- case IntMap.Nil => sys.error("Empty maps not allowed as subtrees");
+ case IntMap.Nil => sys.error("Empty maps not allowed as subtrees")
}
}
-private[immutable] class IntMapEntryIterator[V](it : IntMap[V]) extends IntMapIterator[V, (Int, V)](it){
- def valueOf(tip : IntMap.Tip[V]) = (tip.key, tip.value);
+private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) {
+ def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value)
}
-private[immutable] class IntMapValueIterator[V](it : IntMap[V]) extends IntMapIterator[V, V](it){
- def valueOf(tip : IntMap.Tip[V]) = tip.value
+private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) {
+ def valueOf(tip: IntMap.Tip[V]) = tip.value
}
-private[immutable] class IntMapKeyIterator[V](it : IntMap[V]) extends IntMapIterator[V, Int](it){
- def valueOf(tip : IntMap.Tip[V]) = tip.key
+private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) {
+ def valueOf(tip: IntMap.Tip[V]) = tip.key
}
import IntMap._
@@ -145,7 +145,7 @@ import IntMap._
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Integer Maps</a>
* by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
*
- * Note: This class is as of 2.8 largely superseded by HashMap.
+ * '''Note:''' This class is as of 2.8 largely superseded by HashMap.
*
* @tparam T type of the values associated with integer keys.
*
@@ -155,17 +155,16 @@ import IntMap._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-sealed abstract class IntMap[+T]
-extends AbstractMap[Int, T]
+sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
with Map[Int, T]
with MapLike[Int, T, IntMap[T]] {
- override def empty: IntMap[T] = IntMap.Nil;
+ override def empty: IntMap[T] = IntMap.Nil
override def toList = {
- val buffer = new scala.collection.mutable.ListBuffer[(Int, T)];
- foreach(buffer += _);
- buffer.toList;
+ val buffer = new scala.collection.mutable.ListBuffer[(Int, T)]
+ foreach(buffer += _)
+ buffer.toList
}
/**
@@ -173,109 +172,112 @@ extends AbstractMap[Int, T]
*
* @return an iterator over pairs of integer keys and corresponding values.
*/
- def iterator : Iterator[(Int, T)] = this match {
- case IntMap.Nil => Iterator.empty;
- case _ => new IntMapEntryIterator(this);
+ def iterator: Iterator[(Int, T)] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapEntryIterator(this)
}
/**
* Loops over the key, value pairs of the map in unsigned order of the keys.
*/
- override final def foreach[U](f : ((Int, T)) => U) : Unit = this match {
- case IntMap.Bin(_, _, left, right) => {left.foreach(f); right.foreach(f); }
- case IntMap.Tip(key, value) => f((key, value));
- case IntMap.Nil => {};
+ override final def foreach[U](f: ((Int, T)) => U): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
+ case IntMap.Tip(key, value) => f((key, value))
+ case IntMap.Nil =>
}
- override def keysIterator : Iterator[Int] = this match {
- case IntMap.Nil => Iterator.empty;
- case _ => new IntMapKeyIterator(this);
+ override def keysIterator: Iterator[Int] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapKeyIterator(this)
}
/**
- * Loop over the keys of the map. The same as keys.foreach(f), but may
+ * Loop over the keys of the map. The same as `keys.foreach(f)`, but may
* be more efficient.
*
* @param f The loop body
*/
- final def foreachKey(f : Int => Unit) : Unit = this match {
- case IntMap.Bin(_, _, left, right) => {left.foreachKey(f); right.foreachKey(f); }
- case IntMap.Tip(key, _) => f(key);
- case IntMap.Nil => {}
+ final def foreachKey(f: Int => Unit): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) }
+ case IntMap.Tip(key, _) => f(key)
+ case IntMap.Nil =>
}
- override def valuesIterator : Iterator[T] = this match {
- case IntMap.Nil => Iterator.empty;
- case _ => new IntMapValueIterator(this);
+ override def valuesIterator: Iterator[T] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapValueIterator(this)
}
/**
- * Loop over the keys of the map. The same as keys.foreach(f), but may
+ * Loop over the keys of the map. The same as `keys.foreach(f)`, but may
* be more efficient.
*
* @param f The loop body
*/
- final def foreachValue(f : T => Unit) : Unit = this match {
- case IntMap.Bin(_, _, left, right) => {left.foreachValue(f); right.foreachValue(f); }
- case IntMap.Tip(_, value) => f(value);
- case IntMap.Nil => {};
+ final def foreachValue(f: T => Unit): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) }
+ case IntMap.Tip(_, value) => f(value)
+ case IntMap.Nil =>
}
override def stringPrefix = "IntMap"
- override def isEmpty = this == IntMap.Nil;
+ override def isEmpty = this == IntMap.Nil
- override def filter(f : ((Int, T)) => Boolean) : IntMap[T] = this match {
+ override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match {
case IntMap.Bin(prefix, mask, left, right) => {
- val (newleft, newright) = (left.filter(f), right.filter(f));
- if ((left eq newleft) && (right eq newright)) this;
- else bin(prefix, mask, newleft, newright);
+ val (newleft, newright) = (left.filter(f), right.filter(f))
+ if ((left eq newleft) && (right eq newright)) this
+ else bin(prefix, mask, newleft, newright)
}
case IntMap.Tip(key, value) =>
if (f((key, value))) this
- else IntMap.Nil;
- case IntMap.Nil => IntMap.Nil;
+ else IntMap.Nil
+ case IntMap.Nil => IntMap.Nil
}
- def transform[S](f : (Int, T) => S) : IntMap[S] = this match {
- case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f));
- case t@IntMap.Tip(key, value) => t.withValue(f(key, value));
- case IntMap.Nil => IntMap.Nil;
+ def transform[S](f: (Int, T) => S): IntMap[S] = this match {
+ case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f))
+ case t@IntMap.Tip(key, value) => t.withValue(f(key, value))
+ case IntMap.Nil => IntMap.Nil
}
- final override def size : Int = this match {
- case IntMap.Nil => 0;
- case IntMap.Tip(_, _) => 1;
- case IntMap.Bin(_, _, left, right) => left.size + right.size;
+ final override def size: Int = this match {
+ case IntMap.Nil => 0
+ case IntMap.Tip(_, _) => 1
+ case IntMap.Bin(_, _, left, right) => left.size + right.size
}
- final def get(key : Int) : Option[T] = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key);
- case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None;
- case IntMap.Nil => None;
+ final def get(key: Int): Option[T] = this match {
+ case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key)
+ case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None
+ case IntMap.Nil => None
}
- final override def getOrElse[S >: T](key : Int, default : =>S) : S = this match {
- case IntMap.Nil => default;
- case IntMap.Tip(key2, value) => if (key == key2) value else default;
- case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default);
+ final override def getOrElse[S >: T](key: Int, default: => S): S = this match {
+ case IntMap.Nil => default
+ case IntMap.Tip(key2, value) => if (key == key2) value else default
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default)
}
- final override def apply(key : Int) : T = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key);
- case IntMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found");
- case IntMap.Nil => sys.error("key not found");
+ final override def apply(key: Int): T = this match {
+ case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key)
+ case IntMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found")
+ case IntMap.Nil => sys.error("key not found")
}
def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2)
- override def updated[S >: T](key : Int, value : S) : IntMap[S] = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right)
- else IntMap.Bin(prefix, mask, left, right.updated(key, value));
- case IntMap.Tip(key2, value2) => if (key == key2) IntMap.Tip(key, value);
- else join(key, IntMap.Tip(key, value), key2, this);
- case IntMap.Nil => IntMap.Tip(key, value);
+ override def updated[S >: T](key: Int, value: S): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right)
+ else IntMap.Bin(prefix, mask, left, right.updated(key, value))
+ case IntMap.Tip(key2, value2) =>
+ if (key == key2) IntMap.Tip(key, value)
+ else join(key, IntMap.Tip(key, value), key2, this)
+ case IntMap.Nil => IntMap.Tip(key, value)
}
/**
@@ -284,7 +286,7 @@ extends AbstractMap[Int, T]
* Equivalent to:
* {{{
* this.get(key) match {
- * case None => this.update(key, value);
+ * case None => this.update(key, value)
* case Some(oldvalue) => this.update(key, f(oldvalue, value)
* }
* }}}
@@ -295,24 +297,26 @@ extends AbstractMap[Int, T]
* @param f The function used to resolve conflicts.
* @return The updated map.
*/
- def updateWith[S >: T](key : Int, value : S, f : (T, S) => S) : IntMap[S] = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
- else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f));
- case IntMap.Tip(key2, value2) => if (key == key2) IntMap.Tip(key, f(value2, value));
- else join(key, IntMap.Tip(key, value), key2, this);
- case IntMap.Nil => IntMap.Tip(key, value);
+ def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+ else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+ case IntMap.Tip(key2, value2) =>
+ if (key == key2) IntMap.Tip(key, f(value2, value))
+ else join(key, IntMap.Tip(key, value), key2, this)
+ case IntMap.Nil => IntMap.Tip(key, value)
}
- def - (key : Int) : IntMap[T] = this match {
+ def - (key: Int): IntMap[T] = this match {
case IntMap.Bin(prefix, mask, left, right) =>
- if (!hasMatch(key, prefix, mask)) this;
- else if (zero(key, mask)) bin(prefix, mask, left - key, right);
- else bin(prefix, mask, left, right - key);
+ if (!hasMatch(key, prefix, mask)) this
+ else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+ else bin(prefix, mask, left, right - key)
case IntMap.Tip(key2, _) =>
- if (key == key2) IntMap.Nil;
- else this;
- case IntMap.Nil => IntMap.Nil;
+ if (key == key2) IntMap.Nil
+ else this
+ case IntMap.Nil => IntMap.Nil
}
/**
@@ -324,7 +328,7 @@ extends AbstractMap[Int, T]
* @param f The transforming function.
* @return The modified map.
*/
- def modifyOrRemove[S](f : (Int, T) => Option[S]) : IntMap[S] = this match {
+ def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match {
case IntMap.Bin(prefix, mask, left, right) =>
val newleft = left.modifyOrRemove(f)
val newright = right.modifyOrRemove(f)
@@ -350,25 +354,25 @@ extends AbstractMap[Int, T]
* @param f The function used to resolve conflicts between two mappings.
* @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
*/
- def unionWith[S >: T](that : IntMap[S], f : (Int, S, S) => S) : IntMap[S] = (this, that) match{
+ def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match{
case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
- else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1);
- else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f));
+ if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1)
+ else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f))
} else if (shorter(m2, m1)){
- if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
- else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2);
- else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f));
+ if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2)
+ else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f))
}
else {
- if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f));
- else join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
+ if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f))
+ else join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
}
- case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x));
- case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y));
- case (IntMap.Nil, x) => x;
- case (x, IntMap.Nil) => x;
+ case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x))
+ case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
+ case (IntMap.Nil, x) => x
+ case (x, IntMap.Nil) => x
}
/**
@@ -382,13 +386,13 @@ extends AbstractMap[Int, T]
* @param f The combining function.
* @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
*/
- def intersectionWith[S, R](that : IntMap[S], f : (Int, T, S) => R) : IntMap[R] = (this, that) match {
+ def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match {
case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) =>
if (shorter(m1, m2)) {
if (!hasMatch(p2, p1, m1)) IntMap.Nil
else if (zero(p2, m1)) l1.intersectionWith(that, f)
else r1.intersectionWith(that, f)
- } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f));
+ } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f))
else {
if (!hasMatch(p1, p2, m2)) IntMap.Nil
else if (zero(p1, m2)) this.intersectionWith(l2, f)
@@ -413,15 +417,16 @@ extends AbstractMap[Int, T]
* @param that The map to intersect with.
* @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
*/
- def intersection[R](that : IntMap[R]) : IntMap[T] = this.intersectionWith(that, (key : Int, value : T, value2 : R) => value);
+ def intersection[R](that: IntMap[R]): IntMap[T] =
+ this.intersectionWith(that, (key: Int, value: T, value2: R) => value)
- def ++[S >: T](that : IntMap[S]) =
+ def ++[S >: T](that: IntMap[S]) =
this.unionWith[S](that, (key, x, y) => y)
/**
* The entry with the lowest key value considered in unsigned order.
*/
- final def firstKey : Int = this match {
+ final def firstKey: Int = this match {
case Bin(_, _, l, r) => l.firstKey
case Tip(k, v) => k
case IntMap.Nil => sys.error("Empty set")
@@ -430,7 +435,7 @@ extends AbstractMap[Int, T]
/**
* The entry with the highest key value considered in unsigned order.
*/
- final def lastKey : Int = this match {
+ final def lastKey: Int = this match {
case Bin(_, _, l, r) => r.lastKey
case Tip(k, v) => k
case IntMap.Nil => sys.error("Empty set")
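For readers who only know this file from the inside, the operations being reformatted are part of IntMap's public API; a small usage sketch:

    import scala.collection.immutable.IntMap

    object IntMapDemo {
      def main(args: Array[String]): Unit = {
        val a = IntMap(1 -> "one", 2 -> "two")
        val b = IntMap(2 -> "TWO", 3 -> "three")

        println(a.updated(4, "four"))                       // add or replace a binding
        println(a.updateWith(2, "dos", _ + "/" + _))        // resolve the conflict on key 2
        println(a.unionWith(b, (_, l, r) => l + "|" + r))   // keys 1, 2, 3; key 2 merged
        println(a.intersectionWith(b, (_, l, r) => (l, r))) // only key 2 survives
      }
    }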
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 8a316f37de..002027b162 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -18,17 +18,17 @@ import scala.collection.mutable.{ Builder, MapBuilder }
private[immutable] object LongMapUtils extends BitOperations.Long {
def branchMask(i: Long, j: Long) = highestOneBit(i ^ j)
- def join[T](p1 : Long, t1 : LongMap[T], p2 : Long, t2 : LongMap[T]) : LongMap[T] = {
- val m = branchMask(p1, p2);
- val p = mask(p1, m);
+ def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = {
+ val m = branchMask(p1, p2)
+ val p = mask(p1, m)
if (zero(p1, m)) LongMap.Bin(p, m, t1, t2)
- else LongMap.Bin(p, m, t2, t1);
+ else LongMap.Bin(p, m, t2, t1)
}
- def bin[T](prefix : Long, mask : Long, left : LongMap[T], right : LongMap[T]) : LongMap[T] = (left, right) match {
- case (left, LongMap.Nil) => left;
- case (LongMap.Nil, right) => right;
- case (left, right) => LongMap.Bin(prefix, mask, left, right);
+ def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match {
+ case (left, LongMap.Nil) => left
+ case (LongMap.Nil, right) => right
+ case (left, right) => LongMap.Bin(prefix, mask, left, right)
}
}
@@ -49,29 +49,29 @@ object LongMap {
def apply(): Builder[(Long, B), LongMap[B]] = new MapBuilder[Long, B, LongMap[B]](empty[B])
}
- def empty[T] : LongMap[T] = LongMap.Nil;
- def singleton[T](key : Long, value : T) : LongMap[T] = LongMap.Tip(key, value);
- def apply[T](elems : (Long, T)*) : LongMap[T] =
- elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2));
+ def empty[T]: LongMap[T] = LongMap.Nil
+ def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value)
+ def apply[T](elems: (Long, T)*): LongMap[T] =
+ elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
private[immutable] case object Nil extends LongMap[Nothing] {
// Important, don't remove this! See IntMap for explanation.
override def equals(that : Any) = that match {
- case (that : AnyRef) if (this eq that) => true;
- case (that : LongMap[_]) => false; // The only empty LongMaps are eq Nil
- case that => super.equals(that);
+ case (that: AnyRef) if (this eq that) => true
+ case (that: LongMap[_]) => false // The only empty LongMaps are eq Nil
+ case that => super.equals(that)
}
- };
+ }
- private[immutable] case class Tip[+T](key : Long, value : T) extends LongMap[T]{
- def withValue[S](s : S) =
- if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]];
- else LongMap.Tip(key, s);
+ private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] {
+ def withValue[S](s: S) =
+ if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]]
+ else LongMap.Tip(key, s)
}
- private[immutable] case class Bin[+T](prefix : Long, mask : Long, left : LongMap[T], right : LongMap[T]) extends LongMap[T]{
- def bin[S](left : LongMap[S], right : LongMap[S]) : LongMap[S] = {
- if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]];
- else LongMap.Bin[S](prefix, mask, left, right);
+ private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] {
+ def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = {
+ if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]]
+ else LongMap.Bin[S](prefix, mask, left, right)
}
}
}
@@ -79,64 +79,62 @@ object LongMap {
import LongMap._
// Iterator over a non-empty LongMap.
-private[immutable] abstract class LongMapIterator[V, T](it : LongMap[V]) extends AbstractIterator[T] {
+private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] {
// Basically this uses a simple stack to emulate conversion over the tree. However
// because we know that Longs are only 64 bits we can have at most 64 LongMap.Bins and
// one LongMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
// depth is 65
- var index = 0;
- var buffer = new Array[AnyRef](65);
+ var index = 0
+ var buffer = new Array[AnyRef](65)
def pop() = {
- index -= 1;
- buffer(index).asInstanceOf[LongMap[V]];
+ index -= 1
+ buffer(index).asInstanceOf[LongMap[V]]
}
- def push(x : LongMap[V]) {
- buffer(index) = x.asInstanceOf[AnyRef];
- index += 1;
+ def push(x: LongMap[V]) {
+ buffer(index) = x.asInstanceOf[AnyRef]
+ index += 1
}
push(it);
/**
* What value do we assign to a tip?
*/
- def valueOf(tip : LongMap.Tip[V]) : T;
+ def valueOf(tip: LongMap.Tip[V]): T
- def hasNext = index != 0;
- final def next : T =
+ def hasNext = index != 0
+ final def next: T =
pop() match {
case LongMap.Bin(_,_, t@LongMap.Tip(_, _), right) => {
- push(right);
- valueOf(t);
+ push(right)
+ valueOf(t)
}
case LongMap.Bin(_, _, left, right) => {
- push(right);
- push(left);
- next;
+ push(right)
+ push(left)
+ next
}
- case t@LongMap.Tip(_, _) => valueOf(t);
+ case t@LongMap.Tip(_, _) => valueOf(t)
// This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap
// and don't return an LongMapIterator for LongMap.Nil.
- case LongMap.Nil => sys.error("Empty maps not allowed as subtrees");
+ case LongMap.Nil => sys.error("Empty maps not allowed as subtrees")
}
}
-private[immutable] class LongMapEntryIterator[V](it : LongMap[V]) extends LongMapIterator[V, (Long, V)](it){
- def valueOf(tip : LongMap.Tip[V]) = (tip.key, tip.value);
+private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it){
+ def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value)
}
-private[immutable] class LongMapValueIterator[V](it : LongMap[V]) extends LongMapIterator[V, V](it){
- def valueOf(tip : LongMap.Tip[V]) = tip.value;
+private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it){
+ def valueOf(tip: LongMap.Tip[V]) = tip.value
}
-private[immutable] class LongMapKeyIterator[V](it : LongMap[V]) extends LongMapIterator[V, Long](it){
- def valueOf(tip : LongMap.Tip[V]) = tip.key;
+private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it){
+ def valueOf(tip: LongMap.Tip[V]) = tip.key
}
-import LongMap._;
-
/**
* Specialised immutable map structure for long keys, based on
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Long Maps</a>
@@ -157,12 +155,12 @@ extends AbstractMap[Long, T]
with Map[Long, T]
with MapLike[Long, T, LongMap[T]] {
- override def empty: LongMap[T] = LongMap.Nil;
+ override def empty: LongMap[T] = LongMap.Nil
override def toList = {
- val buffer = new scala.collection.mutable.ListBuffer[(Long, T)];
- foreach(buffer += _);
- buffer.toList;
+ val buffer = new scala.collection.mutable.ListBuffer[(Long, T)]
+ foreach(buffer += _)
+ buffer.toList
}
/**
@@ -171,22 +169,22 @@ extends AbstractMap[Long, T]
* @return an iterator over pairs of long keys and corresponding values.
*/
def iterator: Iterator[(Long, T)] = this match {
- case LongMap.Nil => Iterator.empty;
- case _ => new LongMapEntryIterator(this);
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapEntryIterator(this)
}
/**
* Loops over the key, value pairs of the map in unsigned order of the keys.
*/
- override final def foreach[U](f : ((Long, T)) => U) : Unit = this match {
- case LongMap.Bin(_, _, left, right) => {left.foreach(f); right.foreach(f); }
+ override final def foreach[U](f: ((Long, T)) => U): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
case LongMap.Tip(key, value) => f((key, value));
- case LongMap.Nil => {};
+ case LongMap.Nil =>
}
- override def keysIterator : Iterator[Long] = this match {
- case LongMap.Nil => Iterator.empty;
- case _ => new LongMapKeyIterator(this);
+ override def keysIterator: Iterator[Long] = this match {
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapKeyIterator(this)
}
/**
@@ -195,15 +193,15 @@ extends AbstractMap[Long, T]
*
* @param f The loop body
*/
- final def foreachKey(f : Long => Unit) : Unit = this match {
- case LongMap.Bin(_, _, left, right) => {left.foreachKey(f); right.foreachKey(f); }
- case LongMap.Tip(key, _) => f(key);
- case LongMap.Nil => {}
+ final def foreachKey(f: Long => Unit): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) }
+ case LongMap.Tip(key, _) => f(key)
+ case LongMap.Nil =>
}
- override def valuesIterator : Iterator[T] = this match {
- case LongMap.Nil => Iterator.empty;
- case _ => new LongMapValueIterator(this);
+ override def valuesIterator: Iterator[T] = this match {
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapValueIterator(this)
}
/**
@@ -212,67 +210,70 @@ extends AbstractMap[Long, T]
*
* @param f The loop body
*/
- final def foreachValue(f : T => Unit) : Unit = this match {
- case LongMap.Bin(_, _, left, right) => {left.foreachValue(f); right.foreachValue(f); }
- case LongMap.Tip(_, value) => f(value);
- case LongMap.Nil => {};
+ final def foreachValue(f: T => Unit): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) }
+ case LongMap.Tip(_, value) => f(value)
+ case LongMap.Nil =>
}
override def stringPrefix = "LongMap"
- override def isEmpty = this == LongMap.Nil;
+ override def isEmpty = this == LongMap.Nil
- override def filter(f : ((Long, T)) => Boolean) : LongMap[T] = this match {
+ override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match {
case LongMap.Bin(prefix, mask, left, right) => {
- val (newleft, newright) = (left.filter(f), right.filter(f));
- if ((left eq newleft) && (right eq newright)) this;
- else bin(prefix, mask, newleft, newright);
+ val (newleft, newright) = (left.filter(f), right.filter(f))
+ if ((left eq newleft) && (right eq newright)) this
+ else bin(prefix, mask, newleft, newright)
}
case LongMap.Tip(key, value) =>
if (f((key, value))) this
- else LongMap.Nil;
- case LongMap.Nil => LongMap.Nil;
+ else LongMap.Nil
+ case LongMap.Nil => LongMap.Nil
}
- def transform[S](f : (Long, T) => S) : LongMap[S] = this match {
- case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f));
- case t@LongMap.Tip(key, value) => t.withValue(f(key, value));
- case LongMap.Nil => LongMap.Nil;
+ def transform[S](f: (Long, T) => S): LongMap[S] = this match {
+ case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f))
+ case t@LongMap.Tip(key, value) => t.withValue(f(key, value))
+ case LongMap.Nil => LongMap.Nil
}
- final override def size : Int = this match {
- case LongMap.Nil => 0;
- case LongMap.Tip(_, _) => 1;
- case LongMap.Bin(_, _, left, right) => left.size + right.size;
+ final override def size: Int = this match {
+ case LongMap.Nil => 0
+ case LongMap.Tip(_, _) => 1
+ case LongMap.Bin(_, _, left, right) => left.size + right.size
}
- final def get(key : Long) : Option[T] = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key);
- case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None;
- case LongMap.Nil => None;
+ final def get(key: Long): Option[T] = this match {
+ case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key)
+ case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None
+ case LongMap.Nil => None
}
- final override def getOrElse[S >: T](key : Long, default : =>S) : S = this match {
- case LongMap.Nil => default;
- case LongMap.Tip(key2, value) => if (key == key2) value else default;
- case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default);
+ final override def getOrElse[S >: T](key: Long, default: => S): S = this match {
+ case LongMap.Nil => default
+ case LongMap.Tip(key2, value) => if (key == key2) value else default
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default)
}
- final override def apply(key : Long) : T = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key);
- case LongMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found");
- case LongMap.Nil => sys.error("key not found");
+ final override def apply(key: Long): T = this match {
+ case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key)
+ case LongMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found")
+ case LongMap.Nil => sys.error("key not found")
}
def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2)
- override def updated[S >: T](key : Long, value : S) : LongMap[S] = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right)
- else LongMap.Bin(prefix, mask, left, right.updated(key, value));
- case LongMap.Tip(key2, value2) => if (key == key2) LongMap.Tip(key, value);
- else join(key, LongMap.Tip(key, value), key2, this);
- case LongMap.Nil => LongMap.Tip(key, value);
+ override def updated[S >: T](key: Long, value: S): LongMap[S] = this match {
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right)
+ else LongMap.Bin(prefix, mask, left, right.updated(key, value))
+ case LongMap.Tip(key2, value2) =>
+ if (key == key2) LongMap.Tip(key, value)
+ else join(key, LongMap.Tip(key, value), key2, this)
+ case LongMap.Nil => LongMap.Tip(key, value)
}
/**
@@ -281,7 +282,7 @@ extends AbstractMap[Long, T]
* Equivalent to
* {{{
* this.get(key) match {
- * case None => this.update(key, value);
+ * case None => this.update(key, value)
* case Some(oldvalue) => this.update(key, f(oldvalue, value)
* }
* }}}
@@ -292,24 +293,26 @@ extends AbstractMap[Long, T]
* @param f The function used to resolve conflicts.
* @return The updated map.
*/
- def updateWith[S >: T](key : Long, value : S, f : (T, S) => S) : LongMap[S] = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
- else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f));
- case LongMap.Tip(key2, value2) => if (key == key2) LongMap.Tip(key, f(value2, value));
- else join(key, LongMap.Tip(key, value), key2, this);
- case LongMap.Nil => LongMap.Tip(key, value);
+ def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match {
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+ else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+ case LongMap.Tip(key2, value2) =>
+ if (key == key2) LongMap.Tip(key, f(value2, value))
+ else join(key, LongMap.Tip(key, value), key2, this)
+ case LongMap.Nil => LongMap.Tip(key, value)
}
- def -(key : Long) : LongMap[T] = this match {
+ def -(key: Long): LongMap[T] = this match {
case LongMap.Bin(prefix, mask, left, right) =>
- if (!hasMatch(key, prefix, mask)) this;
- else if (zero(key, mask)) bin(prefix, mask, left - key, right);
- else bin(prefix, mask, left, right - key);
+ if (!hasMatch(key, prefix, mask)) this
+ else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+ else bin(prefix, mask, left, right - key)
case LongMap.Tip(key2, _) =>
- if (key == key2) LongMap.Nil;
- else this;
- case LongMap.Nil => LongMap.Nil;
+ if (key == key2) LongMap.Nil
+ else this
+ case LongMap.Nil => LongMap.Nil
}
/**
@@ -321,21 +324,21 @@ extends AbstractMap[Long, T]
* @param f The transforming function.
* @return The modified map.
*/
- def modifyOrRemove[S](f : (Long, T) => Option[S]) : LongMap[S] = this match {
+ def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match {
case LongMap.Bin(prefix, mask, left, right) => {
- val newleft = left.modifyOrRemove(f);
- val newright = right.modifyOrRemove(f);
- if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]];
+ val newleft = left.modifyOrRemove(f)
+ val newright = right.modifyOrRemove(f)
+ if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]]
else bin(prefix, mask, newleft, newright)
}
case LongMap.Tip(key, value) => f(key, value) match {
- case None => LongMap.Nil;
+ case None => LongMap.Nil
case Some(value2) =>
//hack to preserve sharing
if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]]
- else LongMap.Tip(key, value2);
+ else LongMap.Tip(key, value2)
}
- case LongMap.Nil => LongMap.Nil;
+ case LongMap.Nil => LongMap.Nil
}
/**
@@ -346,25 +349,25 @@ extends AbstractMap[Long, T]
* @param f The function used to resolve conflicts between two mappings.
* @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
*/
- def unionWith[S >: T](that : LongMap[S], f : (Long, S, S) => S) : LongMap[S] = (this, that) match{
+ def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match{
case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
- else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1);
- else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f));
+ if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1)
+ else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f))
} else if (shorter(m2, m1)){
- if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
- else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2);
- else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f));
+ if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2)
+ else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f))
}
else {
- if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f));
- else join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
+ if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f))
+ else join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
}
- case (LongMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)); // TODO: remove [S] when SI-5548 is fixed
- case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y));
- case (LongMap.Nil, x) => x;
- case (x, LongMap.Nil) => x;
+ case (LongMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) // TODO: remove [S] when SI-5548 is fixed
+ case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
+ case (LongMap.Nil, x) => x
+ case (x, LongMap.Nil) => x
}
/**
@@ -378,27 +381,27 @@ extends AbstractMap[Long, T]
* @param f The combining function.
* @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
*/
- def intersectionWith[S, R](that : LongMap[S], f : (Long, T, S) => R) : LongMap[R] = (this, that) match {
+ def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match {
case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) LongMap.Nil;
- else if (zero(p2, m1)) l1.intersectionWith(that, f);
- else r1.intersectionWith(that, f);
- } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f));
+ if (!hasMatch(p2, p1, m1)) LongMap.Nil
+ else if (zero(p2, m1)) l1.intersectionWith(that, f)
+ else r1.intersectionWith(that, f)
+ } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f))
else {
- if (!hasMatch(p1, p2, m2)) LongMap.Nil;
- else if (zero(p1, m2)) this.intersectionWith(l2, f);
- else this.intersectionWith(r2, f);
+ if (!hasMatch(p1, p2, m2)) LongMap.Nil
+ else if (zero(p1, m2)) this.intersectionWith(l2, f)
+ else this.intersectionWith(r2, f)
}
case (LongMap.Tip(key, value), that) => that.get(key) match {
- case None => LongMap.Nil;
- case Some(value2) => LongMap.Tip(key, f(key, value, value2));
+ case None => LongMap.Nil
+ case Some(value2) => LongMap.Tip(key, f(key, value, value2))
}
case (_, LongMap.Tip(key, value)) => this.get(key) match {
- case None => LongMap.Nil;
- case Some(value2) => LongMap.Tip(key, f(key, value2, value));
+ case None => LongMap.Nil
+ case Some(value2) => LongMap.Tip(key, f(key, value2, value))
}
- case (_, _) => LongMap.Nil;
+ case (_, _) => LongMap.Nil
}
/**
@@ -409,9 +412,10 @@ extends AbstractMap[Long, T]
* @param that The map to intersect with.
* @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
*/
- def intersection[R](that : LongMap[R]) : LongMap[T] = this.intersectionWith(that, (key : Long, value : T, value2 : R) => value);
+ def intersection[R](that: LongMap[R]): LongMap[T] =
+ this.intersectionWith(that, (key: Long, value: T, value2: R) => value)
- def ++[S >: T](that : LongMap[S]) =
+ def ++[S >: T](that: LongMap[S]) =
this.unionWith[S](that, (key, x, y) => y)
}
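
The scaladoc above specifies `unionWith` and `intersectionWith` in terms of a per-key
conflict-resolving function. A minimal sketch of those semantics (the object and value
names are illustrative, not part of the patch):

    import scala.collection.immutable.LongMap

    object LongMapSketch extends App {
      val a = LongMap(1L -> 10, 2L -> 20)   // built via LongMap.apply
      val b = a.updated(3L, 30)             // persistent update; `a` is unchanged
      val c = LongMap(2L -> 200, 4L -> 400)

      // unionWith: keys present in both maps are resolved by f(key, leftValue, rightValue)
      val u = b.unionWith[Int](c, (_, x, y) => x + y)
      assert(u(2L) == 220 && u(4L) == 400)

      // intersectionWith: keeps only keys present in both maps, combining their values
      val i = b.intersectionWith[Int, Int](c, (_, x, y) => x min y)
      assert(i.keySet == Set(2L) && i(2L) == 20)
    }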
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index d7f5a57f50..accda5b8f7 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -41,8 +41,8 @@ class Breaks {
}
}
- trait TryBlock {
- def catchBreak(onBreak: => Unit): Unit
+ sealed trait TryBlock[T] {
+ def catchBreak(onBreak: =>T): T
}
/**
@@ -57,8 +57,8 @@ class Breaks {
* }
* }}}
*/
- def tryBreakable(op: => Unit) = new TryBlock {
- def catchBreak(onBreak: => Unit) = try {
+ def tryBreakable[T](op: =>T) = new TryBlock[T] {
+ def catchBreak(onBreak: =>T) = try {
op
} catch {
case ex: BreakControl =>
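
With `TryBlock` now parameterized in `T`, a `tryBreakable { ... } catchBreak { ... }`
expression yields a value rather than Unit; the new run test t4809.scala further down
exercises this. A minimal sketch:

    import scala.util.control.Breaks._

    object TryBreakableSketch extends App {
      val n: Int = tryBreakable {
        break()
        1
      } catchBreak {
        -1            // returned because op broke out before producing a value
      }
      assert(n == -1)
    }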
diff --git a/src/library/scala/util/control/ControlThrowable.scala b/src/library/scala/util/control/ControlThrowable.scala
index 8cbe3064ef..64afb1f10f 100644
--- a/src/library/scala/util/control/ControlThrowable.scala
+++ b/src/library/scala/util/control/ControlThrowable.scala
@@ -24,8 +24,9 @@ package scala.util.control
* try {
* // Body might throw arbitrarily
* } catch {
- * case ce : ControlThrowable => throw ce // propagate
- * case t : Exception => log(t) // log and suppress
+ * case c: ControlThrowable => throw c // propagate
+ * case t: Exception => log(t) // log and suppress
+ * }
* }}}
*
* @author Miles Sabin
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
index 2223a6db0f..2aa9a99054 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
@@ -336,7 +336,6 @@ import ILGenerator._
emitSpecialLabel(Label.Try)
val endExc: Label = new Label.NormalLabel() // new Label(lastLabel) ???
excStack.push(Label.Try, endExc)
- return endExc
}
/** Begins a catch block. */
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index b62a92cbd7..b797c71f6d 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -121,7 +121,7 @@ trait Types extends base.Types { self: Universe =>
* class C extends p.D[Int]
* T.asSeenFrom(ThisType(C), D) (where D is owner of m)
* = Int
- * }}}
+ * }}}
*/
def asSeenFrom(pre: Type, clazz: Symbol): Type
@@ -171,6 +171,15 @@ trait Types extends base.Types { self: Universe =>
*/
def widen: Type
+ /** Map to a singleton type which is a subtype of this type.
+ * The fallback implemented here gives:
+ * {{{
+ * T.narrow = (T {}).this.type
+ * }}}
+ * Overridden where we know more about where types come from.
+ */
+ def narrow: Type
+
/** The string discriminator of this type; useful for debugging */
def kind: String
}
@@ -365,4 +374,3 @@ trait Types extends base.Types { self: Universe =>
*/
def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type
}
-
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 18adab7c68..5ae8f22c64 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -129,11 +129,15 @@ abstract class SymbolTable extends makro.Universe
// sigh, this has to be public or atPhase doesn't inline.
var phStack: List[Phase] = Nil
- private var ph: Phase = NoPhase
- private var per = NoPeriod
+ private[this] var ph: Phase = NoPhase
+ private[this] var per = NoPeriod
final def atPhaseStack: List[Phase] = phStack
- final def phase: Phase = ph
+ final def phase: Phase = {
+ if (Statistics.hotEnabled)
+ Statistics.incCounter(SymbolTableStats.phaseCounter)
+ ph
+ }
def atPhaseStackMessage = atPhaseStack match {
case Nil => ""
@@ -330,3 +334,7 @@ abstract class SymbolTable extends makro.Universe
*/
def isCompilerUniverse = false
}
+
+object SymbolTableStats {
+ val phaseCounter = Statistics.newCounter("#phase calls")
+}
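
The new `phase` counter is only bumped when `Statistics.hotEnabled` holds, and that flag
is introduced below in Statistics.scala as `final val hotEnabled = false`; since a
`final val` with a literal right-hand side is a compile-time constant, the guard and the
counter bump should be folded away entirely unless the flag is flipped and the compiler
rebuilt. A minimal sketch of the pattern (names are illustrative):

    object HotStats {
      // Compile-time constant: when false, `if (hotEnabled) ...` is dead code
      // and costs nothing on the hot path.
      final val hotEnabled = false
      var phaseCalls = 0L
    }

    class PhaseHolder {
      private[this] var ph: String = "typer"
      def phase: String = {
        if (HotStats.hotEnabled) HotStats.phaseCalls += 1
        ph
      }
    }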
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 4b0ceeb86b..a3893a0236 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -645,6 +645,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
final def flags: Long = {
+ if (Statistics.hotEnabled) Statistics.incCounter(flagsCount)
val fs = _rawflags & phase.flagMask
(fs | ((fs & LateFlags) >>> LateShift)) & ~(fs >>> AntiShift)
}
@@ -936,7 +937,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ owner attribute --------------------------------------------------------------
- def owner: Symbol = rawowner
+ def owner: Symbol = {
+ Statistics.incCounter(ownerCount)
+ rawowner
+ }
+
// TODO - don't allow the owner to be changed without checking invariants, at least
// when under some flag. Define per-phase invariants for owner/owned relationships,
// e.g. after flatten all classes are owned by package classes, there are lots and
@@ -2324,7 +2329,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private[this] var _rawname: TermName = initName
def rawname = _rawname
- def name = _rawname
+ def name = {
+ Statistics.incCounter(nameCount)
+ _rawname
+ }
def name_=(name: Name) {
if (name != rawname) {
log("Renaming %s %s %s to %s".format(shortSymbolClass, debugFlagString, rawname, name))
@@ -2493,11 +2501,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def companionClass =
flatOwnerInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this)
- override def owner = (
+ override def owner = {
+ Statistics.incCounter(ownerCount)
if (!isMethod && needsFlatClasses) rawowner.owner
else rawowner
- )
- override def name: TermName = (
+ }
+ override def name: TermName = {
+ Statistics.incCounter(nameCount)
if (!isMethod && needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname)
@@ -2505,7 +2515,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
flatname
}
else rawname
- )
+ }
}
implicit val ModuleSymbolTag = ClassTag[ModuleSymbol](classOf[ModuleSymbol])
@@ -2576,7 +2586,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// cloneSymbolImpl still abstract in TypeSymbol.
def rawname = _rawname
- def name = _rawname
+ def name = {
+ Statistics.incCounter(nameCount)
+ _rawname
+ }
final def asNameType(n: Name) = n.toTypeName
override def isNonClassType = true
@@ -2888,10 +2901,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
thisTypeCache
}
- override def owner: Symbol =
+ override def owner: Symbol = {
+ Statistics.incCounter(ownerCount)
if (needsFlatClasses) rawowner.owner else rawowner
+ }
- override def name: TypeName = (
+ override def name: TypeName = {
+ Statistics.incCounter(nameCount)
if (needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname).toTypeName
@@ -2899,7 +2915,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
flatname
}
else rawname
- )
+ }
/** A symbol carrying the self type of the class as its type */
override def thisSym: Symbol = thissym
@@ -3194,4 +3210,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
object SymbolsStats {
val typeSymbolCount = Statistics.newCounter("#type symbols")
val classSymbolCount = Statistics.newCounter("#class symbols")
+ val flagsCount = Statistics.newCounter("#flags ops")
+ val ownerCount = Statistics.newCounter("#owner ops")
+ val nameCount = Statistics.newCounter("#name ops")
}
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index d4b895bcb4..4cf2cceb81 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -679,7 +679,7 @@ trait Types extends api.Types { self: SymbolTable =>
else {
// scala.tools.nsc.util.trace.when(pre.isInstanceOf[ExistentialType])("X "+this+".asSeenfrom("+pre+","+clazz+" = ") {
Statistics.incCounter(asSeenFromCount)
- val start = Statistics.startTimer(asSeenFromNanos)
+ val start = Statistics.pushTimer(typeOpsStack, asSeenFromNanos)
val m = new AsSeenFromMap(pre.normalize, clazz)
val tp = m apply this
val tp1 = existentialAbstraction(m.capturedParams, tp)
@@ -687,7 +687,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (m.capturedSkolems.isEmpty) tp1
else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1)
- Statistics.stopTimer(asSeenFromNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
result
}
}
@@ -826,12 +826,12 @@ trait Types extends api.Types { self: SymbolTable =>
def stat_<:<(that: Type): Boolean = {
Statistics.incCounter(subtypeCount)
- val start = Statistics.startTimer(subtypeNanos)
+ val start = Statistics.pushTimer(typeOpsStack, subtypeNanos)
val result =
(this eq that) ||
(if (explainSwitch) explain("<:", isSubType, this, that)
else isSubType(this, that, AnyDepth))
- Statistics.stopTimer(subtypeNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
result
}
@@ -839,12 +839,12 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def weak_<:<(that: Type): Boolean = {
Statistics.incCounter(subtypeCount)
- val start = Statistics.startTimer(subtypeNanos)
+ val start = Statistics.pushTimer(typeOpsStack, subtypeNanos)
val result =
((this eq that) ||
(if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
else isWeakSubType(this, that)))
- Statistics.stopTimer(subtypeNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
result
}
@@ -1018,7 +1018,7 @@ trait Types extends api.Types { self: SymbolTable =>
val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
Statistics.incCounter(findMemberCount)
- val start = Statistics.startTimer(findMemberNanos)
+ val start = Statistics.pushTimer(typeOpsStack, findMemberNanos)
//Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
var members: Scope = null
@@ -1045,7 +1045,7 @@ trait Types extends api.Types { self: SymbolTable =>
!sym.isPrivateLocal ||
(bcs0.head.hasTransOwner(bcs.head)))) {
if (name.isTypeName || stableOnly && sym.isStable) {
- Statistics.stopTimer(findMemberNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
if (suspension ne null) suspension foreach (_.suspended = false)
return sym
} else if (member == NoSymbol) {
@@ -1091,7 +1091,7 @@ trait Types extends api.Types { self: SymbolTable =>
} // while (!bcs.isEmpty)
excluded = excludedFlags
} // while (continue)
- Statistics.stopTimer(findMemberNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
if (suspension ne null) suspension foreach (_.suspended = false)
if (members eq null) {
if (member == NoSymbol) Statistics.incCounter(noMemberCount)
@@ -1534,11 +1534,17 @@ trait Types extends api.Types { self: SymbolTable =>
tpe.baseTypeSeqCache = bts lateMap paramToVar
} else {
Statistics.incCounter(compoundBaseTypeSeqCount)
- tpe.baseTypeSeqCache = undetBaseTypeSeq
- tpe.baseTypeSeqCache = if (tpe.typeSymbol.isRefinementClass)
- tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
- else
- compoundBaseTypeSeq(tpe)
+ val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos)
+ try {
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache =
+ if (tpe.typeSymbol.isRefinementClass)
+ tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
+ else
+ compoundBaseTypeSeq(tpe)
+ } finally {
+ Statistics.popTimer(typeOpsStack, start)
+ }
// [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors
// when compiling with
// scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala
@@ -2390,8 +2396,13 @@ trait Types extends api.Types { self: SymbolTable =>
tpe.baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
Statistics.incCounter(typerefBaseTypeSeqCount)
- tpe.baseTypeSeqCache = undetBaseTypeSeq
- tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
+ val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos)
+ try {
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
+ } finally {
+ Statistics.popTimer(typeOpsStack, start)
+ }
}
}
if (tpe.baseTypeSeqCache == undetBaseTypeSeq)
@@ -6306,13 +6317,13 @@ trait Types extends api.Types { self: SymbolTable =>
case List(t) => t
case _ =>
Statistics.incCounter(lubCount)
- val start = Statistics.startTimer(lubNanos)
+ val start = Statistics.pushTimer(typeOpsStack, lubNanos)
try {
lub(ts, lubDepth(ts))
} finally {
lubResults.clear()
glbResults.clear()
- Statistics.stopTimer(lubNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
}
}
@@ -6454,13 +6465,13 @@ trait Types extends api.Types { self: SymbolTable =>
case List(t) => t
case ts0 =>
Statistics.incCounter(lubCount)
- val start = Statistics.startTimer(lubNanos)
+ val start = Statistics.pushTimer(typeOpsStack, lubNanos)
try {
glbNorm(ts0, lubDepth(ts0))
} finally {
lubResults.clear()
glbResults.clear()
- Statistics.stopTimer(lubNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
}
}
@@ -6884,11 +6895,13 @@ object TypesStats {
val noMemberCount = Statistics.newSubCounter(" of which not found", findMemberCount)
val multMemberCount = Statistics.newSubCounter(" of which multiple overloaded", findMemberCount)
val typerNanos = Statistics.newTimer ("time spent typechecking", "typer")
- val lubNanos = Statistics.newSubTimer ("time spent in lubs", typerNanos)
- val subtypeNanos = Statistics.newSubTimer ("time spent in <:<", typerNanos)
- val findMemberNanos = Statistics.newSubTimer ("time spent in findmember", typerNanos)
- val asSeenFromNanos = Statistics.newSubTimer ("time spent in asSeenFrom", typerNanos)
+ val lubNanos = Statistics.newStackableTimer("time spent in lubs", typerNanos)
+ val subtypeNanos = Statistics.newStackableTimer("time spent in <:<", typerNanos)
+ val findMemberNanos = Statistics.newStackableTimer("time spent in findmember", typerNanos)
+ val asSeenFromNanos = Statistics.newStackableTimer("time spent in asSeenFrom", typerNanos)
+ val baseTypeSeqNanos = Statistics.newStackableTimer("time spent in baseTypeSeq", typerNanos)
val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
+ val typeOpsStack = Statistics.newTimerStack()
}
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index 57c9e98174..f69530c40d 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -4,6 +4,8 @@ import collection.mutable
object Statistics {
+ type TimerSnapshot = (Long, Long)
+
/** If enabled, increment counter by one */
@inline final def incCounter(c: Counter) {
if (_enabled && c != null) c.value += 1
@@ -30,20 +32,20 @@ object Statistics {
}
/** If enabled, start timer */
- @inline final def startTimer(tm: Timer): (Long, Long) =
+ @inline final def startTimer(tm: Timer): TimerSnapshot =
if (_enabled && tm != null) tm.start() else null
/** If enabled, stop timer */
- @inline final def stopTimer(tm: Timer, start: (Long, Long)) {
+ @inline final def stopTimer(tm: Timer, start: TimerSnapshot) {
if (_enabled && tm != null) tm.stop(start)
}
/** If enabled, push and start a new timer in timer stack */
- @inline final def pushTimerClass(timers: ByClassTimerStack, cls: Class[_]): (Long, Long) =
- if (_enabled && timers != null) timers.push(cls) else null
+ @inline final def pushTimer(timers: TimerStack, timer: StackableTimer): TimerSnapshot =
+ if (_enabled && timers != null) timers.push(timer) else null
/** If enabled, stop and pop timer from timer stack */
- @inline final def popTimerClass(timers: ByClassTimerStack, prev: (Long, Long)) {
+ @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot) {
if (_enabled && timers != null) timers.pop(prev)
}
@@ -73,6 +75,13 @@ object Statistics {
*/
def newSubTimer(prefix: String, timer: Timer): Timer = new SubTimer(prefix, timer)
+ /** Create a new stackable timer that shows as `prefix` and is active
+ * in the same phases as its base timer. Stackable timers are subtimers
+ * that can be stacked in a timer stack, and that print aggregate as well as specific
+ * durations.
+ */
+ def newStackableTimer(prefix: String, timer: Timer): StackableTimer = new StackableTimer(prefix, timer)
+
/** Create a new view that shows as `prefix` and is active in given phases.
* The view always reflects the current value of `quant` as a quantity.
*/
@@ -86,20 +95,27 @@ quant)
/** Same as newQuantMap, where the key type is fixed to be Class[_] */
def newByClass[V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue)
- /** Create a new timer stack map, indexed by Class[_]. */
- def newByClassTimerStack(prefix: String, underlying: Timer) = new ByClassTimerStack(prefix, underlying)
+ /** Create a new timer stack */
+ def newTimerStack() = new TimerStack()
def allQuantities: Iterable[Quantity] =
- for ((q, _) <- qs if !q.isInstanceOf[SubQuantity];
+ for ((_, q) <- qs if q.underlying == q;
r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
private def showPercent(x: Double, base: Double) =
if (base == 0) "" else f" (${x / base * 100}%2.1f%)"
+ /** The base trait for quantities.
+ * Quantities with non-empty prefix are printed in the statistics info.
+ */
trait Quantity {
- qs += (this -> ())
+ if (prefix.nonEmpty) {
+ val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix"
+ qs(key) = this
+ }
val prefix: String
val phases: Seq[String]
+ def underlying: Quantity = this
def showAt(phase: String) = phases.isEmpty || (phases contains phase)
def line = f"$prefix%-30s: ${this}"
val children = new mutable.ListBuffer[Quantity]
@@ -123,7 +139,7 @@ quant)
override def toString = quant.toString
}
- private class RelCounter(prefix: String, val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
+ private class RelCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
override def toString =
if (value == 0) "0"
else {
@@ -142,26 +158,32 @@ quant)
value + showPercent(value, underlying.value)
}
- class Timer(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Timer] {
+ class Timer(val prefix: String, val phases: Seq[String]) extends Quantity {
var nanos: Long = 0
var timings = 0
- def compare(that: Timer): Int =
- if (this.nanos < that.nanos) -1
- else if (this.nanos > that.nanos) 1
- else 0
def start() = {
(nanos, System.nanoTime())
}
- def stop(prev: (Long, Long)) {
+ def stop(prev: TimerSnapshot) {
val (nanos0, start) = prev
nanos = nanos0 + System.nanoTime() - start
timings += 1
}
- override def toString = s"$timings spans, ${nanos/1000}ms"
+ protected def show(ns: Long) = s"${ns/1000}ms"
+ override def toString = s"$timings spans, ${show(nanos)}"
}
- private class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity {
- override def toString: String = super.toString + showPercent(nanos, underlying.nanos)
+ class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity {
+ override protected def show(ns: Long) = super.show(ns) + showPercent(ns, underlying.nanos)
+ }
+
+ class StackableTimer(prefix: String, underlying: Timer) extends SubTimer(prefix, underlying) with Ordered[StackableTimer] {
+ var specificNanos: Long = 0
+ def compare(that: StackableTimer): Int =
+ if (this.specificNanos < that.specificNanos) -1
+ else if (this.specificNanos > that.specificNanos) 1
+ else 0
+ override def toString = s"${super.toString} aggregate, ${show(specificNanos)} specific"
}
/** A mutable map quantity where missing elements are automatically inserted
@@ -183,23 +205,25 @@ quant)
}.mkString(", ")
}
- /** A mutable map quantity that takes class keys to subtimer values, relative to
- * some `underlying` timer. In addition, class timers can be pushed and popped.
- * Pushing the timer for a class means stopping the currently active timer.
+ /** A stack of timers, all active, where a timer's specific "clock"
+ * is stopped as long as it is buried by some other timer in the stack, but
+ * its aggregate clock keeps on ticking.
*/
- class ByClassTimerStack(prefix: String, val underlying: Timer)
- extends QuantMap[Class[_], Timer](prefix, underlying.phases, new SubTimer("", underlying)) with SubQuantity {
- private var elems: List[(Timer, Long)] = Nil
- def push(cls: Class[_]): (Long, Long) = {
- val topTimer = this(cls)
- elems = (topTimer, 0L) :: elems
- topTimer.start()
+ class TimerStack {
+ private var elems: List[(StackableTimer, Long)] = Nil
+ /** Start given timer and push it onto the stack */
+ def push(t: StackableTimer): TimerSnapshot = {
+ elems = (t, 0L) :: elems
+ t.start()
}
- def pop(prev: (Long, Long)) = {
+ /** Stop and pop top timer in stack
+ */
+ def pop(prev: TimerSnapshot) = {
val (nanos0, start) = prev
val duration = System.nanoTime() - start
val (topTimer, nestedNanos) :: rest = elems
- topTimer.nanos = nanos0 + duration - nestedNanos
+ topTimer.nanos = nanos0 + duration
+ topTimer.specificNanos += duration - nestedNanos
topTimer.timings += 1
elems = rest match {
case (outerTimer, outerNested) :: elems1 =>
@@ -211,7 +235,7 @@ quant)
}
private var _enabled = false
- private val qs = new mutable.WeakHashMap[Quantity, Unit]
+ private val qs = new mutable.HashMap[String, Quantity]
def enabled = _enabled
def enabled_=(cond: Boolean) = {
@@ -229,4 +253,9 @@ quant)
_enabled = true
}
}
+
+ /** replace rhs with enabled and rebuild to also count tiny but super-hot methods
+ * such as phase, flags, owner, name.
+ */
+ final val hotEnabled = false
}
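
The TimerStack comment above is the heart of the change: while a timer is buried under
another timer on the stack, its specific clock stops but its aggregate clock keeps
ticking. A small sketch of how the `pushTimer`/`popTimer` pairs in Types.scala are meant
to nest (timer names and the surrounding object are illustrative):

    import scala.reflect.internal.util.Statistics

    object TimerStackSketch {
      val typerNanos = Statistics.newTimer("time spent typechecking", "typer")
      val lubNanos   = Statistics.newStackableTimer("time spent in lubs", typerNanos)
      val subNanos   = Statistics.newStackableTimer("time spent in <:<", typerNanos)
      val opsStack   = Statistics.newTimerStack()

      def subtypeCheck(): Unit = {
        val start = Statistics.pushTimer(opsStack, subNanos)
        try {
          // ... the actual check; this duration is "specific" to subNanos
        } finally Statistics.popTimer(opsStack, start)
      }

      def lub(): Unit = {
        val start = Statistics.pushTimer(opsStack, lubNanos)
        try {
          // the nested call's duration still counts toward lubNanos' aggregate time,
          // but is subtracted from its specific time when this frame is popped
          subtypeCheck()
        } finally Statistics.popTimer(opsStack, start)
      }
    }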
diff --git a/test/files/neg/t4842.check b/test/files/neg/t4842.check
new file mode 100644
index 0000000000..b53bbdbd15
--- /dev/null
+++ b/test/files/neg/t4842.check
@@ -0,0 +1,7 @@
+t4842.scala:2: error: self constructor arguments cannot reference unconstructed `this`
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+ ^
+t4842.scala:6: error: self constructor arguments cannot reference unconstructed `this`
+ def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
+ ^
+two errors found
diff --git a/test/files/neg/t4842b.scala b/test/files/neg/t4842.scala
index a7996cc061..c6244efda7 100644
--- a/test/files/neg/t4842b.scala
+++ b/test/files/neg/t4842.scala
@@ -1,3 +1,7 @@
+class Foo (x: AnyRef) {
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+}
+
class TypeArg[X](val x: X)(a: AnyRef) {
def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
}
diff --git a/test/files/neg/t4842a.check b/test/files/neg/t4842a.check
deleted file mode 100644
index 39d77bfc48..0000000000
--- a/test/files/neg/t4842a.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t4842a.scala:2: error: self constructor arguments cannot reference unconstructed `this`
- def this(x: Int) = this(new { println(Foo.this)}) // error
- ^
-one error found
diff --git a/test/files/neg/t4842a.scala b/test/files/neg/t4842a.scala
deleted file mode 100644
index 78360effb4..0000000000
--- a/test/files/neg/t4842a.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class Foo (x: AnyRef) {
- def this(x: Int) = this(new { println(Foo.this)}) // error
-}
diff --git a/test/files/neg/t4842b.check b/test/files/neg/t4842b.check
deleted file mode 100644
index c7ccd5e059..0000000000
--- a/test/files/neg/t4842b.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t4842b.scala:2: error: self constructor arguments cannot reference unconstructed `this`
- def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
- ^
-one error found
diff --git a/test/files/neg/t4989.check b/test/files/neg/t4989.check
new file mode 100644
index 0000000000..814507fc3f
--- /dev/null
+++ b/test/files/neg/t4989.check
@@ -0,0 +1,7 @@
+t4989.scala:14: error: method print in class A cannot be directly accessed from class C because class B redeclares it as abstract
+ override def print(): String = super.print() // should be an error
+ ^
+t4989.scala:18: error: method print in class A cannot be directly accessed from trait T because class B redeclares it as abstract
+ override def print(): String = super.print() // should be an error
+ ^
+two errors found
diff --git a/test/files/neg/t4989.scala b/test/files/neg/t4989.scala
new file mode 100644
index 0000000000..e7ff80ed74
--- /dev/null
+++ b/test/files/neg/t4989.scala
@@ -0,0 +1,68 @@
+abstract class A0 {
+ def print(): String
+}
+
+class A extends A0 {
+ def print(): String = "A"
+}
+
+abstract class B extends A {
+ def print() : String
+}
+
+class C extends B {
+ override def print(): String = super.print() // should be an error
+}
+
+trait T extends B {
+ override def print(): String = super.print() // should be an error
+}
+
+class D extends A {
+ override def print(): String = super.print() // okay
+}
+
+
+// it's okay to do this when traits are in the mix, as the
+// suitable super accessor methods are used.
+object ConcreteMethodAndIntermediaryAreTraits {
+ trait T1 {
+ def print(): String = ""
+ }
+
+ trait T2 extends T1 {
+ def print(): String
+ }
+
+ class C3 extends T2 {
+ def print(): String = super.print() // okay
+ }
+}
+
+object IntermediaryIsTrait {
+ class T1 {
+ def print(): String = ""
+ }
+
+ trait T2 extends T1 {
+ def print(): String
+ }
+
+ class C3 extends T2 {
+ override def print(): String = super.print() // okay
+ }
+}
+
+object ConcreteMethodIsTrait {
+ trait T1 {
+ def print(): String = ""
+ }
+
+ abstract class T2 extends T1 {
+ def print(): String
+ }
+
+ class C3 extends T2 {
+ override def print(): String = super.print() // okay
+ }
+}
diff --git a/test/files/neg/t5761.scala b/test/files/neg/t5761.scala
new file mode 100644
index 0000000000..040c4eb75a
--- /dev/null
+++ b/test/files/neg/t5761.scala
@@ -0,0 +1,16 @@
+class D[-A](x: A) { }
+
+object Test1 {
+ println(new D[Int]{}) // crash
+}
+
+object Test2 {
+ println(new D[Int]()) // no crash
+ println(new D[Int]{}) // crash
+}
+
+object Test3 {
+ new Tread("sth") { }.run()
+}
+
+
diff --git a/test/files/pos/spec-params-new.scala b/test/files/pos/spec-params-new.scala
index 661e686f0e..959ce1591c 100644
--- a/test/files/pos/spec-params-new.scala
+++ b/test/files/pos/spec-params-new.scala
@@ -31,4 +31,4 @@ class Foo[@specialized A: ClassTag] {
val xs = new Array[A](1)
xs(0) = x
}
-} \ No newline at end of file
+}
diff --git a/test/files/pos/t5968.flags b/test/files/pos/t5968.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t5968.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t5968.scala b/test/files/pos/t5968.scala
new file mode 100644
index 0000000000..0093f84fc0
--- /dev/null
+++ b/test/files/pos/t5968.scala
@@ -0,0 +1,8 @@
+object X {
+ def f(e: Either[Int, X.type]) = e match {
+ case Left(i) => i
+ case Right(X) => 0
+ // SI-5986 spurious exhaustivity warning here
+ }
+}
+
diff --git a/test/files/run/t4809.scala b/test/files/run/t4809.scala
new file mode 100644
index 0000000000..b30d80562f
--- /dev/null
+++ b/test/files/run/t4809.scala
@@ -0,0 +1,34 @@
+
+
+import scala.util.control.Breaks._
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val x = tryBreakable {
+ break
+ 2
+ } catchBreak {
+ 3
+ }
+ assert(x == 3, x)
+
+ val y = tryBreakable {
+ 2
+ } catchBreak {
+ 3
+ }
+ assert(y == 2, y)
+
+ val z = tryBreakable {
+ break
+ 1.0
+ } catchBreak {
+ 2
+ }
+ assert(z == 2.0, z)
+ }
+
+}
diff --git a/test/files/run/t4935.check b/test/files/run/t4935.check
new file mode 100644
index 0000000000..ef0493b275
--- /dev/null
+++ b/test/files/run/t4935.check
@@ -0,0 +1 @@
+hello
diff --git a/test/files/run/t4935.flags b/test/files/run/t4935.flags
new file mode 100644
index 0000000000..ac14fe5dbd
--- /dev/null
+++ b/test/files/run/t4935.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/run/t4935.scala b/test/files/run/t4935.scala
new file mode 100644
index 0000000000..18631e2041
--- /dev/null
+++ b/test/files/run/t4935.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+ for (i <- 0 to 1) {
+ val a = Foo
+ }
+}
+
+object Foo {
+ println("hello")
+}
diff --git a/test/files/run/t5284.check b/test/files/run/t5284.check
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/test/files/run/t5284.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t5284.scala b/test/files/run/t5284.scala
new file mode 100644
index 0000000000..ba0845fb8e
--- /dev/null
+++ b/test/files/run/t5284.scala
@@ -0,0 +1,25 @@
+
+
+
+
+
+/** Here we have a situation where a normalized method parameter `W`
+ * is used in a position which accepts an instance of type `T` - we know we can
+ * safely cast `T` to `W` whenever type bounds on `W` hold.
+ */
+object Test {
+ def main(args: Array[String]) {
+ val a = Blarg(Array(1, 2, 3))
+ println(a.m((x: Int) => x + 1))
+ }
+}
+
+
+object Blarg {
+ def apply[T: Manifest](a: Array[T]) = new Blarg(a)
+}
+
+
+class Blarg[@specialized(Int) T: Manifest](val a: Array[T]) {
+ def m[@specialized(Int) W >: T, @specialized(Int) S](f: W => S) = f(a(0))
+}
diff --git a/test/files/run/t5284b.check b/test/files/run/t5284b.check
new file mode 100644
index 0000000000..98d9bcb75a
--- /dev/null
+++ b/test/files/run/t5284b.check
@@ -0,0 +1 @@
+17
diff --git a/test/files/run/t5284b.scala b/test/files/run/t5284b.scala
new file mode 100644
index 0000000000..a9282a895f
--- /dev/null
+++ b/test/files/run/t5284b.scala
@@ -0,0 +1,28 @@
+
+
+
+
+
+
+/** Here we have a situation where a normalized method parameter `W`
+ * is used in a position which expects a type `T` - we know we can
+ * safely cast `W` to `T` whenever the type bounds of `W` hold.
+ */
+object Test {
+ def main(args: Array[String]) {
+ val foo = Foo.createUnspecialized[Int]
+ println(foo.bar(17))
+ }
+}
+
+
+object Foo {
+ def createUnspecialized[T] = new Foo[T]
+}
+
+
+class Foo[@specialized(Int) T] {
+ val id: T => T = x => x
+
+ def bar[@specialized(Int) W <: T, @specialized(Int) S](w: W) = id(w)
+}
diff --git a/test/files/run/t5284c.check b/test/files/run/t5284c.check
new file mode 100644
index 0000000000..00750edc07
--- /dev/null
+++ b/test/files/run/t5284c.check
@@ -0,0 +1 @@
+3
diff --git a/test/files/run/t5284c.scala b/test/files/run/t5284c.scala
new file mode 100644
index 0000000000..383b84c2cc
--- /dev/null
+++ b/test/files/run/t5284c.scala
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+/** Here we have a compound type `List[W]` used in
+ * a position where `List[T]` is expected. The cast
+ * emitted in the normalized `bar` is safe because the
+ * normalized `bar` can only be called if the type
+ * bounds hold.
+ */
+object Test {
+ def main(args: Array[String]) {
+ val foo = Foo.createUnspecialized[Int]
+ println(foo.bar(List(1, 2, 3)))
+ }
+}
+
+
+object Foo {
+ def createUnspecialized[T] = new Foo[T]
+}
+
+
+class Foo[@specialized(Int) T] {
+ val len: List[T] => Int = xs => xs.length
+
+ def bar[@specialized(Int) W <: T](ws: List[W]) = len(ws)
+}
diff --git a/test/files/run/t5966.check b/test/files/run/t5966.check
new file mode 100644
index 0000000000..bfe8358a77
--- /dev/null
+++ b/test/files/run/t5966.check
@@ -0,0 +1,3 @@
+(o()_)("") = List()
+(o("a1")_)("") = WrappedArray(a1)
+(o("a1", "a2")_)("") = WrappedArray(a1, a2)
diff --git a/test/files/run/t5966.scala b/test/files/run/t5966.scala
new file mode 100644
index 0000000000..bbe1a6e874
--- /dev/null
+++ b/test/files/run/t5966.scala
@@ -0,0 +1,9 @@
+object o { def apply(i: AnyRef*)(j: String) = i }
+
+object Test {
+ def main(args: Array[String]) {
+ println("(o()_)(\"\") = " + (o()_)(""))
+ println("(o(\"a1\")_)(\"\") = " + (o("a1")_)(""))
+ println("(o(\"a1\", \"a2\")_)(\"\") = " + (o("a1", "a2")_)(""))
+ }
+}