diff options
Diffstat (limited to 'src')
61 files changed, 1097 insertions, 857 deletions
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 3a527676b4..be5909a67f 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -213,17 +213,14 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL { def wildcardStar(tree: Tree) = atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) } - def paramToArg(vparam: Symbol) = { - val arg = Ident(vparam) - if (isRepeatedParamType(vparam.tpe)) wildcardStar(arg) - else arg - } + def paramToArg(vparam: Symbol): Tree = + paramToArg(Ident(vparam), isRepeatedParamType(vparam.tpe)) - def paramToArg(vparam: ValDef) = { - val arg = Ident(vparam.name) - if (treeInfo.isRepeatedParamType(vparam.tpt)) wildcardStar(arg) - else arg - } + def paramToArg(vparam: ValDef): Tree = + paramToArg(Ident(vparam.name), treeInfo.isRepeatedParamType(vparam.tpt)) + + def paramToArg(arg: Ident, isRepeatedParam: Boolean): Tree = + if (isRepeatedParam) wildcardStar(arg) else arg /** Make forwarder to method `target`, passing all parameters in `params` */ def mkForwarder(target: Tree, vparamss: List[List[Symbol]]) = diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index bce9f28847..3232bde3b4 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -253,7 +253,9 @@ self => final val InBlock = 1 final val InTemplate = 2 - lazy val ScalaValueClassNames = Seq(tpnme.AnyVal, + // These symbols may not yet be loaded (e.g. in the ide) so don't go + // through definitions to obtain the names. 
+ lazy val ScalaValueClassNames = Seq(tpnme.AnyVal, tpnme.Unit, tpnme.Boolean, tpnme.Byte, diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 0c527fbaf4..59adcc637a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -458,6 +458,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters { val CLASS_CONSTRUCTOR_NAME = "<clinit>"
val INSTANCE_CONSTRUCTOR_NAME = "<init>"
+ val INNER_CLASSES_FLAGS =
+ (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
+
// -----------------------------------------------------------------------------------------
// factory methods
// -----------------------------------------------------------------------------------------
@@ -644,6 +648,86 @@ abstract class GenASM extends SubComponent with BytecodeWriters { def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
+ def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
+ /** The outer name for this inner class. Note that it returns null
+ * when the inner class should not get an index in the constant pool.
+ * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+ */
+ def outerName(innerSym: Symbol): String = {
+ if (innerSym.originalEnclosingMethod != NoSymbol)
+ null
+ else {
+ val outerName = javaName(innerSym.rawowner)
+ if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
+ else outerName
+ }
+ }
+
+ def innerName(innerSym: Symbol): String =
+ if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
+ null
+ else
+ innerSym.rawname + innerSym.moduleSuffix
+
+ // add inner classes which might not have been referenced yet
+ afterErasure {
+ for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
+ innerClassBuffer += m
+ }
+
+ val allInners: List[Symbol] = innerClassBuffer.toList
+ if (allInners.nonEmpty) {
+ debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
+
+ // entries ready to be serialized into the classfile, used to detect duplicates.
+ val entries = mutable.Map.empty[String, String]
+
+ // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
+ for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
+ val flags = mkFlags(
+ if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
+ javaFlags(innerSym),
+ if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
+ ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ val jname = javaName(innerSym) // never null
+ val oname = outerName(innerSym) // null when method-enclosed
+ val iname = innerName(innerSym) // null for anonymous inner class
+
+ // Mimicking javap inner class output
+ debuglog(
+ if (oname == null || iname == null) "//class " + jname
+ else "//%s=class %s of class %s".format(iname, jname, oname)
+ )
+
+ assert(jname != null, "javaName is broken.") // documentation
+ val doAdd = entries.get(jname) match {
+ // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
+ case Some(prevOName) =>
+ // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
+ // i.e. for them it must be the case that oname == java/lang/Thread
+ assert(prevOName == oname, "duplicate")
+ false
+ case None => true
+ }
+
+ if(doAdd) {
+ entries += (jname -> oname)
+ jclass.visitInnerClass(jname, oname, iname, flags)
+ }
+
+ /*
+ * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
+ * If a class file has a version number that is greater than or equal to 51.0, and
+ * has an InnerClasses attribute in its attributes table, then for all entries in the
+ * classes array of the InnerClasses attribute, the value of the
+ * outer_class_info_index item must be zero if the value of the
+ * inner_name_index item is zero.
+ */
+
+ }
+ }
+ }
+
} // end of class JBuilder
@@ -654,10 +738,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { // more constants
// -----------------------------------------------------------------------------------------
- val INNER_CLASSES_FLAGS =
- (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
- asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
-
val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
@@ -969,86 +1049,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { }
}
- def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
- /** The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
- */
- def outerName(innerSym: Symbol): String = {
- if (innerSym.originalEnclosingMethod != NoSymbol)
- null
- else {
- val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
- else outerName
- }
- }
-
- def innerName(innerSym: Symbol): String =
- if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
- null
- else
- innerSym.rawname + innerSym.moduleSuffix
-
- // add inner classes which might not have been referenced yet
- afterErasure {
- for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
- innerClassBuffer += m
- }
-
- val allInners: List[Symbol] = innerClassBuffer.toList
- if (allInners.nonEmpty) {
- debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
-
- // entries ready to be serialized into the classfile, used to detect duplicates.
- val entries = mutable.Map.empty[String, String]
-
- // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
- for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
- val flags = mkFlags(
- if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
- javaFlags(innerSym),
- if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
- ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
- val jname = javaName(innerSym) // never null
- val oname = outerName(innerSym) // null when method-enclosed
- val iname = innerName(innerSym) // null for anonymous inner class
-
- // Mimicking javap inner class output
- debuglog(
- if (oname == null || iname == null) "//class " + jname
- else "//%s=class %s of class %s".format(iname, jname, oname)
- )
-
- assert(jname != null, "javaName is broken.") // documentation
- val doAdd = entries.get(jname) match {
- // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
- case Some(prevOName) =>
- // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
- // i.e. for them it must be the case that oname == java/lang/Thread
- assert(prevOName == oname, "duplicate")
- false
- case None => true
- }
-
- if(doAdd) {
- entries += (jname -> oname)
- jclass.visitInnerClass(jname, oname, iname, flags)
- }
-
- /*
- * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
- * If a class file has a version number that is greater than or equal to 51.0, and
- * has an InnerClasses attribute in its attributes table, then for all entries in the
- * classes array of the InnerClasses attribute, the value of the
- * outer_class_info_index item must be zero if the value of the
- * inner_name_index item is zero.
- */
-
- }
- }
- }
-
/** Adds a @remote annotation, actual use unknown.
*
* Invoked from genMethod() and addForwarder().
@@ -3033,9 +3033,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
constructor.visitEnd()
- // TODO no inner classes attribute is written. Confirm intent.
- assert(innerClassBuffer.isEmpty, innerClassBuffer)
-
+ addInnerClasses(clasz.symbol, beanInfoClass)
beanInfoClass.visitEnd()
writeIfNotTooBig("BeanInfo ", beanInfoName, beanInfoClass, clasz.symbol)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index d4ee9b6b48..5cc6e78e9d 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -100,9 +100,29 @@ abstract class DeadCodeElimination extends SubComponent { var rd = rdef.in(bb); for (Pair(i, idx) <- bb.toList.zipWithIndex) { i match { + case LOAD_LOCAL(l) => defs = defs + Pair(((bb, idx)), rd.vars) -// Console.println(i + ": " + (bb, idx) + " rd: " + rd + " and having: " + defs) + + case STORE_LOCAL(_) => + /* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it + * (otherwise any side-effects of the module's constructor are lost). + * (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM) + * are already marked (case clause below). + * (b) A CALL_METHOD targeting a method `m1` where the receiver is potentially a module (case clause below) + * will have the module's load marked provided `isSideEffecting(m1)`. + * TODO check for purity (the ICode?) of the module's constructor (besides m1's purity). 
+ * See also https://github.com/paulp/scala/blob/topic/purity-analysis/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala + */ + val necessary = rdef.findDefs(bb, idx, 1) exists { p => + val (bb1, idx1) = p + bb1(idx1) match { + case LOAD_MODULE(module) => isLoadNeeded(module) + case _ => false + } + } + if (necessary) worklist += ((bb, idx)) + case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) | THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) | LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() => worklist += ((bb, idx)) @@ -129,6 +149,10 @@ abstract class DeadCodeElimination extends SubComponent { } } + private def isLoadNeeded(module: Symbol): Boolean = { + module.info.member(nme.CONSTRUCTOR).filter(isSideEffecting) != NoSymbol + } + /** Mark useful instructions. Instructions in the worklist are each inspected and their * dependencies are marked useful too, and added to the worklist. 
*/ diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index d8bf23f4fe..046b177444 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -879,7 +879,7 @@ abstract class ClassfileParser { case tpnme.ScalaSignatureATTR => if (!isScalaAnnot) { debuglog("warning: symbol " + sym.fullName + " has pickled signature in attribute") - unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.toString) + unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.name) } in.skip(attrLen) case tpnme.ScalaATTR => @@ -897,7 +897,7 @@ abstract class ClassfileParser { case Some(san: AnnotationInfo) => val bytes = san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes - unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.toString) + unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name) case None => throw new RuntimeException("Scala class file does not contain Scala annotation") } @@ -1013,9 +1013,16 @@ abstract class ClassfileParser { } catch { case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found case ex: Throwable => - debuglog("dropping annotation on " + sym + ", an error occured during parsing (e.g. annotation class not found)") - - None // ignore malformed annotations ==> t1135 + // We want to be robust when annotations are unavailable, so the very least + // we can do is warn the user about the exception + // There was a reference to ticket 1135, but that is outdated: a reference to a class not on + // the classpath would *not* end up here. A class not found is signaled + // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example), + // and that should never be swallowed silently. 
+ warning("Caught: " + ex + " while parsing annotations in " + in.file) + if (settings.debug.value) ex.printStackTrace() + + None // ignore malformed annotations } /** diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index bc4483923a..e5119eac71 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -323,7 +323,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { // statements coming from the original class need retyping in the current context debuglog("retyping " + stat2) - val d = new specializeTypes.Duplicator + val d = new specializeTypes.Duplicator(Map[Symbol, Type]()) d.retyped(localTyper.context1.asInstanceOf[d.Context], stat2, genericClazz, diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index c4c769d7cf..124d350385 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -450,7 +450,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { /** Type parameters that survive when specializing in the specified environment. */ def survivingParams(params: List[Symbol], env: TypeEnv) = - params.filter(p => !p.isSpecialized || !isPrimitiveValueType(env(p))) + params filter { + p => + !p.isSpecialized || + !env.contains(p) || + !isPrimitiveValueType(env(p)) + } /** Produces the symbols from type parameters `syms` of the original owner, * in the given type environment `env`. The new owner is `nowner`. 
@@ -1176,7 +1181,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { || specializedTypeVars(t1).nonEmpty || specializedTypeVars(t2).nonEmpty) } - + env forall { case (tvar, tpe) => matches(tvar.info.bounds.lo, tpe) && matches(tpe, tvar.info.bounds.hi) || { if (warnings) @@ -1192,10 +1197,58 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } } + + def satisfiabilityConstraints(env: TypeEnv): Option[TypeEnv] = { + val noconstraints = Some(emptyEnv) + def matches(tpe1: Type, tpe2: Type): Option[TypeEnv] = { + val t1 = subst(env, tpe1) + val t2 = subst(env, tpe2) + // log("---------> " + tpe1 + " matches " + tpe2) + // log(t1 + ", " + specializedTypeVars(t1)) + // log(t2 + ", " + specializedTypeVars(t2)) + // log("unify: " + unify(t1, t2, env, false, false) + " in " + env) + if (t1 <:< t2) noconstraints + else if (specializedTypeVars(t1).nonEmpty) Some(unify(t1, t2, env, false, false) -- env.keys) + else if (specializedTypeVars(t2).nonEmpty) Some(unify(t2, t1, env, false, false) -- env.keys) + else None + } + + env.foldLeft[Option[TypeEnv]](noconstraints) { + case (constraints, (tvar, tpe)) => + val loconstraints = matches(tvar.info.bounds.lo, tpe) + val hiconstraints = matches(tpe, tvar.info.bounds.hi) + val allconstraints = for (c <- constraints; l <- loconstraints; h <- hiconstraints) yield c ++ l ++ h + allconstraints + } + } - class Duplicator extends { + /** This duplicator additionally performs casts of expressions if that is allowed by the `casts` map. 
*/ + class Duplicator(casts: Map[Symbol, Type]) extends { val global: SpecializeTypes.this.global.type = SpecializeTypes.this.global - } with typechecker.Duplicators + } with typechecker.Duplicators { + private val (castfrom, castto) = casts.unzip + private object CastMap extends SubstTypeMap(castfrom.toList, castto.toList) + + class BodyDuplicator(_context: Context) extends super.BodyDuplicator(_context) { + override def castType(tree: Tree, pt: Type): Tree = { + // log(" expected type: " + pt) + // log(" tree type: " + tree.tpe) + tree.tpe = if (tree.tpe != null) fixType(tree.tpe) else null + // log(" tree type: " + tree.tpe) + val ntree = if (tree.tpe != null && !(tree.tpe <:< pt)) { + val casttpe = CastMap(tree.tpe) + if (casttpe <:< pt) gen.mkCast(tree, casttpe) + else if (casttpe <:< CastMap(pt)) gen.mkCast(tree, pt) + else tree + } else tree + ntree.tpe = null + ntree + } + } + + protected override def newBodyDuplicator(context: Context) = new BodyDuplicator(context) + + } /** A tree symbol substituter that substitutes on type skolems. 
* If a type parameter is a skolem, it looks for the original @@ -1302,8 +1355,19 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } } + + def reportError[T](body: =>T)(handler: TypeError => T): T = + try body + catch { + case te: TypeError => + reporter.error(te.pos, te.msg) + handler(te) + } - override def transform(tree: Tree): Tree = { + override def transform(tree: Tree): Tree = + reportError { transform1(tree) } {_ => tree} + + def transform1(tree: Tree) = { val symbol = tree.symbol /** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */ @@ -1329,14 +1393,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } else None } - def reportError[T](body: =>T)(handler: TypeError => T): T = - try body - catch { - case te: TypeError => - reporter.error(tree.pos, te.msg) - handler(te) - } - curTree = tree tree match { case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => @@ -1448,13 +1504,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) => // log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol)) - def reportTypeError(body: =>Tree) = - try body - catch { - case te: TypeError => - reporter.error(te.pos, te.toString) - ddef - } + def reportTypeError(body: =>Tree) = reportError(body)(_ => ddef) + if (symbol.isConstructor) { val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner)) @@ -1475,14 +1526,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { deriveDefDef(tree1)(transform) case NormalizedMember(target) => - debuglog("Normalized member: " + symbol + ", target: " + target) - if (target.isDeferred || conflicting(typeEnv(symbol))) { + val constraints = satisfiabilityConstraints(typeEnv(symbol)) + log("constraints: " + constraints) + if (target.isDeferred || constraints == None) { 
deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called.")) - } - else { + } else { // we have an rhs, specialize it val tree1 = reportTypeError { - duplicateBody(ddef, target) + duplicateBody(ddef, target, constraints.get) } debuglog("implementation: " + tree1) deriveDefDef(tree1)(transform) @@ -1546,7 +1597,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val tree1 = deriveValDef(tree)(_ => body(symbol.alias).duplicate) debuglog("now typing: " + tree1 + " in " + tree.symbol.owner.fullName) - val d = new Duplicator + val d = new Duplicator(emptyEnv) val newValDef = d.retyped( localTyper.context1.asInstanceOf[d.Context], tree1, @@ -1571,12 +1622,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { super.transform(tree) } } - - private def duplicateBody(tree: DefDef, source: Symbol) = { + + /** Duplicate the body of the given method `tree` to the new symbol `source`. + * + * Knowing that the method can be invoked only in the `castmap` type environment, + * this method will insert casts for all the expressions of types mappend in the + * `castmap`. 
+ */ + private def duplicateBody(tree: DefDef, source: Symbol, castmap: TypeEnv = emptyEnv) = { val symbol = tree.symbol val meth = addBody(tree, source) - val d = new Duplicator + val d = new Duplicator(castmap) debuglog("-->d DUPLICATING: " + meth) d.retyped( localTyper.context1.asInstanceOf[d.Context], diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 6386273c9d..63d1bd0e9f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -21,7 +21,7 @@ abstract class Duplicators extends Analyzer { def retyped(context: Context, tree: Tree): Tree = { resetClassOwners - (new BodyDuplicator(context)).typed(tree) + (newBodyDuplicator(context)).typed(tree) } /** Retype the given tree in the given context. Use this method when retyping @@ -37,15 +37,17 @@ abstract class Duplicators extends Analyzer { envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList) debuglog("retyped with env: " + env) - (new BodyDuplicator(context)).typed(tree) + newBodyDuplicator(context).typed(tree) } + protected def newBodyDuplicator(context: Context) = new BodyDuplicator(context) + def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree = - (new BodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis) + (newBodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis) /** Return the special typer for duplicate method bodies. */ override def newTyper(context: Context): Typer = - new BodyDuplicator(context) + newBodyDuplicator(context) private def resetClassOwners() { oldClassOwner = null @@ -209,6 +211,11 @@ abstract class Duplicators extends Analyzer { } } + /** Optionally cast this tree into some other type, if required. + * Unless overridden, just returns the tree. 
+ */ + def castType(tree: Tree, pt: Type): Tree = tree + /** Special typer method for re-type checking trees. It expects a typed tree. * Returns a typed tree that has fresh symbols for all definitions in the original tree. * @@ -319,10 +326,10 @@ abstract class Duplicators extends Analyzer { super.typed(atPos(tree.pos)(tree1), mode, pt) case This(_) => - // log("selection on this, plain: " + tree) + debuglog("selection on this, plain: " + tree) tree.symbol = updateSym(tree.symbol) - tree.tpe = null - val tree1 = super.typed(tree, mode, pt) + val ntree = castType(tree, pt) + val tree1 = super.typed(ntree, mode, pt) // log("plain this typed to: " + tree1) tree1 /* no longer needed, because Super now contains a This(...) @@ -358,16 +365,18 @@ abstract class Duplicators extends Analyzer { case EmptyTree => // no need to do anything, in particular, don't set the type to null, EmptyTree.tpe_= asserts tree - + case _ => - // log("Duplicators default case: " + tree.summaryString + " -> " + tree) + debuglog("Duplicators default case: " + tree.summaryString) + debuglog(" ---> " + tree) if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) { tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==) } - tree.tpe = null - super.typed(tree, mode, pt) + val ntree = castType(tree, pt) + super.typed(ntree, mode, pt) } } + } } diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala index e1fb683aa9..69b27045ab 100644 --- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala +++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala @@ -88,9 +88,11 @@ trait EtaExpansion { self: Analyzer => defs ++= stats liftoutPrefix(fun) case Apply(fn, args) => - val byName = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe)) - // zipAll: with repeated params, there might be more args than params - 
val newArgs = args.zipAll(byName, EmptyTree, false) map { case (arg, byN) => liftout(arg, byN) } + val byName: Int => Option[Boolean] = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe)).lift + val newArgs = mapWithIndex(args) { (arg, i) => + // with repeated params, there might be more or fewer args than params + liftout(arg, byName(i).getOrElse(false)) + } treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null case TypeApply(fn, args) => treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null @@ -107,11 +109,20 @@ trait EtaExpansion { self: Analyzer => */ def expand(tree: Tree, tpe: Type): Tree = tpe match { case mt @ MethodType(paramSyms, restpe) if !mt.isImplicit => - val params = paramSyms map (sym => - ValDef(Modifiers(SYNTHETIC | PARAM), - sym.name.toTermName, TypeTree(sym.tpe) , EmptyTree)) + val params: List[(ValDef, Boolean)] = paramSyms.map { + sym => + val origTpe = sym.tpe + val isRepeated = definitions.isRepeatedParamType(origTpe) + // SI-4176 Don't leak A* in eta-expanded function types. See t4176b.scala + val droppedStarTpe = if (settings.etaExpandKeepsStar.value) origTpe else dropRepeatedParamType(origTpe) + val valDef = ValDef(Modifiers(SYNTHETIC | PARAM), sym.name.toTermName, TypeTree(droppedStarTpe), EmptyTree) + (valDef, isRepeated) + } atPos(tree.pos.makeTransparent) { - Function(params, expand(Apply(tree, params map gen.paramToArg), restpe)) + val args = params.map { + case (valDef, isRepeated) => gen.paramToArg(Ident(valDef.name), isRepeated) + } + Function(params.map(_._1), expand(Apply(tree, args), restpe)) } case _ => tree diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index f7e00109ae..68782379a6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1125,7 +1125,7 @@ trait Implicits { * such that some part of `tp` has C as one of its superclasses. 
*/ private def implicitsOfExpectedType: Infoss = { - Statistics.incCounter(implicitCacheHits) + Statistics.incCounter(implicitCacheAccs) implicitsCache get pt match { case Some(implicitInfoss) => Statistics.incCounter(implicitCacheHits) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 688dcd91ac..e99c31374e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1104,7 +1104,9 @@ trait Infer { try { // debuglog("TVARS "+ (tvars map (_.constr))) // look at the argument types of the primary constructor corresponding to the pattern - val variances = undetparams map varianceInType(ctorTp.paramTypes.headOption getOrElse ctorTp) + val variances = + if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp) + else undetparams map varianceInTypes(ctorTp.paramTypes) val targs = solvedTypes(tvars, undetparams, variances, true, lubDepth(List(resTp, pt))) // checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ") // no checkBounds here. If we enable it, test bug602 fails. diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala index bde3ad98c9..3eff5ef024 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Modes.scala @@ -86,10 +86,6 @@ trait Modes { */ final val TYPEPATmode = 0x10000 - /** RETmode is set when we are typing a return expression. 
- */ - final val RETmode = 0x20000 - final private val StickyModes = EXPRmode | PATTERNmode | TYPEmode | ALTmode final def onlyStickyModes(mode: Int) = diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 9b8ddffb49..c466206192 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -123,7 +123,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def zero: M[Nothing] def one[T](x: P[T]): M[T] def guard[T](cond: P[Boolean], then: => P[T]): M[T] - def isSuccess[T, U](x: P[T])(f: P[T] => M[U]): P[Boolean] // used for isDefinedAt } * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`) @@ -137,7 +136,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x)) - def isSuccess[T, U](x: T)(f: T => Option[U]): Boolean = !f(x).isEmpty } */ @@ -2006,7 +2004,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL private val uniques = new collection.mutable.HashMap[Tree, Var] def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe)) } - class Var(val path: Tree, fullTp: Type, checked: Boolean = true) extends AbsVar { + class Var(val path: Tree, fullTp: Type) extends AbsVar { private[this] val id: Int = Var.nextId // private[this] var canModify: Option[Array[StackTraceElement]] = None @@ -2028,26 +2026,24 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // we enumerate the subtypes of the full type, as that allows us to filter out more types 
statically, // once we go to run-time checks (on Const's), convert them to checkable types // TODO: there seems to be bug for singleton domains (variable does not show up in model) - lazy val domain: Option[Set[Const]] = - if (!checked) None - else { - val subConsts = enumerateSubtypes(fullTp).map{ tps => - tps.toSet[Type].map{ tp => - val domainC = TypeConst(tp) - registerEquality(domainC) - domainC - } + lazy val domain: Option[Set[Const]] = { + val subConsts = enumerateSubtypes(fullTp).map{ tps => + tps.toSet[Type].map{ tp => + val domainC = TypeConst(tp) + registerEquality(domainC) + domainC } + } - val allConsts = - if (! _considerNull) subConsts - else { - registerEquality(NullConst) - subConsts map (_ + NullConst) - } + val allConsts = + if (! _considerNull) subConsts + else { + registerEquality(NullConst) + subConsts map (_ + NullConst) + } - observed; allConsts - } + observed; allConsts + } // accessing after calling considerNull will result in inconsistencies lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo } @@ -2157,6 +2153,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // the equals inherited from AnyRef does just this } + // find most precise super-type of tp that is a class + // we skip non-class types (singleton types, abstract types) so that we can + // correctly compute how types relate in terms of the values they rule out + // e.g., when we know some value must be of type T, can it still be of type S? (this is the positive formulation of what `excludes` on Const computes) + // since we're talking values, there must have been a class involved in creating it, so rephrase our types in terms of classes + // (At least conceptually: `true` is an instance of class `Boolean`) + private def widenToClass(tp: Type) = { + // getOrElse to err on the safe side -- all BTS should end in Any, right? 
+ val wideTp = tp.widen + val clsTp = + if (wideTp.typeSymbol.isClass) wideTp + else wideTp.baseTypeSeq.toList.find(_.typeSymbol.isClass).getOrElse(AnyClass.tpe) + // patmatDebug("Widening to class: "+ (tp, clsTp, tp.widen, tp.widen.baseTypeSeq, tp.widen.baseTypeSeq.toList.find(_.typeSymbol.isClass))) + clsTp + } object TypeConst { def apply(tp: Type) = { @@ -2172,7 +2183,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL assert(!(tp =:= NullTp)) private[this] val id: Int = Const.nextTypeId - val wideTp = tp.widen + val wideTp = widenToClass(tp) override def toString = tp.toString //+"#"+ id } @@ -2191,10 +2202,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL val tp = p.tpe.normalize if (tp =:= NullTp) NullConst else { - val wideTp = { - if (p.hasSymbol && p.symbol.isStable) tp.asSeenFrom(tp.prefix, p.symbol.owner).widen - else tp.widen - } + val wideTp = widenToClass(tp) val narrowTp = if (tp.isInstanceOf[SingletonType]) tp @@ -2354,14 +2362,19 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // exhaustivity - // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte - // TODO: domain of feasibly enumerable built-in types (enums, char?) + // TODO: domain of other feasibly enumerable built-in types (char?) def enumerateSubtypes(tp: Type): Option[List[Type]] = tp.typeSymbol match { + // TODO case _ if tp.isTupleType => // recurse into component types? + case UnitClass => + Some(List(UnitClass.tpe)) case BooleanClass => // patmatDebug("enum bool "+ tp) Some(List(ConstantType(Constant(true)), ConstantType(Constant(false)))) // TODO case _ if tp.isTupleType => // recurse into component types + case modSym: ModuleClassSymbol => + Some(List(tp)) + // make sure it's not a primitive, else (5: Byte) match { case 5 => ... 
} sees no Byte case sym if !sym.isSealed || isPrimitiveValueClass(sym) => // patmatDebug("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym))) None diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 119bb0852c..44fd4e9afd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1150,7 +1150,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R nonSensiblyNew() else if (isNew(args.head) && (receiver.isEffectivelyFinal || isReferenceOp)) // object X ; X == new Y nonSensiblyNew() - else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual)) { // object X, Y; X == Y + else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual) && !actual.isRefinementClass) { // object X, Y; X == Y if (isEitherNullable) nonSensible("non-null ", false) else diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index daae69590f..f67cec730b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -124,7 +124,15 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT !(member.isAbstractOverride && member.isIncompleteIn(clazz))) unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+ "unless it is overridden by a member declared `abstract' and `override'"); + } else if (mix == tpnme.EMPTY && !sym.owner.isTrait){ + // SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract. 
+ val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner) + intermediateClasses.map(sym.overridingSymbol).find(s => s.isDeferred && !s.isAbstractOverride && !s.owner.isTrait).foreach { + absSym => + unit.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from ${clazz} because ${absSym.owner} redeclares it as abstract") + } } + if (name.isTermName && mix == tpnme.EMPTY && (clazz.isTrait || clazz != currentClass || !validCurrentOwner)) ensureAccessor(sel) else sel diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 69d3fd7f47..52cce11deb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1833,7 +1833,7 @@ trait Typers extends Modes with Adaptations with Tags { val params = fn.tpe.params val args2 = if (params.isEmpty || !isRepeatedParamType(params.last.tpe)) args else args.take(params.length - 1) :+ EmptyTree - assert(sameLength(args2, params), "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug + assert(sameLength(args2, params) || call.isErrorTyped, "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug (superConstr, args1 ::: args2) case Block(stats, expr) if !stats.isEmpty => decompose(stats.last) @@ -2036,7 +2036,7 @@ trait Typers extends Modes with Adaptations with Tags { transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe) } - if (meth.isClassConstructor && !isPastTyper && !reporter.hasErrors && !meth.owner.isSubClass(AnyValClass)) { + if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass)) { // At this point in AnyVal there is no supercall, which will blow up // in computeParamAliases; there's nothing to be computed for Anyval anyway. 
if (meth.isPrimaryConstructor) @@ -3103,66 +3103,67 @@ trait Typers extends Modes with Adaptations with Tags { val otpe = fun.tpe - if (args.length > MaxTupleArity) - return duplErrorTree(TooManyArgsPatternError(fun)) - - // - def freshArgType(tp: Type): (List[Symbol], Type) = tp match { - case MethodType(param :: _, _) => - (Nil, param.tpe) - case PolyType(tparams, restpe) => - createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t))) - // No longer used, see test case neg/t960.scala (#960 has nothing to do with it) - case OverloadedType(_, _) => - OverloadedUnapplyError(fun) - (Nil, ErrorType) - case _ => - UnapplyWithSingleArgError(fun) - (Nil, ErrorType) - } + if (args.length > MaxTupleArity) + return duplErrorTree(TooManyArgsPatternError(fun)) + + // + def freshArgType(tp: Type): (List[Symbol], Type) = tp match { + case MethodType(param :: _, _) => + (Nil, param.tpe) + case PolyType(tparams, restpe) => + createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t))) + // No longer used, see test case neg/t960.scala (#960 has nothing to do with it) + case OverloadedType(_, _) => + OverloadedUnapplyError(fun) + (Nil, ErrorType) + case _ => + UnapplyWithSingleArgError(fun) + (Nil, ErrorType) + } - val unapp = unapplyMember(otpe) - val unappType = otpe.memberType(unapp) - val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt - val arg = Ident(argDummy) setType pt + val unapp = unapplyMember(otpe) + val unappType = otpe.memberType(unapp) + val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt + val arg = Ident(argDummy) setType pt val uncheckedTypeExtractor = if (unappType.paramTypes.nonEmpty) extractorForUncheckedType(tree.pos, unappType.paramTypes.head) else None - if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) { - //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType) - val (freeVars, unappFormal) = 
freshArgType(unappType.skolemizeExistential(context.owner, tree)) - val unapplyContext = context.makeNewScope(context.tree, context.owner) - freeVars foreach unapplyContext.scope.enter + if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) { + //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType) + val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree)) + val unapplyContext = context.makeNewScope(context.tree, context.owner) + freeVars foreach unapplyContext.scope.enter - val typer1 = newTyper(unapplyContext) + val typer1 = newTyper(unapplyContext) val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe, canRemedy = uncheckedTypeExtractor.nonEmpty) - // turn any unresolved type variables in freevars into existential skolems - val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) - arg.tpe = pattp.substSym(freeVars, skolems) - argDummy setInfo arg.tpe - } + // turn any unresolved type variables in freevars into existential skolems + val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) + arg.tpe = pattp.substSym(freeVars, skolems) + argDummy setInfo arg.tpe + } - // setType null is necessary so that ref will be stabilized; see bug 881 - val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg))) + // setType null is necessary so that ref will be stabilized; see bug 881 + val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg))) - if (fun1.tpe.isErroneous) { - duplErrTree - } else { - val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe) - val formals1 = formalTypes(formals0, args.length) - if (sameLength(formals1, args)) { - val args1 = typedArgs(args, mode, formals0, formals1) - // This used to be the following (failing) assert: - // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt) - // I modified as follows. See SI-1048. 
- val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt) - - val itype = glb(List(pt1, arg.tpe)) - arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking) + if (fun1.tpe.isErroneous) duplErrTree + else { + val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe) + val formals1 = formalTypes(formals0, args.length) + + if (!sameLength(formals1, args)) duplErrorTree(WrongNumberArgsPatternError(tree, fun)) + else { + val args1 = typedArgs(args, mode, formals0, formals1) + // This used to be the following (failing) assert: + // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt) + // I modified as follows. See SI-1048. + val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt) + + val itype = glb(List(pt1, arg.tpe)) + arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking) val unapply = UnApply(fun1, args1) setPos tree.pos setType itype // if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor @@ -3170,9 +3171,8 @@ trait Typers extends Modes with Adaptations with Tags { // also skip if we already wrapped a classtag extractor (so we don't keep doing that forever) if (uncheckedTypeExtractor.isEmpty || fun1.symbol.owner.isNonBottomSubClass(ClassTagClass)) unapply else wrapClassTagUnapply(unapply, uncheckedTypeExtractor.get, unappType.paramTypes.head) - } else - duplErrorTree(WrongNumberArgsPatternError(tree, fun)) - } + } + } } def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = { @@ -4024,8 +4024,7 @@ trait Typers extends Modes with Adaptations with Tags { ReturnWithoutTypeError(tree, enclMethod.owner) } else { context.enclMethod.returnsSeen = true - val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe) - + val expr1: Tree = typed(expr, EXPRmode | BYVALmode, restpt.tpe) // Warn about returning a value if no value can be returned. 
if (restpt.tpe.typeSymbol == UnitClass) { // The typing in expr1 says expr is Unit (it has already been coerced if @@ -4687,7 +4686,10 @@ trait Typers extends Modes with Adaptations with Tags { ) val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual) // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right? - stabilize(tree2, pre2, mode, pt) + val tree3 = stabilize(tree2, pre2, mode, pt) + // SI-5967 Important to replace param type A* with Seq[A] when seen from from a reference, to avoid + // inference errors in pattern matching. + tree3 setType dropRepeatedParamType(tree3.tpe) } } } @@ -4866,9 +4868,10 @@ trait Typers extends Modes with Adaptations with Tags { for (cdef <- catches1 if cdef.guard.isEmpty) { def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.") + def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol cdef.pat match { - case Bind(name, Ident(_)) => warn(name) - case Ident(name) => warn(name) + case Bind(name, i@Ident(_)) if unbound(i) => warn(name) + case i@Ident(name) if unbound(i) => warn(name) case _ => } } @@ -4978,7 +4981,7 @@ trait Typers extends Modes with Adaptations with Tags { typedTypeApply(tree, mode, fun1, args1) case Apply(Block(stats, expr), args) => - typed1(atPos(tree.pos)(Block(stats, Apply(expr, args))), mode, pt) + typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt) case Apply(fun, args) => typedApply(fun, args) match { @@ -5131,7 +5134,7 @@ trait Typers extends Modes with Adaptations with Tags { indentTyping() var alreadyTyped = false - val startByType = Statistics.pushTimerClass(byTypeNanos, tree.getClass) + val startByType = Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) Statistics.incCounter(visitsByType, tree.getClass) try { if (context.retyping && @@ -5187,7 +5190,7 @@ trait Typers extends Modes with 
Adaptations with Tags { } finally { deindentTyping() - Statistics.popTimerClass(byTypeNanos, startByType) + Statistics.popTimer(byTypeStack, startByType) } } @@ -5375,10 +5378,11 @@ object TypersStats { val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount) val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount) val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount) - val failedSilentNanos = Statistics.newSubTimer ("time spent in failed", typerNanos) - val failedApplyNanos = Statistics.newSubTimer (" failed apply", typerNanos) - val failedOpEqNanos = Statistics.newSubTimer (" failed op=", typerNanos) - val isReferencedNanos = Statistics.newSubTimer ("time spent ref scanning", typerNanos) - val visitsByType = Statistics.newByClass ("#visits by tree node", "typer")(Statistics.newCounter("")) - val byTypeNanos = Statistics.newByClassTimerStack("time spent by tree node", typerNanos) + val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos) + val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos) + val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos) + val isReferencedNanos = Statistics.newSubTimer("time spent ref scanning", typerNanos) + val visitsByType = Statistics.newByClass("#visits by tree node", "typer")(Statistics.newCounter("")) + val byTypeNanos = Statistics.newByClass("time spent by tree node", "typer")(Statistics.newStackableTimer("", typerNanos)) + val byTypeStack = Statistics.newTimerStack() } diff --git a/src/compiler/scala/tools/nsc/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index 4f4db83339..f8ded56ec6 100644 --- a/src/compiler/scala/tools/nsc/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -1,12 +1,15 @@ -package scala.tools.nsc +package scala.tools +package reflect -import 
reporters.Reporter +import scala.tools.nsc.Global +import scala.tools.nsc.reporters.Reporter +import scala.tools.nsc.Settings /** A version of Global that uses reflection to get class * infos, instead of reading class or source files. */ class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader) - extends Global(currentSettings, reporter) with scala.tools.nsc.ReflectSetup with scala.reflect.runtime.SymbolTable { + extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable { override def transformedType(sym: Symbol) = erasure.transformInfo(sym, diff --git a/src/compiler/scala/tools/nsc/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala index 161391fc2c..116ae24cdd 100644 --- a/src/compiler/scala/tools/nsc/ReflectMain.scala +++ b/src/compiler/scala/tools/reflect/ReflectMain.scala @@ -1,8 +1,12 @@ -package scala.tools.nsc +package scala.tools +package reflect -import tools.util.PathResolver -import util.ClassPath.DefaultJavaContext -import util.ScalaClassLoader +import scala.tools.nsc.Driver +import scala.tools.nsc.Global +import scala.tools.nsc.Settings +import scala.tools.nsc.util.ClassPath.DefaultJavaContext +import scala.tools.nsc.util.ScalaClassLoader +import scala.tools.util.PathResolver object ReflectMain extends Driver { diff --git a/src/compiler/scala/tools/nsc/ReflectSetup.scala b/src/compiler/scala/tools/reflect/ReflectSetup.scala index 26c720a10f..f18c114d62 100644 --- a/src/compiler/scala/tools/nsc/ReflectSetup.scala +++ b/src/compiler/scala/tools/reflect/ReflectSetup.scala @@ -1,7 +1,10 @@ -package scala.tools.nsc +package scala.tools +package reflect + +import scala.tools.nsc.Global /** A helper trait to initialize things that need to be set before JavaMirrors and other * reflect specific traits are initialized */ -private[nsc] trait ReflectSetup { this: Global => +private[reflect] trait ReflectSetup { this: Global => phase 
= new Run().typerPhase }
\ No newline at end of file diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 278f4e3ff7..b4178102b9 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -2,7 +2,6 @@ package scala.tools package reflect import scala.tools.nsc.reporters._ -import scala.tools.nsc.ReflectGlobal import scala.tools.nsc.CompilerCommand import scala.tools.nsc.Global import scala.tools.nsc.typechecker.Modes diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala index 464ffc6fab..a20ff1667b 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala @@ -171,9 +171,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { vprintln("yes we can!! (byval)") return true } - } else if ((mode & global.analyzer.RETmode) != 0) { - vprintln("yes we can!! 
(return)") - return true } } false @@ -187,7 +184,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { val patMode = (mode & global.analyzer.PATTERNmode) != 0 val exprMode = (mode & global.analyzer.EXPRmode) != 0 val byValMode = (mode & global.analyzer.BYVALmode) != 0 - val retMode = (mode & global.analyzer.RETmode) != 0 val annotsTree = cpsParamAnnotation(tree.tpe) val annotsExpected = cpsParamAnnotation(pt) @@ -214,12 +210,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { val res = tree modifyType addMinusMarker vprintln("adapted annotations (by val) of " + tree + " to " + res.tpe) res - } else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) { - // add a marker annotation that will make tree.tpe behave as pt, subtyping wise - // tree will look like having no annotation - val res = tree modifyType (_ withAnnotations List(newPlusMarker())) - vprintln("adapted annotations (return) of " + tree + " to " + res.tpe) - res } else tree } @@ -476,11 +466,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { } tpe - case ret @ Return(expr) => - if (hasPlusMarker(expr.tpe)) - ret setType expr.tpe - ret.tpe - case _ => tpe } diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala index 765cde5a81..3a1dc87a6a 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala @@ -3,7 +3,6 @@ package scala.tools.selectivecps import scala.tools.nsc.Global -import scala.collection.mutable.ListBuffer trait CPSUtils { val global: Global @@ -136,43 +135,4 @@ trait CPSUtils { case _ => None } } - - def isTailReturn(retExpr: Tree, body: Tree): Boolean = { - val removed = ListBuffer[Tree]() - removeTailReturn(body, removed) - removed contains retExpr - } - - def removeTailReturn(tree: Tree, removed: ListBuffer[Tree]): Tree 
= tree match { - case Block(stms, r @ Return(expr)) => - removed += r - treeCopy.Block(tree, stms, expr) - - case Block(stms, expr) => - treeCopy.Block(tree, stms, removeTailReturn(expr, removed)) - - case If(cond, r1 @ Return(thenExpr), r2 @ Return(elseExpr)) => - removed ++= Seq(r1, r2) - treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed)) - - case If(cond, thenExpr, elseExpr) => - treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed)) - - case Try(block, catches, finalizer) => - treeCopy.Try(tree, - removeTailReturn(block, removed), - (catches map (t => removeTailReturn(t, removed))).asInstanceOf[List[CaseDef]], - removeTailReturn(finalizer, removed)) - - case CaseDef(pat, guard, r @ Return(expr)) => - removed += r - treeCopy.CaseDef(tree, pat, guard, expr) - - case CaseDef(pat, guard, body) => - treeCopy.CaseDef(tree, pat, guard, removeTailReturn(body, removed)) - - case _ => - tree - } - } diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala index fe465aad0d..017c8d24fd 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala @@ -9,8 +9,6 @@ import scala.tools.nsc.plugins._ import scala.tools.nsc.ast._ -import scala.collection.mutable.ListBuffer - /** * In methods marked @cps, explicitly name results of calls to other @cps methods */ @@ -48,20 +46,10 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with // this would cause infinite recursion. But we could remove the // ValDef case here. 
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs0) => + case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) => debuglog("transforming " + dd.symbol) atOwner(dd.symbol) { - val tailReturns = ListBuffer[Tree]() - val rhs = removeTailReturn(rhs0, tailReturns) - // throw an error if there is a Return tree which is not in tail position - rhs0 foreach { - case r @ Return(_) => - if (!tailReturns.contains(r)) - unit.error(r.pos, "return expressions in CPS code must be in tail position") - case _ => /* do nothing */ - } - val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe)) debuglog("result "+rhs1) @@ -165,6 +153,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with } } + def transExpr(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): Tree = { transTailValue(tree, cpsA, cpsR) match { case (Nil, b) => b diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala index d00414751a..d961bd84bb 100644 --- a/src/library/scala/collection/DefaultMap.scala +++ b/src/library/scala/collection/DefaultMap.scala @@ -27,7 +27,7 @@ import generic._ * @since 2.8 */ trait DefaultMap[A, +B] extends Map[A, B] { self => - + /** A default implementation which creates a new immutable map. 
*/ override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = { @@ -41,7 +41,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self => */ override def - (key: A): Map[A, B] = { val b = newBuilder - b ++= this filter (key != _) + b ++= this filter (key != _._1) b.result } } diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala index eaec7a2a76..0d51230623 100644 --- a/src/library/scala/collection/GenTraversableLike.scala +++ b/src/library/scala/collection/GenTraversableLike.scala @@ -411,12 +411,3 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with def stringPrefix: String } - -object GenTraversableLike { - /** Manufacture a conversion from collection representation type `Repr` to - * its corresponding `GenTraversableLike` given an implicitly available - * instance of `FromRepr[Repr]`. - * @see [[scala.collection.generic.FromRepr]] - */ - implicit def fromRepr[Repr](implicit fr : FromRepr[Repr]) = fr.hasElem -} diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala index e475865391..25edcfe19c 100644 --- a/src/library/scala/collection/GenTraversableOnce.scala +++ b/src/library/scala/collection/GenTraversableOnce.scala @@ -565,10 +565,10 @@ trait GenTraversableOnce[+A] extends Any { * @tparam Col The collection type to build. * @return a new collection containing all elements of this $coll. * - * @usecase def convertTo[Col[_]]: Col[A] + * @usecase def to[Col[_]]: Col[A] * @inheritdoc * $willNotTerminateInf * @return a new collection containing all elements of this $coll. 
*/ - def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] + def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] } diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala index c842475590..e0c8b21d09 100644 --- a/src/library/scala/collection/IterableViewLike.scala +++ b/src/library/scala/collection/IterableViewLike.scala @@ -44,7 +44,7 @@ trait IterableViewLike[+A, } /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */ - private[collection] abstract class AbstractTransformed[+B] extends super[TraversableViewLike].Transformed[B] with Transformed[B] + private[collection] abstract class AbstractTransformed[+B] extends Iterable[B] with super[TraversableViewLike].Transformed[B] with Transformed[B] trait EmptyView extends Transformed[Nothing] with super[TraversableViewLike].EmptyView with super[GenIterableViewLike].EmptyView diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala index 42a56a9c5a..19c24c9791 100644 --- a/src/library/scala/collection/Map.scala +++ b/src/library/scala/collection/Map.scala @@ -51,6 +51,7 @@ object Map extends MapFactory[Map] { def iterator = underlying.iterator override def default(key: A): B = d(key) } + } /** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
*/ diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala index 75f9ff93db..55d482f6c8 100644 --- a/src/library/scala/collection/MapLike.scala +++ b/src/library/scala/collection/MapLike.scala @@ -226,7 +226,7 @@ self => */ def default(key: A): B = throw new NoSuchElementException("key not found: " + key) - + protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] { override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv) def iterator = self.iterator.filter(kv => p(kv._1)) @@ -240,7 +240,7 @@ self => * the predicate `p`. The resulting map wraps the original map without copying any elements. */ def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) - + protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] { override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v))) def iterator = for ((k, v) <- self.iterator) yield (k, f(v)) diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala index f64045c9f6..73f5dda11c 100644 --- a/src/library/scala/collection/SeqViewLike.scala +++ b/src/library/scala/collection/SeqViewLike.scala @@ -40,7 +40,7 @@ trait SeqViewLike[+A, } /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. 
*/ - private[collection] abstract class AbstractTransformed[+B] extends super[IterableViewLike].AbstractTransformed[B] with Transformed[B] + private[collection] abstract class AbstractTransformed[+B] extends Seq[B] with super[IterableViewLike].Transformed[B] with Transformed[B] trait EmptyView extends Transformed[Nothing] with super[IterableViewLike].EmptyView with super[GenSeqViewLike].EmptyView diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala index 3f92908848..e32e0977df 100644 --- a/src/library/scala/collection/SortedMap.scala +++ b/src/library/scala/collection/SortedMap.scala @@ -30,9 +30,26 @@ trait SortedMap[A, +B] extends Map[A, B] with SortedMapLike[A, B, SortedMap[A, B * @since 2.8 */ object SortedMap extends SortedMapFactory[SortedMap] { - def empty[A, B](implicit ord: Ordering[A]): immutable.SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord) + def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord) implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B] + + private[collection] trait Default[A, +B] extends SortedMap[A, B] { + self => + override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = { + val b = SortedMap.newBuilder[A, B1] + b ++= this + b += ((kv._1, kv._2)) + b.result + } + + override def - (key: A): SortedMap[A, B] = { + val b = newBuilder + for (kv <- this; if kv._1 != key) b += kv + b.result + } + } + } diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala index 4dc0820a62..f9e95230ea 100644 --- a/src/library/scala/collection/SortedMapLike.scala +++ b/src/library/scala/collection/SortedMapLike.scala @@ -72,4 +72,27 @@ self => for (e <- elems) m = m + e m } + + override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] { + implicit def ordering: 
Ordering[A] = self.ordering + override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p) + } + + override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] { + implicit def ordering: Ordering[A] = self.ordering + override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f) + } + + /** Adds a number of elements provided by a traversable object + * and returns a new collection with the added elements. + * + * @param xs the traversable object. + */ + override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] = + ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _) + } + + + + diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index e5861f5760..9356832afd 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -617,7 +617,7 @@ trait TraversableLike[+A, +Repr] extends Any def toIterator: Iterator[A] = toStream.iterator def toStream: Stream[A] = toBuffer.toStream // Override to provide size hint. 
- override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = { + override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = { val b = cbf() b.sizeHint(this) b ++= thisCollection diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index 8dc6184d88..fb73805cc5 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -240,21 +240,21 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { def toTraversable: Traversable[A] - def toList: List[A] = convertTo[List] + def toList: List[A] = to[List] def toIterable: Iterable[A] = toStream def toSeq: Seq[A] = toStream - def toIndexedSeq: immutable.IndexedSeq[A] = convertTo[immutable.IndexedSeq] + def toIndexedSeq: immutable.IndexedSeq[A] = to[immutable.IndexedSeq] - def toBuffer[B >: A]: mutable.Buffer[B] = convertTo[ArrayBuffer].asInstanceOf[mutable.Buffer[B]] + def toBuffer[B >: A]: mutable.Buffer[B] = to[ArrayBuffer].asInstanceOf[mutable.Buffer[B]] - def toSet[B >: A]: immutable.Set[B] = convertTo[immutable.Set].asInstanceOf[immutable.Set[B]] + def toSet[B >: A]: immutable.Set[B] = to[immutable.Set].asInstanceOf[immutable.Set[B]] - def toVector: Vector[A] = convertTo[Vector] + def toVector: Vector[A] = to[Vector] - def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = { + def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = { val b = cbf() b ++= seq b.result diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala index eb2091a5f3..bf4f8205d6 100644 --- a/src/library/scala/collection/TraversableViewLike.scala +++ b/src/library/scala/collection/TraversableViewLike.scala @@ -117,7 +117,7 @@ trait TraversableViewLike[+A, } /** Explicit instantiation of the `Transformed` trait to reduce 
class file size in subclasses. */ - private[collection] abstract class AbstractTransformed[+B] extends Transformed[B] + private[collection] abstract class AbstractTransformed[+B] extends Traversable[B] with Transformed[B] trait EmptyView extends Transformed[Nothing] with super.EmptyView diff --git a/src/library/scala/collection/generic/FromRepr.scala b/src/library/scala/collection/generic/IsTraversableLike.scala index c08761332c..7288322903 100644 --- a/src/library/scala/collection/generic/FromRepr.scala +++ b/src/library/scala/collection/generic/IsTraversableLike.scala @@ -18,14 +18,12 @@ package generic * * Example usage, * {{{ - * import scala.collection.generic.{ CanBuildFrom, FromRepr, HasElem } - * - * class FilterMapImpl[A, Repr](val r : Repr)(implicit hasElem : HasElem[Repr, A]) { - * def filterMap[B, That](f : A => Option[B]) - * (implicit cbf : CanBuildFrom[Repr, B, That]) : That = r.flatMap(f(_).toSeq) + * class FilterMapImpl[A, Repr](val r: GenTraversableLike[A, Repr]) { + * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = + * r.flatMap(f(_).toSeq) * } - * - * implicit def filterMap[Repr : FromRepr](r : Repr) = new FilterMapImpl(r) + * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] = + * new FilterMapImpl(fr.conversion(r)) * * val l = List(1, 2, 3, 4, 5) * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) @@ -33,24 +31,28 @@ package generic * }}} * * @author Miles Sabin + * @author J. Suereth * @since 2.10 */ -trait FromRepr[Repr] { +trait IsTraversableLike[Repr] { + /** The type of elements we can traverse over. */ type A - val hasElem: HasElem[Repr, A] + /** A conversion from the representation type `Repr` to a `GenTraversableLike[A,Repr]`. 
*/ + val conversion: Repr => GenTraversableLike[A, Repr] } -object FromRepr { +object IsTraversableLike { import language.higherKinds - implicit val stringFromRepr : FromRepr[String] { type A = Char } = new FromRepr[String] { - type A = Char - val hasElem = implicitly[HasElem[String, Char]] - } + implicit val stringRepr: IsTraversableLike[String] { type A = Char } = + new IsTraversableLike[String] { + type A = Char + val conversion = implicitly[String => GenTraversableLike[Char, String]] + } - implicit def genTraversableLikeFromRepr[C[_], A0] - (implicit hasElem0: HasElem[C[A0], A0]) : FromRepr[C[A0]] { type A = A0 } = new FromRepr[C[A0]] { + implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableLike[A0,C[A0]]): IsTraversableLike[C[A0]] { type A = A0 } = + new IsTraversableLike[C[A0]] { type A = A0 - val hasElem = hasElem0 + val conversion = conv } } diff --git a/src/library/scala/collection/generic/IsTraversableOnce.scala b/src/library/scala/collection/generic/IsTraversableOnce.scala new file mode 100644 index 0000000000..b336553231 --- /dev/null +++ b/src/library/scala/collection/generic/IsTraversableOnce.scala @@ -0,0 +1,62 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection +package generic + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `GenTraversableOnce[A]`. + * + * This type enables simple enrichment of `GenTraversableOnce`s with extension + * methods which can make full use of the mechanics of the Scala collections + * framework in their implementation. 
+ * + * Example usage, + * {{{ + * class FilterMapImpl[A, Repr](val r: GenTraversableOnce[A]) { + * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = { + * val b = cbf() + * for(e <- r.seq) f(e) foreach (b +=) + * b.result + * } + * } + * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] = + * new FilterMapImpl[fr.A, Repr](fr.conversion(r)) + * + * val l = List(1, 2, 3, 4, 5) + * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) + * // == List(2, 4) + * }}} + * + * @author Miles Sabin + * @author J. Suereth + * @since 2.10 + */ +trait IsTraversableOnce[Repr] { + /** The type of elements we can traverse over. */ + type A + /** A conversion from the representation type `Repr` to a `GenTraversableOnce[A]`. */ + val conversion: Repr => GenTraversableOnce[A] +} + +object IsTraversableOnce { + import language.higherKinds + + implicit val stringRepr: IsTraversableOnce[String] { type A = Char } = + new IsTraversableOnce[String] { + type A = Char + val conversion = implicitly[String => GenTraversableOnce[Char]] + } + + implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableOnce[A0]): IsTraversableOnce[C[A0]] { type A = A0 } = + new IsTraversableOnce[C[A0]] { + type A = A0 + val conversion = conv + } +} + diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala index 85b9995f2e..6eecb5e3ff 100644 --- a/src/library/scala/collection/generic/package.scala +++ b/src/library/scala/collection/generic/package.scala @@ -6,12 +6,6 @@ import language.higherKinds package object generic { type CanBuild[-Elem, +To] = CanBuildFrom[Nothing, Elem, To] - /** The type of conversions from a collection representation type - * `Repr` to its corresponding GenTraversableLike. 
- * @see [[scala.collection.generic.FromRepr]] - */ - type HasElem[Repr, A] = Repr => GenTraversableLike[A, Repr] - @deprecated("use ClassTagTraversableFactory instead", "2.10.0") type ClassManifestTraversableFactory[CC[X] <: Traversable[X] with GenericClassManifestTraversableTemplate[X, CC]] = ClassTagTraversableFactory[CC] @@ -20,4 +14,4 @@ package object generic { @deprecated("use GenericClassTagTraversableTemplate instead", "2.10.0") type GenericClassManifestTraversableTemplate[+A, +CC[X] <: Traversable[X]] = GenericClassTagTraversableTemplate[A, CC] -}
\ No newline at end of file +} diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala index 039a57041c..e895c94599 100644 --- a/src/library/scala/collection/immutable/IntMap.scala +++ b/src/library/scala/collection/immutable/IntMap.scala @@ -18,17 +18,17 @@ import scala.collection.mutable.{ Builder, MapBuilder } private[immutable] object IntMapUtils extends BitOperations.Int { def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) - def join[T](p1 : Int, t1 : IntMap[T], p2 : Int, t2 : IntMap[T]) : IntMap[T] = { - val m = branchMask(p1, p2); - val p = mask(p1, m); + def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = { + val m = branchMask(p1, p2) + val p = mask(p1, m) if (zero(p1, m)) IntMap.Bin(p, m, t1, t2) - else IntMap.Bin(p, m, t2, t1); + else IntMap.Bin(p, m, t2, t1) } - def bin[T](prefix : Int, mask : Int, left : IntMap[T], right : IntMap[T]) : IntMap[T] = (left, right) match { - case (left, IntMap.Nil) => left; - case (IntMap.Nil, right) => right; - case (left, right) => IntMap.Bin(prefix, mask, left, right); + def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match { + case (left, IntMap.Nil) => left + case (IntMap.Nil, right) => right + case (left, right) => IntMap.Bin(prefix, mask, left, right) } } @@ -50,9 +50,9 @@ object IntMap { } def empty[T] : IntMap[T] = IntMap.Nil; - def singleton[T](key : Int, value : T) : IntMap[T] = IntMap.Tip(key, value); - def apply[T](elems : (Int, T)*) : IntMap[T] = - elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)); + def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value); + def apply[T](elems: (Int, T)*): IntMap[T] = + elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) private[immutable] case object Nil extends IntMap[Nothing] { // Important! 
Without this equals method in place, an infinite @@ -66,15 +66,15 @@ object IntMap { } } - private[immutable] case class Tip[+T](key : Int, value : T) extends IntMap[T]{ + private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{ def withValue[S](s: S) = - if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]]; - else IntMap.Tip(key, s); + if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]] + else IntMap.Tip(key, s) } - private[immutable] case class Bin[+T](prefix : Int, mask : Int, left : IntMap[T], right : IntMap[T]) extends IntMap[T]{ - def bin[S](left : IntMap[S], right : IntMap[S]) : IntMap[S] = { - if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]]; - else IntMap.Bin[S](prefix, mask, left, right); + private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] { + def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = { + if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]] + else IntMap.Bin[S](prefix, mask, left, right) } } @@ -83,60 +83,60 @@ object IntMap { import IntMap._ // Iterator over a non-empty IntMap. -private[immutable] abstract class IntMapIterator[V, T](it : IntMap[V]) extends AbstractIterator[T] { +private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] { // Basically this uses a simple stack to emulate conversion over the tree. However // because we know that Ints are at least 32 bits we can have at most 32 IntMap.Bins and // one IntMap.Tip sitting on the tree at any point. 
Therefore we know the maximum stack // depth is 33 and - var index = 0; - var buffer = new Array[AnyRef](33); + var index = 0 + var buffer = new Array[AnyRef](33) def pop = { - index -= 1; - buffer(index).asInstanceOf[IntMap[V]]; + index -= 1 + buffer(index).asInstanceOf[IntMap[V]] } - def push(x : IntMap[V]) { - buffer(index) = x.asInstanceOf[AnyRef]; - index += 1; + def push(x: IntMap[V]) { + buffer(index) = x.asInstanceOf[AnyRef] + index += 1 } - push(it); + push(it) /** * What value do we assign to a tip? */ - def valueOf(tip : IntMap.Tip[V]) : T; + def valueOf(tip: IntMap.Tip[V]): T - def hasNext = index != 0; - final def next : T = + def hasNext = index != 0 + final def next: T = pop match { case IntMap.Bin(_,_, t@IntMap.Tip(_, _), right) => { - push(right); - valueOf(t); + push(right) + valueOf(t) } case IntMap.Bin(_, _, left, right) => { - push(right); - push(left); - next; + push(right) + push(left) + next } - case t@IntMap.Tip(_, _) => valueOf(t); + case t@IntMap.Tip(_, _) => valueOf(t) // This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap // and don't return an IntMapIterator for IntMap.Nil. 
- case IntMap.Nil => sys.error("Empty maps not allowed as subtrees"); + case IntMap.Nil => sys.error("Empty maps not allowed as subtrees") } } -private[immutable] class IntMapEntryIterator[V](it : IntMap[V]) extends IntMapIterator[V, (Int, V)](it){ - def valueOf(tip : IntMap.Tip[V]) = (tip.key, tip.value); +private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) { + def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value) } -private[immutable] class IntMapValueIterator[V](it : IntMap[V]) extends IntMapIterator[V, V](it){ - def valueOf(tip : IntMap.Tip[V]) = tip.value +private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) { + def valueOf(tip: IntMap.Tip[V]) = tip.value } -private[immutable] class IntMapKeyIterator[V](it : IntMap[V]) extends IntMapIterator[V, Int](it){ - def valueOf(tip : IntMap.Tip[V]) = tip.key +private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) { + def valueOf(tip: IntMap.Tip[V]) = tip.key } import IntMap._ @@ -145,7 +145,7 @@ import IntMap._ * <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Integer Maps</a> * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. * - * Note: This class is as of 2.8 largely superseded by HashMap. + * '''Note:''' This class is as of 2.8 largely superseded by HashMap. * * @tparam T type of the values associated with integer keys. 
* @@ -155,17 +155,16 @@ import IntMap._ * @define mayNotTerminateInf * @define willNotTerminateInf */ -sealed abstract class IntMap[+T] -extends AbstractMap[Int, T] +sealed abstract class IntMap[+T] extends AbstractMap[Int, T] with Map[Int, T] with MapLike[Int, T, IntMap[T]] { - override def empty: IntMap[T] = IntMap.Nil; + override def empty: IntMap[T] = IntMap.Nil override def toList = { - val buffer = new scala.collection.mutable.ListBuffer[(Int, T)]; - foreach(buffer += _); - buffer.toList; + val buffer = new scala.collection.mutable.ListBuffer[(Int, T)] + foreach(buffer += _) + buffer.toList } /** @@ -173,109 +172,112 @@ extends AbstractMap[Int, T] * * @return an iterator over pairs of integer keys and corresponding values. */ - def iterator : Iterator[(Int, T)] = this match { - case IntMap.Nil => Iterator.empty; - case _ => new IntMapEntryIterator(this); + def iterator: Iterator[(Int, T)] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapEntryIterator(this) } /** * Loops over the key, value pairs of the map in unsigned order of the keys. */ - override final def foreach[U](f : ((Int, T)) => U) : Unit = this match { - case IntMap.Bin(_, _, left, right) => {left.foreach(f); right.foreach(f); } - case IntMap.Tip(key, value) => f((key, value)); - case IntMap.Nil => {}; + override final def foreach[U](f: ((Int, T)) => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } + case IntMap.Tip(key, value) => f((key, value)) + case IntMap.Nil => } - override def keysIterator : Iterator[Int] = this match { - case IntMap.Nil => Iterator.empty; - case _ => new IntMapKeyIterator(this); + override def keysIterator: Iterator[Int] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapKeyIterator(this) } /** - * Loop over the keys of the map. The same as keys.foreach(f), but may + * Loop over the keys of the map. The same as `keys.foreach(f)`, but may * be more efficient. 
* * @param f The loop body */ - final def foreachKey(f : Int => Unit) : Unit = this match { - case IntMap.Bin(_, _, left, right) => {left.foreachKey(f); right.foreachKey(f); } - case IntMap.Tip(key, _) => f(key); - case IntMap.Nil => {} + final def foreachKey(f: Int => Unit): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } + case IntMap.Tip(key, _) => f(key) + case IntMap.Nil => } - override def valuesIterator : Iterator[T] = this match { - case IntMap.Nil => Iterator.empty; - case _ => new IntMapValueIterator(this); + override def valuesIterator: Iterator[T] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapValueIterator(this) } /** - * Loop over the keys of the map. The same as keys.foreach(f), but may + * Loop over the keys of the map. The same as `keys.foreach(f)`, but may * be more efficient. * * @param f The loop body */ - final def foreachValue(f : T => Unit) : Unit = this match { - case IntMap.Bin(_, _, left, right) => {left.foreachValue(f); right.foreachValue(f); } - case IntMap.Tip(_, value) => f(value); - case IntMap.Nil => {}; + final def foreachValue(f: T => Unit): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } + case IntMap.Tip(_, value) => f(value) + case IntMap.Nil => } override def stringPrefix = "IntMap" - override def isEmpty = this == IntMap.Nil; + override def isEmpty = this == IntMap.Nil - override def filter(f : ((Int, T)) => Boolean) : IntMap[T] = this match { + override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match { case IntMap.Bin(prefix, mask, left, right) => { - val (newleft, newright) = (left.filter(f), right.filter(f)); - if ((left eq newleft) && (right eq newright)) this; - else bin(prefix, mask, newleft, newright); + val (newleft, newright) = (left.filter(f), right.filter(f)) + if ((left eq newleft) && (right eq newright)) this + else bin(prefix, mask, newleft, newright) } case 
IntMap.Tip(key, value) => if (f((key, value))) this - else IntMap.Nil; - case IntMap.Nil => IntMap.Nil; + else IntMap.Nil + case IntMap.Nil => IntMap.Nil } - def transform[S](f : (Int, T) => S) : IntMap[S] = this match { - case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)); - case t@IntMap.Tip(key, value) => t.withValue(f(key, value)); - case IntMap.Nil => IntMap.Nil; + def transform[S](f: (Int, T) => S): IntMap[S] = this match { + case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) + case t@IntMap.Tip(key, value) => t.withValue(f(key, value)) + case IntMap.Nil => IntMap.Nil } - final override def size : Int = this match { - case IntMap.Nil => 0; - case IntMap.Tip(_, _) => 1; - case IntMap.Bin(_, _, left, right) => left.size + right.size; + final override def size: Int = this match { + case IntMap.Nil => 0 + case IntMap.Tip(_, _) => 1 + case IntMap.Bin(_, _, left, right) => left.size + right.size } - final def get(key : Int) : Option[T] = this match { - case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key); - case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None; - case IntMap.Nil => None; + final def get(key: Int): Option[T] = this match { + case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) + case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None + case IntMap.Nil => None } - final override def getOrElse[S >: T](key : Int, default : =>S) : S = this match { - case IntMap.Nil => default; - case IntMap.Tip(key2, value) => if (key == key2) value else default; - case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default); + final override def getOrElse[S >: T](key: Int, default: => S): S = this match { + case IntMap.Nil => default + case IntMap.Tip(key2, value) => if (key == key2) value 
else default + case IntMap.Bin(prefix, mask, left, right) => + if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) } - final override def apply(key : Int) : T = this match { - case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key); - case IntMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found"); - case IntMap.Nil => sys.error("key not found"); + final override def apply(key: Int): T = this match { + case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key) + case IntMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found") + case IntMap.Nil => sys.error("key not found") } def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2) - override def updated[S >: T](key : Int, value : S) : IntMap[S] = this match { - case IntMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this); - else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right) - else IntMap.Bin(prefix, mask, left, right.updated(key, value)); - case IntMap.Tip(key2, value2) => if (key == key2) IntMap.Tip(key, value); - else join(key, IntMap.Tip(key, value), key2, this); - case IntMap.Nil => IntMap.Tip(key, value); + override def updated[S >: T](key: Int, value: S): IntMap[S] = this match { + case IntMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right) + else IntMap.Bin(prefix, mask, left, right.updated(key, value)) + case IntMap.Tip(key2, value2) => + if (key == key2) IntMap.Tip(key, value) + else join(key, IntMap.Tip(key, value), key2, this) + case IntMap.Nil => IntMap.Tip(key, value) } /** @@ -284,7 +286,7 @@ extends AbstractMap[Int, T] * Equivalent to: * {{{ * this.get(key) match { - * case None => 
this.update(key, value); + * case None => this.update(key, value) * case Some(oldvalue) => this.update(key, f(oldvalue, value) * } * }}} @@ -295,24 +297,26 @@ extends AbstractMap[Int, T] * @param f The function used to resolve conflicts. * @return The updated map. */ - def updateWith[S >: T](key : Int, value : S, f : (T, S) => S) : IntMap[S] = this match { - case IntMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this); - else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right) - else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f)); - case IntMap.Tip(key2, value2) => if (key == key2) IntMap.Tip(key, f(value2, value)); - else join(key, IntMap.Tip(key, value), key2, this); - case IntMap.Nil => IntMap.Tip(key, value); + def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match { + case IntMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right) + else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f)) + case IntMap.Tip(key2, value2) => + if (key == key2) IntMap.Tip(key, f(value2, value)) + else join(key, IntMap.Tip(key, value), key2, this) + case IntMap.Nil => IntMap.Tip(key, value) } - def - (key : Int) : IntMap[T] = this match { + def - (key: Int): IntMap[T] = this match { case IntMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) this; - else if (zero(key, mask)) bin(prefix, mask, left - key, right); - else bin(prefix, mask, left, right - key); + if (!hasMatch(key, prefix, mask)) this + else if (zero(key, mask)) bin(prefix, mask, left - key, right) + else bin(prefix, mask, left, right - key) case IntMap.Tip(key2, _) => - if (key == key2) IntMap.Nil; - else this; - case IntMap.Nil => IntMap.Nil; + if (key == key2) IntMap.Nil + else this + 
case IntMap.Nil => IntMap.Nil } /** @@ -324,7 +328,7 @@ extends AbstractMap[Int, T] * @param f The transforming function. * @return The modified map. */ - def modifyOrRemove[S](f : (Int, T) => Option[S]) : IntMap[S] = this match { + def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match { case IntMap.Bin(prefix, mask, left, right) => val newleft = left.modifyOrRemove(f) val newright = right.modifyOrRemove(f) @@ -350,25 +354,25 @@ extends AbstractMap[Int, T] * @param f The function used to resolve conflicts between two mappings. * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. */ - def unionWith[S >: T](that : IntMap[S], f : (Int, S, S) => S) : IntMap[S] = (this, that) match{ + def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match{ case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) => if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed - else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1); - else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f)); + if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed + else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1) + else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f)) } else if (shorter(m2, m1)){ - if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed - else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2); - else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f)); + if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed + else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2) + else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f)) } else { - if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)); - else join[S](p1, this, p2, that); // TODO: 
remove [S] when SI-5548 is fixed + if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) + else join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed } - case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)); - case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)); - case (IntMap.Nil, x) => x; - case (x, IntMap.Nil) => x; + case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) + case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) + case (IntMap.Nil, x) => x + case (x, IntMap.Nil) => x } /** @@ -382,13 +386,13 @@ extends AbstractMap[Int, T] * @param f The combining function. * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. */ - def intersectionWith[S, R](that : IntMap[S], f : (Int, T, S) => R) : IntMap[R] = (this, that) match { + def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match { case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) => if (shorter(m1, m2)) { if (!hasMatch(p2, p1, m1)) IntMap.Nil else if (zero(p2, m1)) l1.intersectionWith(that, f) else r1.intersectionWith(that, f) - } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)); + } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) else { if (!hasMatch(p1, p2, m2)) IntMap.Nil else if (zero(p1, m2)) this.intersectionWith(l2, f) @@ -413,15 +417,16 @@ extends AbstractMap[Int, T] * @param that The map to intersect with. * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. 
*/ - def intersection[R](that : IntMap[R]) : IntMap[T] = this.intersectionWith(that, (key : Int, value : T, value2 : R) => value); + def intersection[R](that: IntMap[R]): IntMap[T] = + this.intersectionWith(that, (key: Int, value: T, value2: R) => value) - def ++[S >: T](that : IntMap[S]) = + def ++[S >: T](that: IntMap[S]) = this.unionWith[S](that, (key, x, y) => y) /** * The entry with the lowest key value considered in unsigned order. */ - final def firstKey : Int = this match { + final def firstKey: Int = this match { case Bin(_, _, l, r) => l.firstKey case Tip(k, v) => k case IntMap.Nil => sys.error("Empty set") @@ -430,7 +435,7 @@ extends AbstractMap[Int, T] /** * The entry with the highest key value considered in unsigned order. */ - final def lastKey : Int = this match { + final def lastKey: Int = this match { case Bin(_, _, l, r) => r.lastKey case Tip(k, v) => k case IntMap.Nil => sys.error("Empty set") diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index 8a316f37de..002027b162 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -18,17 +18,17 @@ import scala.collection.mutable.{ Builder, MapBuilder } private[immutable] object LongMapUtils extends BitOperations.Long { def branchMask(i: Long, j: Long) = highestOneBit(i ^ j) - def join[T](p1 : Long, t1 : LongMap[T], p2 : Long, t2 : LongMap[T]) : LongMap[T] = { - val m = branchMask(p1, p2); - val p = mask(p1, m); + def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = { + val m = branchMask(p1, p2) + val p = mask(p1, m) if (zero(p1, m)) LongMap.Bin(p, m, t1, t2) - else LongMap.Bin(p, m, t2, t1); + else LongMap.Bin(p, m, t2, t1) } - def bin[T](prefix : Long, mask : Long, left : LongMap[T], right : LongMap[T]) : LongMap[T] = (left, right) match { - case (left, LongMap.Nil) => left; - case (LongMap.Nil, right) => right; - case (left, right) 
=> LongMap.Bin(prefix, mask, left, right); + def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match { + case (left, LongMap.Nil) => left + case (LongMap.Nil, right) => right + case (left, right) => LongMap.Bin(prefix, mask, left, right) } } @@ -49,29 +49,29 @@ object LongMap { def apply(): Builder[(Long, B), LongMap[B]] = new MapBuilder[Long, B, LongMap[B]](empty[B]) } - def empty[T] : LongMap[T] = LongMap.Nil; - def singleton[T](key : Long, value : T) : LongMap[T] = LongMap.Tip(key, value); - def apply[T](elems : (Long, T)*) : LongMap[T] = - elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)); + def empty[T]: LongMap[T] = LongMap.Nil + def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value) + def apply[T](elems: (Long, T)*): LongMap[T] = + elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) private[immutable] case object Nil extends LongMap[Nothing] { // Important, don't remove this! See IntMap for explanation. 
override def equals(that : Any) = that match { - case (that : AnyRef) if (this eq that) => true; - case (that : LongMap[_]) => false; // The only empty LongMaps are eq Nil - case that => super.equals(that); + case (that: AnyRef) if (this eq that) => true + case (that: LongMap[_]) => false // The only empty LongMaps are eq Nil + case that => super.equals(that) } - }; + } - private[immutable] case class Tip[+T](key : Long, value : T) extends LongMap[T]{ - def withValue[S](s : S) = - if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]]; - else LongMap.Tip(key, s); + private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] { + def withValue[S](s: S) = + if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]] + else LongMap.Tip(key, s) } - private[immutable] case class Bin[+T](prefix : Long, mask : Long, left : LongMap[T], right : LongMap[T]) extends LongMap[T]{ - def bin[S](left : LongMap[S], right : LongMap[S]) : LongMap[S] = { - if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]]; - else LongMap.Bin[S](prefix, mask, left, right); + private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] { + def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = { + if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]] + else LongMap.Bin[S](prefix, mask, left, right) } } } @@ -79,64 +79,62 @@ object LongMap { import LongMap._ // Iterator over a non-empty LongMap. -private[immutable] abstract class LongMapIterator[V, T](it : LongMap[V]) extends AbstractIterator[T] { +private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] { // Basically this uses a simple stack to emulate conversion over the tree. 
However // because we know that Longs are only 64 bits we can have at most 64 LongMap.Bins and // one LongMap.Tip sitting on the tree at any point. Therefore we know the maximum stack // depth is 65 - var index = 0; - var buffer = new Array[AnyRef](65); + var index = 0 + var buffer = new Array[AnyRef](65) def pop() = { - index -= 1; - buffer(index).asInstanceOf[LongMap[V]]; + index -= 1 + buffer(index).asInstanceOf[LongMap[V]] } - def push(x : LongMap[V]) { - buffer(index) = x.asInstanceOf[AnyRef]; - index += 1; + def push(x: LongMap[V]) { + buffer(index) = x.asInstanceOf[AnyRef] + index += 1 } push(it); /** * What value do we assign to a tip? */ - def valueOf(tip : LongMap.Tip[V]) : T; + def valueOf(tip: LongMap.Tip[V]): T - def hasNext = index != 0; - final def next : T = + def hasNext = index != 0 + final def next: T = pop() match { case LongMap.Bin(_,_, t@LongMap.Tip(_, _), right) => { - push(right); - valueOf(t); + push(right) + valueOf(t) } case LongMap.Bin(_, _, left, right) => { - push(right); - push(left); - next; + push(right) + push(left) + next } - case t@LongMap.Tip(_, _) => valueOf(t); + case t@LongMap.Tip(_, _) => valueOf(t) // This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap // and don't return an LongMapIterator for LongMap.Nil. 
- case LongMap.Nil => sys.error("Empty maps not allowed as subtrees"); + case LongMap.Nil => sys.error("Empty maps not allowed as subtrees") } } -private[immutable] class LongMapEntryIterator[V](it : LongMap[V]) extends LongMapIterator[V, (Long, V)](it){ - def valueOf(tip : LongMap.Tip[V]) = (tip.key, tip.value); +private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it){ + def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value) } -private[immutable] class LongMapValueIterator[V](it : LongMap[V]) extends LongMapIterator[V, V](it){ - def valueOf(tip : LongMap.Tip[V]) = tip.value; +private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it){ + def valueOf(tip: LongMap.Tip[V]) = tip.value } -private[immutable] class LongMapKeyIterator[V](it : LongMap[V]) extends LongMapIterator[V, Long](it){ - def valueOf(tip : LongMap.Tip[V]) = tip.key; +private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it){ + def valueOf(tip: LongMap.Tip[V]) = tip.key } -import LongMap._; - /** * Specialised immutable map structure for long keys, based on * <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Long Maps</a> @@ -157,12 +155,12 @@ extends AbstractMap[Long, T] with Map[Long, T] with MapLike[Long, T, LongMap[T]] { - override def empty: LongMap[T] = LongMap.Nil; + override def empty: LongMap[T] = LongMap.Nil override def toList = { - val buffer = new scala.collection.mutable.ListBuffer[(Long, T)]; - foreach(buffer += _); - buffer.toList; + val buffer = new scala.collection.mutable.ListBuffer[(Long, T)] + foreach(buffer += _) + buffer.toList } /** @@ -171,22 +169,22 @@ extends AbstractMap[Long, T] * @return an iterator over pairs of long keys and corresponding values. 
*/ def iterator: Iterator[(Long, T)] = this match { - case LongMap.Nil => Iterator.empty; - case _ => new LongMapEntryIterator(this); + case LongMap.Nil => Iterator.empty + case _ => new LongMapEntryIterator(this) } /** * Loops over the key, value pairs of the map in unsigned order of the keys. */ - override final def foreach[U](f : ((Long, T)) => U) : Unit = this match { - case LongMap.Bin(_, _, left, right) => {left.foreach(f); right.foreach(f); } + override final def foreach[U](f: ((Long, T)) => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } case LongMap.Tip(key, value) => f((key, value)); - case LongMap.Nil => {}; + case LongMap.Nil => } - override def keysIterator : Iterator[Long] = this match { - case LongMap.Nil => Iterator.empty; - case _ => new LongMapKeyIterator(this); + override def keysIterator: Iterator[Long] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapKeyIterator(this) } /** @@ -195,15 +193,15 @@ extends AbstractMap[Long, T] * * @param f The loop body */ - final def foreachKey(f : Long => Unit) : Unit = this match { - case LongMap.Bin(_, _, left, right) => {left.foreachKey(f); right.foreachKey(f); } - case LongMap.Tip(key, _) => f(key); - case LongMap.Nil => {} + final def foreachKey(f: Long => Unit): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } + case LongMap.Tip(key, _) => f(key) + case LongMap.Nil => } - override def valuesIterator : Iterator[T] = this match { - case LongMap.Nil => Iterator.empty; - case _ => new LongMapValueIterator(this); + override def valuesIterator: Iterator[T] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapValueIterator(this) } /** @@ -212,67 +210,70 @@ extends AbstractMap[Long, T] * * @param f The loop body */ - final def foreachValue(f : T => Unit) : Unit = this match { - case LongMap.Bin(_, _, left, right) => {left.foreachValue(f); 
right.foreachValue(f); } - case LongMap.Tip(_, value) => f(value); - case LongMap.Nil => {}; + final def foreachValue(f: T => Unit): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } + case LongMap.Tip(_, value) => f(value) + case LongMap.Nil => } override def stringPrefix = "LongMap" - override def isEmpty = this == LongMap.Nil; + override def isEmpty = this == LongMap.Nil - override def filter(f : ((Long, T)) => Boolean) : LongMap[T] = this match { + override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match { case LongMap.Bin(prefix, mask, left, right) => { - val (newleft, newright) = (left.filter(f), right.filter(f)); - if ((left eq newleft) && (right eq newright)) this; - else bin(prefix, mask, newleft, newright); + val (newleft, newright) = (left.filter(f), right.filter(f)) + if ((left eq newleft) && (right eq newright)) this + else bin(prefix, mask, newleft, newright) } case LongMap.Tip(key, value) => if (f((key, value))) this - else LongMap.Nil; - case LongMap.Nil => LongMap.Nil; + else LongMap.Nil + case LongMap.Nil => LongMap.Nil } - def transform[S](f : (Long, T) => S) : LongMap[S] = this match { - case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)); - case t@LongMap.Tip(key, value) => t.withValue(f(key, value)); - case LongMap.Nil => LongMap.Nil; + def transform[S](f: (Long, T) => S): LongMap[S] = this match { + case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) + case t@LongMap.Tip(key, value) => t.withValue(f(key, value)) + case LongMap.Nil => LongMap.Nil } - final override def size : Int = this match { - case LongMap.Nil => 0; - case LongMap.Tip(_, _) => 1; - case LongMap.Bin(_, _, left, right) => left.size + right.size; + final override def size: Int = this match { + case LongMap.Nil => 0 + case LongMap.Tip(_, _) => 1 + case LongMap.Bin(_, _, left, right) => left.size + right.size } - final def 
get(key : Long) : Option[T] = this match { - case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key); - case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None; - case LongMap.Nil => None; + final def get(key: Long): Option[T] = this match { + case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) + case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None + case LongMap.Nil => None } - final override def getOrElse[S >: T](key : Long, default : =>S) : S = this match { - case LongMap.Nil => default; - case LongMap.Tip(key2, value) => if (key == key2) value else default; - case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default); + final override def getOrElse[S >: T](key: Long, default: => S): S = this match { + case LongMap.Nil => default + case LongMap.Tip(key2, value) => if (key == key2) value else default + case LongMap.Bin(prefix, mask, left, right) => + if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) } - final override def apply(key : Long) : T = this match { - case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key); - case LongMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found"); - case LongMap.Nil => sys.error("key not found"); + final override def apply(key: Long): T = this match { + case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key) + case LongMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found") + case LongMap.Nil => sys.error("key not found") } def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2) - override def updated[S >: T](key : Long, value : S) : LongMap[S] = this match { - case LongMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, 
LongMap.Tip(key, value), prefix, this); - else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right) - else LongMap.Bin(prefix, mask, left, right.updated(key, value)); - case LongMap.Tip(key2, value2) => if (key == key2) LongMap.Tip(key, value); - else join(key, LongMap.Tip(key, value), key2, this); - case LongMap.Nil => LongMap.Tip(key, value); + override def updated[S >: T](key: Long, value: S): LongMap[S] = this match { + case LongMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right) + else LongMap.Bin(prefix, mask, left, right.updated(key, value)) + case LongMap.Tip(key2, value2) => + if (key == key2) LongMap.Tip(key, value) + else join(key, LongMap.Tip(key, value), key2, this) + case LongMap.Nil => LongMap.Tip(key, value) } /** @@ -281,7 +282,7 @@ extends AbstractMap[Long, T] * Equivalent to * {{{ * this.get(key) match { - * case None => this.update(key, value); + * case None => this.update(key, value) * case Some(oldvalue) => this.update(key, f(oldvalue, value) * } * }}} @@ -292,24 +293,26 @@ extends AbstractMap[Long, T] * @param f The function used to resolve conflicts. * @return The updated map. 
*/ - def updateWith[S >: T](key : Long, value : S, f : (T, S) => S) : LongMap[S] = this match { - case LongMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this); - else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right) - else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f)); - case LongMap.Tip(key2, value2) => if (key == key2) LongMap.Tip(key, f(value2, value)); - else join(key, LongMap.Tip(key, value), key2, this); - case LongMap.Nil => LongMap.Tip(key, value); + def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match { + case LongMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right) + else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f)) + case LongMap.Tip(key2, value2) => + if (key == key2) LongMap.Tip(key, f(value2, value)) + else join(key, LongMap.Tip(key, value), key2, this) + case LongMap.Nil => LongMap.Tip(key, value) } - def -(key : Long) : LongMap[T] = this match { + def -(key: Long): LongMap[T] = this match { case LongMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) this; - else if (zero(key, mask)) bin(prefix, mask, left - key, right); - else bin(prefix, mask, left, right - key); + if (!hasMatch(key, prefix, mask)) this + else if (zero(key, mask)) bin(prefix, mask, left - key, right) + else bin(prefix, mask, left, right - key) case LongMap.Tip(key2, _) => - if (key == key2) LongMap.Nil; - else this; - case LongMap.Nil => LongMap.Nil; + if (key == key2) LongMap.Nil + else this + case LongMap.Nil => LongMap.Nil } /** @@ -321,21 +324,21 @@ extends AbstractMap[Long, T] * @param f The transforming function. * @return The modified map. 
*/ - def modifyOrRemove[S](f : (Long, T) => Option[S]) : LongMap[S] = this match { + def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match { case LongMap.Bin(prefix, mask, left, right) => { - val newleft = left.modifyOrRemove(f); - val newright = right.modifyOrRemove(f); - if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]]; + val newleft = left.modifyOrRemove(f) + val newright = right.modifyOrRemove(f) + if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]] else bin(prefix, mask, newleft, newright) } case LongMap.Tip(key, value) => f(key, value) match { - case None => LongMap.Nil; + case None => LongMap.Nil case Some(value2) => //hack to preserve sharing if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]] - else LongMap.Tip(key, value2); + else LongMap.Tip(key, value2) } - case LongMap.Nil => LongMap.Nil; + case LongMap.Nil => LongMap.Nil } /** @@ -346,25 +349,25 @@ extends AbstractMap[Long, T] * @param f The function used to resolve conflicts between two mappings. * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. 
*/ - def unionWith[S >: T](that : LongMap[S], f : (Long, S, S) => S) : LongMap[S] = (this, that) match{ + def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match{ case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) => if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed - else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1); - else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f)); + if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed + else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1) + else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f)) } else if (shorter(m2, m1)){ - if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed - else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2); - else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f)); + if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed + else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2) + else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f)) } else { - if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)); - else join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed + if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) + else join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed } - case (LongMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)); // TODO: remove [S] when SI-5548 is fixed - case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)); - case (LongMap.Nil, x) => x; - case (x, LongMap.Nil) => x; + case (LongMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) // TODO: remove [S] when SI-5548 is fixed + case (x, 
LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) + case (LongMap.Nil, x) => x + case (x, LongMap.Nil) => x } /** @@ -378,27 +381,27 @@ extends AbstractMap[Long, T] * @param f The combining function. * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. */ - def intersectionWith[S, R](that : LongMap[S], f : (Long, T, S) => R) : LongMap[R] = (this, that) match { + def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match { case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) => if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) LongMap.Nil; - else if (zero(p2, m1)) l1.intersectionWith(that, f); - else r1.intersectionWith(that, f); - } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)); + if (!hasMatch(p2, p1, m1)) LongMap.Nil + else if (zero(p2, m1)) l1.intersectionWith(that, f) + else r1.intersectionWith(that, f) + } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) else { - if (!hasMatch(p1, p2, m2)) LongMap.Nil; - else if (zero(p1, m2)) this.intersectionWith(l2, f); - else this.intersectionWith(r2, f); + if (!hasMatch(p1, p2, m2)) LongMap.Nil + else if (zero(p1, m2)) this.intersectionWith(l2, f) + else this.intersectionWith(r2, f) } case (LongMap.Tip(key, value), that) => that.get(key) match { - case None => LongMap.Nil; - case Some(value2) => LongMap.Tip(key, f(key, value, value2)); + case None => LongMap.Nil + case Some(value2) => LongMap.Tip(key, f(key, value, value2)) } case (_, LongMap.Tip(key, value)) => this.get(key) match { - case None => LongMap.Nil; - case Some(value2) => LongMap.Tip(key, f(key, value2, value)); + case None => LongMap.Nil + case Some(value2) => LongMap.Tip(key, f(key, value2, value)) } - case (_, _) => LongMap.Nil; + case (_, _) => LongMap.Nil } /** @@ -409,9 +412,10 @@ extends AbstractMap[Long, T] * @param that The map to 
intersect with. * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. */ - def intersection[R](that : LongMap[R]) : LongMap[T] = this.intersectionWith(that, (key : Long, value : T, value2 : R) => value); + def intersection[R](that: LongMap[R]): LongMap[T] = + this.intersectionWith(that, (key: Long, value: T, value2: R) => value) - def ++[S >: T](that : LongMap[S]) = + def ++[S >: T](that: LongMap[S]) = this.unionWith[S](that, (key, x, y) => y) } diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala index 6ae2d78188..2dc08fff24 100644 --- a/src/library/scala/collection/immutable/MapLike.scala +++ b/src/library/scala/collection/immutable/MapLike.scala @@ -48,7 +48,8 @@ import parallel.immutable.ParMap trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]] extends scala.collection.MapLike[A, B, This] with Parallelizable[(A, B), ParMap[A, B]] -{ self => +{ +self => protected[this] override def parCombiner = ParMap.newCombiner[A, B] @@ -84,31 +85,20 @@ trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]] */ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] = ((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _) - + /** Filters this map by retaining only keys satisfying a predicate. * @param p the predicate used to test keys * @return an immutable map consisting only of those key value pairs of this map where the key satisfies * the predicate `p`. The resulting map wraps the original map without copying any elements. 
*/ - override def filterKeys(p: A => Boolean): Map[A, B] = new AbstractMap[A, B] with DefaultMap[A, B] { - override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv) - def iterator = self.iterator.filter(kv => p(kv._1)) - override def contains(key: A) = self.contains(key) && p(key) - def get(key: A) = if (!p(key)) None else self.get(key) - } - + override def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) with DefaultMap[A, B] + /** Transforms this map by applying a function to every retrieved value. * @param f the function used to transform values of this map. * @return a map view which maps every key of this map * to `f(this(key))`. The resulting map wraps the original map without copying any elements. */ - override def mapValues[C](f: B => C): Map[A, C] = new AbstractMap[A, C] with DefaultMap[A, C] { - override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v))) - def iterator = for ((k, v) <- self.iterator) yield (k, f(v)) - override def size = self.size - override def contains(key: A) = self.contains(key) - def get(key: A) = self.get(key).map(f) - } + override def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f) with DefaultMap[A, C] /** Collects all keys of this map in a set. * @return a set containing all keys of this map. 
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 0f28c4997b..4b573511d1 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -43,7 +43,7 @@ object RedBlackTree { } def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count - def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v)) + def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k)) def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { case (Some(from), Some(until)) => this.range(tree, from, until) @@ -122,17 +122,18 @@ object RedBlackTree { else mkTree(isBlack, x, xv, a, r) } - private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { + private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { RedTree(k, v, null, null) } else { val cmp = ordering.compare(k, tree.key) - if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v), tree.right) - else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v)) - else mkTree(isBlackTree(tree), k, v, tree.left, tree.right) + if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v, overwrite), tree.right) + else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v, overwrite)) + else if (overwrite || k != tree.key) 
mkTree(isBlackTree(tree), k, v, tree.left, tree.right) + else tree } - // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees - // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html + /* Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees + * http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html */ private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { if (isRedTree(tr)) { @@ -216,7 +217,7 @@ object RedBlackTree { if (ordering.lt(tree.key, from)) return doFrom(tree.right, from) val newLeft = doFrom(tree.left, from) if (newLeft eq tree.left) tree - else if (newLeft eq null) upd(tree.right, tree.key, tree.value) + else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false) else rebalance(tree, newLeft, tree.right) } private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = { @@ -224,7 +225,7 @@ object RedBlackTree { if (ordering.lt(to, tree.key)) return doTo(tree.left, to) val newRight = doTo(tree.right, to) if (newRight eq tree.right) tree - else if (newRight eq null) upd(tree.left, tree.key, tree.value) + else if (newRight eq null) upd(tree.left, tree.key, tree.value, false) else rebalance(tree, tree.left, newRight) } private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { @@ -232,7 +233,7 @@ object RedBlackTree { if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until) val newRight = doUntil(tree.right, until) if (newRight eq tree.right) tree - else if (newRight eq null) upd(tree.left, tree.key, tree.value) + else if (newRight eq null) upd(tree.left, tree.key, tree.value, false) else rebalance(tree, tree.left, newRight) } private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { @@ -242,8 +243,8 @@ 
object RedBlackTree { val newLeft = doFrom(tree.left, from) val newRight = doUntil(tree.right, until) if ((newLeft eq tree.left) && (newRight eq tree.right)) tree - else if (newLeft eq null) upd(newRight, tree.key, tree.value); - else if (newRight eq null) upd(newLeft, tree.key, tree.value); + else if (newLeft eq null) upd(newRight, tree.key, tree.value, false); + else if (newRight eq null) upd(newLeft, tree.key, tree.value, false); else rebalance(tree, newLeft, newRight) } @@ -254,7 +255,7 @@ object RedBlackTree { if (n > count) return doDrop(tree.right, n - count - 1) val newLeft = doDrop(tree.left, n) if (newLeft eq tree.left) tree - else if (newLeft eq null) upd(tree.right, tree.key, tree.value) + else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false) else rebalance(tree, newLeft, tree.right) } private[this] def doTake[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = { @@ -264,7 +265,7 @@ object RedBlackTree { if (n <= count) return doTake(tree.left, n) val newRight = doTake(tree.right, n - count - 1) if (newRight eq tree.right) tree - else if (newRight eq null) upd(tree.left, tree.key, tree.value) + else if (newRight eq null) upd(tree.left, tree.key, tree.value, false) else rebalance(tree, tree.left, newRight) } private[this] def doSlice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = { @@ -275,8 +276,8 @@ object RedBlackTree { val newLeft = doDrop(tree.left, from) val newRight = doTake(tree.right, until - count - 1) if ((newLeft eq tree.left) && (newRight eq tree.right)) tree - else if (newLeft eq null) upd(newRight, tree.key, tree.value) - else if (newRight eq null) upd(newLeft, tree.key, tree.value) + else if (newLeft eq null) upd(newRight, tree.key, tree.value, false) + else if (newRight eq null) upd(newLeft, tree.key, tree.value, false) else rebalance(tree, newLeft, newRight) } diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala index 
526f7a1ffe..f147b673f7 100644 --- a/src/library/scala/collection/immutable/SortedMap.scala +++ b/src/library/scala/collection/immutable/SortedMap.scala @@ -30,7 +30,9 @@ import annotation.unchecked.uncheckedVariance trait SortedMap[A, +B] extends Map[A, B] with scala.collection.SortedMap[A, B] with MapLike[A, B, SortedMap[A, B]] - with SortedMapLike[A, B, SortedMap[A, B]] { self => + with SortedMapLike[A, B, SortedMap[A, B]] +{ +self => override protected[this] def newBuilder : Builder[(A, B), SortedMap[A, B]] = SortedMap.newBuilder[A, B] @@ -76,6 +78,17 @@ trait SortedMap[A, +B] extends Map[A, B] */ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] = ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _) + + override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] { + implicit def ordering: Ordering[A] = self.ordering + override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p) + } + + override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] { + implicit def ordering: Ordering[A] = self.ordering + override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f) + } + } /** $factoryInfo @@ -86,4 +99,20 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] { /** $sortedMapCanBuildFromInfo */ implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B] def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B] + + private[collection] trait Default[A, +B] extends SortedMap[A, B] with collection.SortedMap.Default[A, B] { + self => + override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = { + val b = SortedMap.newBuilder[A, B1] + b ++= this + b += ((kv._1, kv._2)) + b.result + } + + override def - (key: A): SortedMap[A, B] = { + 
val b = newBuilder + for (kv <- this; if kv._1 != key) b += kv + b.result + } + } } diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 4c1a5f2e03..51bc76efc3 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -131,7 +131,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi * @param value the value to be associated with `key` * @return a new $coll with the updated binding */ - override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value)) + override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, true)) /** Add a key/value pair to this map. * @tparam B1 type of the value of the new binding, a supertype of `B` @@ -171,7 +171,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi */ def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = { assert(!RB.contains(tree, key)) - new TreeMap(RB.update(tree, key, value)) + new TreeMap(RB.update(tree, key, value, true)) } def - (key:A): TreeMap[A, B] = diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 882e828c5b..697da2bc4b 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -112,7 +112,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin * @param elem a new element to add. * @return a new $coll containing `elem` and all the elements of this $coll. */ - def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, ())) + def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), false)) /** A new `TreeSet` with the entry added is returned, * assuming that elem is <em>not</em> in the TreeSet. 
@@ -122,7 +122,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin */ def insert(elem: A): TreeSet[A] = { assert(!RB.contains(tree, elem)) - newSet(RB.update(tree, elem, ())) + newSet(RB.update(tree, elem, (), false)) } /** Creates a new `TreeSet` with the entry removed. diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index a7ec833193..82498f9b63 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -841,7 +841,7 @@ self: ParIterableLike[T, Repr, Sequential] => override def toBuffer[U >: T]: collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers? - override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] // TODO add ParTraversable[T] + override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] override def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]] @@ -850,13 +850,13 @@ self: ParIterableLike[T, Repr, Sequential] => override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U]) override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V]) - - // TODO(@alex22): make these better - override def toVector: Vector[T] = seq.toVector - - override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = seq.convertTo[Col] + override def toVector: Vector[T] = to[Vector] + override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) { + toParCollection[T, Col[T]](() => cbf().asCombiner) + } else seq.to(cbf) + /* tasks */ protected trait StrictSplitterCheckTask[R, Tp] 
extends Task[R, Tp] { diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala index d8c42d74b0..349f4fa44c 100644 --- a/src/library/scala/collection/parallel/immutable/ParIterable.scala +++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala @@ -34,6 +34,7 @@ extends collection/*.immutable*/.GenIterable[T] with collection.parallel.ParIterable[T] with GenericParTemplate[T, ParIterable] with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]] + with Immutable { override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala index 700d21d0bb..b5747a31cf 100644 --- a/src/library/scala/collection/parallel/mutable/ParIterable.scala +++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala @@ -29,7 +29,8 @@ import scala.collection.GenIterable trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T] with collection.parallel.ParIterable[T] with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], Iterable[T]] { + with ParIterableLike[T, ParIterable[T], Iterable[T]] + with Mutable { override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable //protected[this] override def newBuilder = ParIterable.newBuilder[T] diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index f753dfbcbb..860e7bac28 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -52,7 +52,7 @@ trait ClassTag[T] extends Equals with Serializable { * `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)` * is uncheckable, but we have an instance of `ClassTag[T]`. 
*/ - def unapply(x: Any): Option[T] = if (runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[T]) else None + def unapply(x: Any): Option[T] = if (x != null && runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[T]) else None /** case class accessories */ override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]] diff --git a/src/library/scala/reflect/compat.scala b/src/library/scala/reflect/compat.scala deleted file mode 100644 index fc0e5fbf9c..0000000000 --- a/src/library/scala/reflect/compat.scala +++ /dev/null @@ -1,33 +0,0 @@ -// [Eugene++] delete this once we merge with trunk and have a working IDE - -package scala.reflect { - trait ArrayTag[T] - trait ErasureTag[T] - trait ConcreteTypeTag[T] -} - -package scala.reflect.api { - trait TypeTags { - trait TypeTag[T] - trait ConcreteTypeTag[T] - } -} - -package scala { - import scala.reflect.base.{Universe => BaseUniverse} - - trait reflect_compat { - lazy val mirror: BaseUniverse = ??? - } -} - -package scala.reflect { - import language.experimental.macros - import scala.reflect.base.{Universe => BaseUniverse} - - trait internal_compat { - private[scala] def materializeArrayTag[T](u: BaseUniverse): ArrayTag[T] = ??? - private[scala] def materializeErasureTag[T](u: BaseUniverse): ErasureTag[T] = ??? - private[scala] def materializeConcreteTypeTag[T](u: BaseUniverse): ConcreteTypeTag[T] = ??? - } -}
\ No newline at end of file diff --git a/src/library/scala/reflect/makro/internal/package.scala b/src/library/scala/reflect/makro/internal/package.scala index d31a0f0d14..78cb0ffb10 100644 --- a/src/library/scala/reflect/makro/internal/package.scala +++ b/src/library/scala/reflect/makro/internal/package.scala @@ -9,7 +9,7 @@ import scala.reflect.base.{Universe => BaseUniverse} // // todo. once we have implicit macros for tag generation, we can remove these anchors // [Eugene++] how do I hide this from scaladoc? -package object internal extends scala.reflect.internal_compat { +package object internal { private[scala] def materializeClassTag[T](u: BaseUniverse): ClassTag[T] = macro ??? private[scala] def materializeAbsTypeTag[T](u: BaseUniverse): u.AbsTypeTag[T] = macro ??? private[scala] def materializeTypeTag[T](u: BaseUniverse): u.TypeTag[T] = macro ??? diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala index 0ee58df2cd..2ebc82875e 100644 --- a/src/library/scala/reflect/package.scala +++ b/src/library/scala/reflect/package.scala @@ -1,6 +1,6 @@ package scala -package object reflect extends reflect_compat { +package object reflect { lazy val basis: base.Universe = new base.Base diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala index d7f5a57f50..accda5b8f7 100644 --- a/src/library/scala/util/control/Breaks.scala +++ b/src/library/scala/util/control/Breaks.scala @@ -41,8 +41,8 @@ class Breaks { } } - trait TryBlock { - def catchBreak(onBreak: => Unit): Unit + sealed trait TryBlock[T] { + def catchBreak(onBreak: =>T): T } /** @@ -57,8 +57,8 @@ class Breaks { * } * }}} */ - def tryBreakable(op: => Unit) = new TryBlock { - def catchBreak(onBreak: => Unit) = try { + def tryBreakable[T](op: =>T) = new TryBlock[T] { + def catchBreak(onBreak: =>T) = try { op } catch { case ex: BreakControl => diff --git a/src/library/scala/util/control/ControlThrowable.scala 
b/src/library/scala/util/control/ControlThrowable.scala index 8cbe3064ef..64afb1f10f 100644 --- a/src/library/scala/util/control/ControlThrowable.scala +++ b/src/library/scala/util/control/ControlThrowable.scala @@ -24,8 +24,9 @@ package scala.util.control * try { * // Body might throw arbitrarily * } catch { - * case ce : ControlThrowable => throw ce // propagate - * case t : Exception => log(t) // log and suppress + * case c: ControlThrowable => throw c // propagate + * case t: Exception => log(t) // log and suppress + * } * }}} * * @author Miles Sabin diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala index 2223a6db0f..2aa9a99054 100644 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala +++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala @@ -336,7 +336,6 @@ import ILGenerator._ emitSpecialLabel(Label.Try) val endExc: Label = new Label.NormalLabel() // new Label(lastLabel) ??? excStack.push(Label.Try, endExc) - return endExc } /** Begins a catch block. */ diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala index b62a92cbd7..b797c71f6d 100644 --- a/src/reflect/scala/reflect/api/Types.scala +++ b/src/reflect/scala/reflect/api/Types.scala @@ -121,7 +121,7 @@ trait Types extends base.Types { self: Universe => * class C extends p.D[Int] * T.asSeenFrom(ThisType(C), D) (where D is owner of m) * = Int - * }}} + * }}} */ def asSeenFrom(pre: Type, clazz: Symbol): Type @@ -171,6 +171,15 @@ trait Types extends base.Types { self: Universe => */ def widen: Type + /** Map to a singleton type which is a subtype of this type. + * The fallback implemented here gives: + * {{{ + * T.narrow = (T {}).this.type + * }}} + * Overridden where we know more about where types come from. 
+ */ + def narrow: Type + /** The string discriminator of this type; useful for debugging */ def kind: String } @@ -365,4 +374,3 @@ trait Types extends base.Types { self: Universe => */ def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type } - diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 18adab7c68..5ae8f22c64 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -129,11 +129,15 @@ abstract class SymbolTable extends makro.Universe // sigh, this has to be public or atPhase doesn't inline. var phStack: List[Phase] = Nil - private var ph: Phase = NoPhase - private var per = NoPeriod + private[this] var ph: Phase = NoPhase + private[this] var per = NoPeriod final def atPhaseStack: List[Phase] = phStack - final def phase: Phase = ph + final def phase: Phase = { + if (Statistics.hotEnabled) + Statistics.incCounter(SymbolTableStats.phaseCounter) + ph + } def atPhaseStackMessage = atPhaseStack match { case Nil => "" @@ -330,3 +334,7 @@ abstract class SymbolTable extends makro.Universe */ def isCompilerUniverse = false } + +object SymbolTableStats { + val phaseCounter = Statistics.newCounter("#phase calls") +} diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 4b0ceeb86b..79041924a8 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -645,6 +645,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } final def flags: Long = { + if (Statistics.hotEnabled) Statistics.incCounter(flagsCount) val fs = _rawflags & phase.flagMask (fs | ((fs & LateFlags) >>> LateShift)) & ~(fs >>> AntiShift) } @@ -936,7 +937,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => // ------ owner attribute -------------------------------------------------------------- - def owner: 
Symbol = rawowner + def owner: Symbol = { + Statistics.incCounter(ownerCount) + rawowner + } + // TODO - don't allow the owner to be changed without checking invariants, at least // when under some flag. Define per-phase invariants for owner/owned relationships, // e.g. after flatten all classes are owned by package classes, there are lots and @@ -1433,24 +1438,28 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def classBound: Type = { val tp = refinedType(info.parents, owner) - val thistp = tp.typeSymbol.thisType - val oldsymbuf = new ListBuffer[Symbol] - val newsymbuf = new ListBuffer[Symbol] - for (sym <- info.decls) { - // todo: what about public references to private symbols? - if (sym.isPublic && !sym.isConstructor) { - oldsymbuf += sym - newsymbuf += ( - if (sym.isClass) - tp.typeSymbol.newAbstractType(sym.name.toTypeName, sym.pos).setInfo(sym.existentialBound) - else - sym.cloneSymbol(tp.typeSymbol)) + // SI-4589 refinedType only creates a new refinement class symbol before erasure; afterwards + // the first parent class is returned, to which we must not add members. + if (!phase.erasedTypes) { + val thistp = tp.typeSymbol.thisType + val oldsymbuf = new ListBuffer[Symbol] + val newsymbuf = new ListBuffer[Symbol] + for (sym <- info.decls) { + // todo: what about public references to private symbols? 
+ if (sym.isPublic && !sym.isConstructor) { + oldsymbuf += sym + newsymbuf += ( + if (sym.isClass) + tp.typeSymbol.newAbstractType(sym.name.toTypeName, sym.pos).setInfo(sym.existentialBound) + else + sym.cloneSymbol(tp.typeSymbol)) + } + } + val oldsyms = oldsymbuf.toList + val newsyms = newsymbuf.toList + for (sym <- newsyms) { + addMember(thistp, tp, sym modifyInfo (_ substThisAndSym(this, thistp, oldsyms, newsyms))) } - } - val oldsyms = oldsymbuf.toList - val newsyms = newsymbuf.toList - for (sym <- newsyms) { - addMember(thistp, tp, sym modifyInfo (_ substThisAndSym(this, thistp, oldsyms, newsyms))) } tp } @@ -2324,7 +2333,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => private[this] var _rawname: TermName = initName def rawname = _rawname - def name = _rawname + def name = { + Statistics.incCounter(nameCount) + _rawname + } def name_=(name: Name) { if (name != rawname) { log("Renaming %s %s %s to %s".format(shortSymbolClass, debugFlagString, rawname, name)) @@ -2493,11 +2505,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def companionClass = flatOwnerInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this) - override def owner = ( + override def owner = { + Statistics.incCounter(ownerCount) if (!isMethod && needsFlatClasses) rawowner.owner else rawowner - ) - override def name: TermName = ( + } + override def name: TermName = { + Statistics.incCounter(nameCount) if (!isMethod && needsFlatClasses) { if (flatname eq null) flatname = nme.flattenedName(rawowner.name, rawname) @@ -2505,7 +2519,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => flatname } else rawname - ) + } } implicit val ModuleSymbolTag = ClassTag[ModuleSymbol](classOf[ModuleSymbol]) @@ -2576,7 +2590,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => // cloneSymbolImpl still abstract in TypeSymbol. 
def rawname = _rawname - def name = _rawname + def name = { + Statistics.incCounter(nameCount) + _rawname + } final def asNameType(n: Name) = n.toTypeName override def isNonClassType = true @@ -2888,10 +2905,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => thisTypeCache } - override def owner: Symbol = + override def owner: Symbol = { + Statistics.incCounter(ownerCount) if (needsFlatClasses) rawowner.owner else rawowner + } - override def name: TypeName = ( + override def name: TypeName = { + Statistics.incCounter(nameCount) if (needsFlatClasses) { if (flatname eq null) flatname = nme.flattenedName(rawowner.name, rawname).toTypeName @@ -2899,7 +2919,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => flatname } else rawname - ) + } /** A symbol carrying the self type of the class as its type */ override def thisSym: Symbol = thissym @@ -3194,4 +3214,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => object SymbolsStats { val typeSymbolCount = Statistics.newCounter("#type symbols") val classSymbolCount = Statistics.newCounter("#class symbols") + val flagsCount = Statistics.newCounter("#flags ops") + val ownerCount = Statistics.newCounter("#owner ops") + val nameCount = Statistics.newCounter("#name ops") } diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index d4b895bcb4..4cf2cceb81 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -679,7 +679,7 @@ trait Types extends api.Types { self: SymbolTable => else { // scala.tools.nsc.util.trace.when(pre.isInstanceOf[ExistentialType])("X "+this+".asSeenfrom("+pre+","+clazz+" = ") { Statistics.incCounter(asSeenFromCount) - val start = Statistics.startTimer(asSeenFromNanos) + val start = Statistics.pushTimer(typeOpsStack, asSeenFromNanos) val m = new AsSeenFromMap(pre.normalize, clazz) val tp = m apply this val tp1 = existentialAbstraction(m.capturedParams, tp) @@ 
-687,7 +687,7 @@ trait Types extends api.Types { self: SymbolTable => if (m.capturedSkolems.isEmpty) tp1 else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1) - Statistics.stopTimer(asSeenFromNanos, start) + Statistics.popTimer(typeOpsStack, start) result } } @@ -826,12 +826,12 @@ trait Types extends api.Types { self: SymbolTable => def stat_<:<(that: Type): Boolean = { Statistics.incCounter(subtypeCount) - val start = Statistics.startTimer(subtypeNanos) + val start = Statistics.pushTimer(typeOpsStack, subtypeNanos) val result = (this eq that) || (if (explainSwitch) explain("<:", isSubType, this, that) else isSubType(this, that, AnyDepth)) - Statistics.stopTimer(subtypeNanos, start) + Statistics.popTimer(typeOpsStack, start) result } @@ -839,12 +839,12 @@ trait Types extends api.Types { self: SymbolTable => */ def weak_<:<(that: Type): Boolean = { Statistics.incCounter(subtypeCount) - val start = Statistics.startTimer(subtypeNanos) + val start = Statistics.pushTimer(typeOpsStack, subtypeNanos) val result = ((this eq that) || (if (explainSwitch) explain("weak_<:", isWeakSubType, this, that) else isWeakSubType(this, that))) - Statistics.stopTimer(subtypeNanos, start) + Statistics.popTimer(typeOpsStack, start) result } @@ -1018,7 +1018,7 @@ trait Types extends api.Types { self: SymbolTable => val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this) Statistics.incCounter(findMemberCount) - val start = Statistics.startTimer(findMemberNanos) + val start = Statistics.pushTimer(typeOpsStack, findMemberNanos) //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG var members: Scope = null @@ -1045,7 +1045,7 @@ trait Types extends api.Types { self: SymbolTable => !sym.isPrivateLocal || (bcs0.head.hasTransOwner(bcs.head)))) { if (name.isTypeName || stableOnly && sym.isStable) { - Statistics.stopTimer(findMemberNanos, start) + Statistics.popTimer(typeOpsStack, start) if (suspension ne 
null) suspension foreach (_.suspended = false) return sym } else if (member == NoSymbol) { @@ -1091,7 +1091,7 @@ trait Types extends api.Types { self: SymbolTable => } // while (!bcs.isEmpty) excluded = excludedFlags } // while (continue) - Statistics.stopTimer(findMemberNanos, start) + Statistics.popTimer(typeOpsStack, start) if (suspension ne null) suspension foreach (_.suspended = false) if (members eq null) { if (member == NoSymbol) Statistics.incCounter(noMemberCount) @@ -1534,11 +1534,17 @@ trait Types extends api.Types { self: SymbolTable => tpe.baseTypeSeqCache = bts lateMap paramToVar } else { Statistics.incCounter(compoundBaseTypeSeqCount) - tpe.baseTypeSeqCache = undetBaseTypeSeq - tpe.baseTypeSeqCache = if (tpe.typeSymbol.isRefinementClass) - tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe) - else - compoundBaseTypeSeq(tpe) + val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) + try { + tpe.baseTypeSeqCache = undetBaseTypeSeq + tpe.baseTypeSeqCache = + if (tpe.typeSymbol.isRefinementClass) + tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe) + else + compoundBaseTypeSeq(tpe) + } finally { + Statistics.popTimer(typeOpsStack, start) + } // [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors // when compiling with // scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala @@ -2390,8 +2396,13 @@ trait Types extends api.Types { self: SymbolTable => tpe.baseTypeSeqPeriod = currentPeriod if (!isValidForBaseClasses(period)) { Statistics.incCounter(typerefBaseTypeSeqCount) - tpe.baseTypeSeqCache = undetBaseTypeSeq - tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl + val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) + try { + tpe.baseTypeSeqCache = undetBaseTypeSeq + tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl + } finally { + Statistics.popTimer(typeOpsStack, start) + } } } if (tpe.baseTypeSeqCache == undetBaseTypeSeq) 
@@ -6306,13 +6317,13 @@ trait Types extends api.Types { self: SymbolTable => case List(t) => t case _ => Statistics.incCounter(lubCount) - val start = Statistics.startTimer(lubNanos) + val start = Statistics.pushTimer(typeOpsStack, lubNanos) try { lub(ts, lubDepth(ts)) } finally { lubResults.clear() glbResults.clear() - Statistics.stopTimer(lubNanos, start) + Statistics.popTimer(typeOpsStack, start) } } @@ -6454,13 +6465,13 @@ trait Types extends api.Types { self: SymbolTable => case List(t) => t case ts0 => Statistics.incCounter(lubCount) - val start = Statistics.startTimer(lubNanos) + val start = Statistics.pushTimer(typeOpsStack, lubNanos) try { glbNorm(ts0, lubDepth(ts0)) } finally { lubResults.clear() glbResults.clear() - Statistics.stopTimer(lubNanos, start) + Statistics.popTimer(typeOpsStack, start) } } @@ -6884,11 +6895,13 @@ object TypesStats { val noMemberCount = Statistics.newSubCounter(" of which not found", findMemberCount) val multMemberCount = Statistics.newSubCounter(" of which multiple overloaded", findMemberCount) val typerNanos = Statistics.newTimer ("time spent typechecking", "typer") - val lubNanos = Statistics.newSubTimer ("time spent in lubs", typerNanos) - val subtypeNanos = Statistics.newSubTimer ("time spent in <:<", typerNanos) - val findMemberNanos = Statistics.newSubTimer ("time spent in findmember", typerNanos) - val asSeenFromNanos = Statistics.newSubTimer ("time spent in asSeenFrom", typerNanos) + val lubNanos = Statistics.newStackableTimer("time spent in lubs", typerNanos) + val subtypeNanos = Statistics.newStackableTimer("time spent in <:<", typerNanos) + val findMemberNanos = Statistics.newStackableTimer("time spent in findmember", typerNanos) + val asSeenFromNanos = Statistics.newStackableTimer("time spent in asSeenFrom", typerNanos) + val baseTypeSeqNanos = Statistics.newStackableTimer("time spent in baseTypeSeq", typerNanos) val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", 
baseTypeSeqCount) val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount) val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount) + val typeOpsStack = Statistics.newTimerStack() } diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index 57c9e98174..3a31c2858b 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -4,6 +4,8 @@ import collection.mutable object Statistics { + type TimerSnapshot = (Long, Long) + /** If enabled, increment counter by one */ @inline final def incCounter(c: Counter) { if (_enabled && c != null) c.value += 1 @@ -30,20 +32,20 @@ object Statistics { } /** If enabled, start timer */ - @inline final def startTimer(tm: Timer): (Long, Long) = + @inline final def startTimer(tm: Timer): TimerSnapshot = if (_enabled && tm != null) tm.start() else null /** If enabled, stop timer */ - @inline final def stopTimer(tm: Timer, start: (Long, Long)) { + @inline final def stopTimer(tm: Timer, start: TimerSnapshot) { if (_enabled && tm != null) tm.stop(start) } /** If enabled, push and start a new timer in timer stack */ - @inline final def pushTimerClass(timers: ByClassTimerStack, cls: Class[_]): (Long, Long) = - if (_enabled && timers != null) timers.push(cls) else null + @inline final def pushTimer(timers: TimerStack, timer: => StackableTimer): TimerSnapshot = + if (_enabled && timers != null) timers.push(timer) else null /** If enabled, stop and pop timer from timer stack */ - @inline final def popTimerClass(timers: ByClassTimerStack, prev: (Long, Long)) { + @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot) { if (_enabled && timers != null) timers.pop(prev) } @@ -73,6 +75,13 @@ object Statistics { */ def newSubTimer(prefix: String, timer: Timer): Timer = new SubTimer(prefix, timer) + /** 
Create a new stackable that shows as `prefix` and is active + * in the same phases as its base timer. Stackable timers are subtimers + * that can be stacked ina timerstack, and that print aggregate, as well as specific + * durations. + */ + def newStackableTimer(prefix: String, timer: Timer): StackableTimer = new StackableTimer(prefix, timer) + /** Create a new view that shows as `prefix` and is active in given phases. * The view always reflects the current value of `quant` as a quantity. */ @@ -86,20 +95,27 @@ quant) /** Same as newQuantMap, where the key type is fixed to be Class[_] */ def newByClass[V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue) - /** Create a new timer stack map, indexed by Class[_]. */ - def newByClassTimerStack(prefix: String, underlying: Timer) = new ByClassTimerStack(prefix, underlying) + /** Create a new timer stack */ + def newTimerStack() = new TimerStack() def allQuantities: Iterable[Quantity] = - for ((q, _) <- qs if !q.isInstanceOf[SubQuantity]; + for ((_, q) <- qs if q.underlying == q; r <- q :: q.children.toList if r.prefix.nonEmpty) yield r private def showPercent(x: Double, base: Double) = if (base == 0) "" else f" (${x / base * 100}%2.1f%)" + /** The base trait for quantities. + * Quantities with non-empty prefix are printed in the statistics info. 
+ */ trait Quantity { - qs += (this -> ()) + if (prefix.nonEmpty) { + val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix" + qs(key) = this + } val prefix: String val phases: Seq[String] + def underlying: Quantity = this def showAt(phase: String) = phases.isEmpty || (phases contains phase) def line = f"$prefix%-30s: ${this}" val children = new mutable.ListBuffer[Quantity] @@ -123,7 +139,7 @@ quant) override def toString = quant.toString } - private class RelCounter(prefix: String, val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity { + private class RelCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity { override def toString = if (value == 0) "0" else { @@ -142,33 +158,39 @@ quant) value + showPercent(value, underlying.value) } - class Timer(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Timer] { + class Timer(val prefix: String, val phases: Seq[String]) extends Quantity { var nanos: Long = 0 var timings = 0 - def compare(that: Timer): Int = - if (this.nanos < that.nanos) -1 - else if (this.nanos > that.nanos) 1 - else 0 def start() = { (nanos, System.nanoTime()) } - def stop(prev: (Long, Long)) { + def stop(prev: TimerSnapshot) { val (nanos0, start) = prev nanos = nanos0 + System.nanoTime() - start timings += 1 } - override def toString = s"$timings spans, ${nanos/1000}ms" + protected def show(ns: Long) = s"${ns/1000}ms" + override def toString = s"$timings spans, ${show(nanos)}" } - private class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity { - override def toString: String = super.toString + showPercent(nanos, underlying.nanos) + class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity { + override protected def show(ns: Long) = super.show(ns) + showPercent(ns, underlying.nanos) + } 
+ + class StackableTimer(prefix: String, underlying: Timer) extends SubTimer(prefix, underlying) with Ordered[StackableTimer] { + var specificNanos: Long = 0 + def compare(that: StackableTimer): Int = + if (this.specificNanos < that.specificNanos) -1 + else if (this.specificNanos > that.specificNanos) 1 + else 0 + override def toString = s"${super.toString} aggregate, ${show(specificNanos)} specific" } /** A mutable map quantity where missing elements are automatically inserted * on access by executing `initValue`. */ class QuantMap[K, V <% Ordered[V]](val prefix: String, val phases: Seq[String], initValue: => V) - extends scala.collection.mutable.HashMap[K, V] with Quantity { + extends mutable.HashMap[K, V] with mutable.SynchronizedMap[K, V] with Quantity { override def default(key: K) = { val elem = initValue this(key) = elem @@ -183,23 +205,25 @@ quant) }.mkString(", ") } - /** A mutable map quantity that takes class keys to subtimer values, relative to - * some `underlying` timer. In addition, class timers can be pushed and popped. - * Pushing the timer for a class means stopping the currently active timer. + /** A stack of timers, all active, where a timer's specific "clock" + * is stopped as long as it is buried by some other timer in the stack, but + * its aggregate clock keeps on ticking. 
*/ - class ByClassTimerStack(prefix: String, val underlying: Timer) - extends QuantMap[Class[_], Timer](prefix, underlying.phases, new SubTimer("", underlying)) with SubQuantity { - private var elems: List[(Timer, Long)] = Nil - def push(cls: Class[_]): (Long, Long) = { - val topTimer = this(cls) - elems = (topTimer, 0L) :: elems - topTimer.start() + class TimerStack { + private var elems: List[(StackableTimer, Long)] = Nil + /** Start given timer and push it onto the stack */ + def push(t: StackableTimer): TimerSnapshot = { + elems = (t, 0L) :: elems + t.start() } - def pop(prev: (Long, Long)) = { + /** Stop and pop top timer in stack + */ + def pop(prev: TimerSnapshot) = { val (nanos0, start) = prev val duration = System.nanoTime() - start val (topTimer, nestedNanos) :: rest = elems - topTimer.nanos = nanos0 + duration - nestedNanos + topTimer.nanos = nanos0 + duration + topTimer.specificNanos += duration - nestedNanos topTimer.timings += 1 elems = rest match { case (outerTimer, outerNested) :: elems1 => @@ -211,7 +235,7 @@ quant) } private var _enabled = false - private val qs = new mutable.WeakHashMap[Quantity, Unit] + private val qs = new mutable.HashMap[String, Quantity] def enabled = _enabled def enabled_=(cond: Boolean) = { @@ -229,4 +253,9 @@ quant) _enabled = true } } + + /** replace rhs with enabled and rebuild to also count tiny but super-hot methods + * such as phase, flags, owner, name. + */ + final val hotEnabled = false } diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala index c90665508b..5b9090dae5 100644 --- a/src/reflect/scala/reflect/runtime/SymbolTable.scala +++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala @@ -4,7 +4,7 @@ package runtime /** * This symbol table trait fills in the definitions so that class information is obtained by refection. 
* It can be used either from a reflexive universe (class scala.reflect.runtime.JavaUniverse), or else from - * a runtime compiler that uses reflection to get a class information (class scala.tools.nsc.ReflectGlobal) + * a runtime compiler that uses reflection to get a class information (class scala.tools.reflect.ReflectGlobal) */ trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps { |