-rw-r--r--  classpath.SAMPLE | 12
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 172
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Modes.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 67
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 128
-rw-r--r--  src/compiler/scala/tools/reflect/ReflectGlobal.scala (renamed from src/compiler/scala/tools/nsc/ReflectGlobal.scala) | 9
-rw-r--r--  src/compiler/scala/tools/reflect/ReflectMain.scala (renamed from src/compiler/scala/tools/nsc/ReflectMain.scala) | 12
-rw-r--r--  src/compiler/scala/tools/reflect/ReflectSetup.scala (renamed from src/compiler/scala/tools/nsc/ReflectSetup.scala) | 7
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 1
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 15
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala | 40
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala | 15
-rw-r--r--  src/eclipse/README | 23
-rw-r--r--  src/eclipse/asm/.classpath | 6
-rw-r--r--  src/eclipse/asm/.project | 29
-rw-r--r--  src/eclipse/fjbg/.classpath | 7
-rw-r--r--  src/eclipse/fjbg/.project (renamed from project.SAMPLE) | 14
-rw-r--r--  src/eclipse/reflect/.classpath | 7
-rw-r--r--  src/eclipse/reflect/.project | 30
-rw-r--r--  src/eclipse/scala-compiler/.classpath | 14
-rw-r--r--  src/eclipse/scala-compiler/.project | 35
-rw-r--r--  src/eclipse/scala-library/.classpath | 7
-rw-r--r--  src/eclipse/scala-library/.project | 30
-rw-r--r--  src/library/scala/collection/DefaultMap.scala | 4
-rw-r--r--  src/library/scala/collection/GenTraversableLike.scala | 9
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 4
-rw-r--r--  src/library/scala/collection/IterableViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/Map.scala | 1
-rw-r--r--  src/library/scala/collection/MapLike.scala | 4
-rw-r--r--  src/library/scala/collection/SeqViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/SortedMap.scala | 19
-rw-r--r--  src/library/scala/collection/SortedMapLike.scala | 23
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 12
-rw-r--r--  src/library/scala/collection/TraversableViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/generic/IsTraversableLike.scala (renamed from src/library/scala/collection/generic/FromRepr.scala) | 36
-rw-r--r--  src/library/scala/collection/generic/IsTraversableOnce.scala | 62
-rw-r--r--  src/library/scala/collection/generic/package.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/MapLike.scala | 22
-rw-r--r--  src/library/scala/collection/immutable/RedBlackTree.scala | 33
-rw-r--r--  src/library/scala/collection/immutable/SortedMap.scala | 31
-rw-r--r--  src/library/scala/collection/immutable/TreeMap.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/TreeSet.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 12
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParIterable.scala | 1
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParIterable.scala | 3
-rw-r--r--  src/library/scala/reflect/ClassTag.scala | 2
-rw-r--r--  src/library/scala/reflect/compat.scala | 33
-rw-r--r--  src/library/scala/reflect/makro/internal/package.scala | 2
-rw-r--r--  src/library/scala/reflect/package.scala | 2
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 14
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 39
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 61
-rw-r--r--  src/reflect/scala/reflect/internal/util/Statistics.scala | 95
-rw-r--r--  src/reflect/scala/reflect/runtime/SymbolTable.scala | 2
-rw-r--r--  test/files/continuations-neg/ts-1681-nontail-return.check | 4
-rw-r--r--  test/files/continuations-neg/ts-1681-nontail-return.scala | 18
-rw-r--r--  test/files/continuations-run/ts-1681-2.check | 5
-rw-r--r--  test/files/continuations-run/ts-1681-2.scala | 44
-rw-r--r--  test/files/continuations-run/ts-1681-3.check | 4
-rw-r--r--  test/files/continuations-run/ts-1681-3.scala | 27
-rw-r--r--  test/files/continuations-run/ts-1681.check | 3
-rw-r--r--  test/files/continuations-run/ts-1681.scala | 29
-rw-r--r--  test/files/neg/macro-invalidret-nonuniversetree.check | 4
-rw-r--r--  test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala | 2
-rw-r--r--  test/files/neg/t2442.check | 9
-rw-r--r--  test/files/neg/t2442.flags | 1
-rw-r--r--  test/files/neg/t2442/MyEnum.java | 3
-rw-r--r--  test/files/neg/t2442/MySecondEnum.java | 6
-rw-r--r--  test/files/neg/t2442/t2442.scala | 15
-rw-r--r--  test/files/neg/t4541.check | 4
-rw-r--r--  test/files/neg/t4541b.check | 4
-rw-r--r--  test/files/neg/t4842.check | 7
-rw-r--r--  test/files/neg/t4842.scala (renamed from test/files/neg/t4842b.scala) | 4
-rw-r--r--  test/files/neg/t4842a.check | 4
-rw-r--r--  test/files/neg/t4842a.scala | 3
-rw-r--r--  test/files/neg/t4842b.check | 4
-rw-r--r--  test/files/neg/t5148.check | 2
-rw-r--r--  test/files/neg/t5148.scala | 4
-rw-r--r--  test/files/neg/t5761.check | 16
-rw-r--r--  test/files/neg/t5761.scala | 16
-rw-r--r--  test/files/neg/t5839.check | 6
-rw-r--r--  test/files/neg/t5839.scala | 7
-rw-r--r--  test/files/pos/rangepos-anonapply.flags | 1
-rw-r--r--  test/files/pos/rangepos-anonapply.scala | 9
-rw-r--r--  test/files/pos/t5846.scala | 10
-rw-r--r--  test/files/pos/t5899.flags | 1
-rw-r--r--  test/files/pos/t5899.scala | 19
-rw-r--r--  test/files/pos/t5932.flags | 1
-rw-r--r--  test/files/pos/t5932.scala | 15
-rw-r--r--  test/files/pos/t5953.scala | 2
-rw-r--r--  test/files/pos/t5968.flags | 1
-rw-r--r--  test/files/pos/t5968.scala | 8
-rw-r--r--  test/files/presentation/ide-bug-1000531.check | 2
-rw-r--r--  test/files/run/collection-conversions.check | 22
-rw-r--r--  test/files/run/collection-conversions.scala | 20
-rw-r--r--  test/files/run/enrich-gentraversable.check | 4
-rw-r--r--  test/files/run/enrich-gentraversable.scala | 81
-rw-r--r--  test/files/run/macro-reify-splice-splice/Macros_1.scala | 1
-rw-r--r--  test/files/run/t3326.check | 8
-rw-r--r--  test/files/run/t3326.scala | 74
-rw-r--r--  test/files/run/t4027.check | 12
-rw-r--r--  test/files/run/t4027.scala | 27
-rw-r--r--  test/files/run/t4935.check | 1
-rw-r--r--  test/files/run/t4935.flags | 1
-rw-r--r--  test/files/run/t4935.scala | 9
-rw-r--r--  test/files/run/t5914.check | 1
-rw-r--r--  test/files/run/t5914.scala | 23
-rw-r--r--  test/files/run/t5966.check | 3
-rw-r--r--  test/files/run/t5966.scala | 9
-rw-r--r--  test/files/run/t5971.check | 4
-rw-r--r--  test/files/run/t5971.scala | 23
-rw-r--r--  test/files/run/t5986.check | 15
-rw-r--r--  test/files/run/t5986.scala | 36
-rw-r--r--  test/files/scalacheck/redblacktree.scala | 4
123 files changed, 1336 insertions, 690 deletions
diff --git a/classpath.SAMPLE b/classpath.SAMPLE
deleted file mode 100644
index 9e607a41d9..0000000000
--- a/classpath.SAMPLE
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" path="src/compiler"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="lib" path="lib/msil.jar"/>
- <classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry kind="lib" path="lib/fjbg.jar"/>
- <classpathentry kind="lib" path="lib/forkjoin.jar"/>
- <classpathentry kind="lib" path="lib/ant/ant.jar"/>
- <classpathentry kind="output" path="build/quick/classes/compiler"/>
-</classpath>
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 0c527fbaf4..59adcc637a 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -458,6 +458,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
val CLASS_CONSTRUCTOR_NAME = "<clinit>"
val INSTANCE_CONSTRUCTOR_NAME = "<init>"
+ val INNER_CLASSES_FLAGS =
+ (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
+
// -----------------------------------------------------------------------------------------
// factory methods
// -----------------------------------------------------------------------------------------
@@ -644,6 +648,86 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
+ def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
+ /** The outer name for this inner class. Note that it returns null
+ * when the inner class should not get an index in the constant pool.
+ * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+ */
+ def outerName(innerSym: Symbol): String = {
+ if (innerSym.originalEnclosingMethod != NoSymbol)
+ null
+ else {
+ val outerName = javaName(innerSym.rawowner)
+ if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
+ else outerName
+ }
+ }
+
+ def innerName(innerSym: Symbol): String =
+ if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
+ null
+ else
+ innerSym.rawname + innerSym.moduleSuffix
+
+ // add inner classes which might not have been referenced yet
+ afterErasure {
+ for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
+ innerClassBuffer += m
+ }
+
+ val allInners: List[Symbol] = innerClassBuffer.toList
+ if (allInners.nonEmpty) {
+ debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
+
+ // entries ready to be serialized into the classfile, used to detect duplicates.
+ val entries = mutable.Map.empty[String, String]
+
+ // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
+ for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
+ val flags = mkFlags(
+ if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
+ javaFlags(innerSym),
+ if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
+ ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ val jname = javaName(innerSym) // never null
+ val oname = outerName(innerSym) // null when method-enclosed
+ val iname = innerName(innerSym) // null for anonymous inner class
+
+ // Mimicking javap inner class output
+ debuglog(
+ if (oname == null || iname == null) "//class " + jname
+ else "//%s=class %s of class %s".format(iname, jname, oname)
+ )
+
+ assert(jname != null, "javaName is broken.") // documentation
+ val doAdd = entries.get(jname) match {
+ // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
+ case Some(prevOName) =>
+ // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
+ // i.e. for them it must be the case that oname == java/lang/Thread
+ assert(prevOName == oname, "duplicate")
+ false
+ case None => true
+ }
+
+ if(doAdd) {
+ entries += (jname -> oname)
+ jclass.visitInnerClass(jname, oname, iname, flags)
+ }
+
+ /*
+ * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
+ * If a class file has a version number that is greater than or equal to 51.0, and
+ * has an InnerClasses attribute in its attributes table, then for all entries in the
+ * classes array of the InnerClasses attribute, the value of the
+ * outer_class_info_index item must be zero if the value of the
+ * inner_name_index item is zero.
+ */
+
+ }
+ }
+ }
+
} // end of class JBuilder
@@ -654,10 +738,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// more constants
// -----------------------------------------------------------------------------------------
- val INNER_CLASSES_FLAGS =
- (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
- asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
-
val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
@@ -969,86 +1049,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
- def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
- /** The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
- */
- def outerName(innerSym: Symbol): String = {
- if (innerSym.originalEnclosingMethod != NoSymbol)
- null
- else {
- val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
- else outerName
- }
- }
-
- def innerName(innerSym: Symbol): String =
- if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
- null
- else
- innerSym.rawname + innerSym.moduleSuffix
-
- // add inner classes which might not have been referenced yet
- afterErasure {
- for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
- innerClassBuffer += m
- }
-
- val allInners: List[Symbol] = innerClassBuffer.toList
- if (allInners.nonEmpty) {
- debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
-
- // entries ready to be serialized into the classfile, used to detect duplicates.
- val entries = mutable.Map.empty[String, String]
-
- // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
- for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
- val flags = mkFlags(
- if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
- javaFlags(innerSym),
- if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
- ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
- val jname = javaName(innerSym) // never null
- val oname = outerName(innerSym) // null when method-enclosed
- val iname = innerName(innerSym) // null for anonymous inner class
-
- // Mimicking javap inner class output
- debuglog(
- if (oname == null || iname == null) "//class " + jname
- else "//%s=class %s of class %s".format(iname, jname, oname)
- )
-
- assert(jname != null, "javaName is broken.") // documentation
- val doAdd = entries.get(jname) match {
- // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
- case Some(prevOName) =>
- // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
- // i.e. for them it must be the case that oname == java/lang/Thread
- assert(prevOName == oname, "duplicate")
- false
- case None => true
- }
-
- if(doAdd) {
- entries += (jname -> oname)
- jclass.visitInnerClass(jname, oname, iname, flags)
- }
-
- /*
- * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
- * If a class file has a version number that is greater than or equal to 51.0, and
- * has an InnerClasses attribute in its attributes table, then for all entries in the
- * classes array of the InnerClasses attribute, the value of the
- * outer_class_info_index item must be zero if the value of the
- * inner_name_index item is zero.
- */
-
- }
- }
- }
-
/** Adds a @remote annotation, actual use unknown.
*
* Invoked from genMethod() and addForwarder().
@@ -3033,9 +3033,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
constructor.visitEnd()
- // TODO no inner classes attribute is written. Confirm intent.
- assert(innerClassBuffer.isEmpty, innerClassBuffer)
-
+ addInnerClasses(clasz.symbol, beanInfoClass)
beanInfoClass.visitEnd()
writeIfNotTooBig("BeanInfo ", beanInfoName, beanInfoClass, clasz.symbol)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index d4ee9b6b48..5cc6e78e9d 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -100,9 +100,29 @@ abstract class DeadCodeElimination extends SubComponent {
var rd = rdef.in(bb);
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
i match {
+
case LOAD_LOCAL(l) =>
defs = defs + Pair(((bb, idx)), rd.vars)
-// Console.println(i + ": " + (bb, idx) + " rd: " + rd + " and having: " + defs)
+
+ case STORE_LOCAL(_) =>
+ /* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
+ * (otherwise any side-effects of the module's constructor go lost).
+ * (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM)
+ * are already marked (case clause below).
+ * (b) A CALL_METHOD targeting a method `m1` where the receiver is potentially a module (case clause below)
+ * will have the module's load marked provided `isSideEffecting(m1)`.
+ * TODO check for purity (the ICode?) of the module's constructor (besides m1's purity).
+ * See also https://github.com/paulp/scala/blob/topic/purity-analysis/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+ */
+ val necessary = rdef.findDefs(bb, idx, 1) exists { p =>
+ val (bb1, idx1) = p
+ bb1(idx1) match {
+ case LOAD_MODULE(module) => isLoadNeeded(module)
+ case _ => false
+ }
+ }
+ if (necessary) worklist += ((bb, idx))
+
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() => worklist += ((bb, idx))
@@ -129,6 +149,10 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
+ private def isLoadNeeded(module: Symbol): Boolean = {
+ module.info.member(nme.CONSTRUCTOR).filter(isSideEffecting) != NoSymbol
+ }
+
/** Mark useful instructions. Instructions in the worklist are each inspected and their
* dependencies are marked useful too, and added to the worklist.
*/
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index d8bf23f4fe..046b177444 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -879,7 +879,7 @@ abstract class ClassfileParser {
case tpnme.ScalaSignatureATTR =>
if (!isScalaAnnot) {
debuglog("warning: symbol " + sym.fullName + " has pickled signature in attribute")
- unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.toString)
+ unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.name)
}
in.skip(attrLen)
case tpnme.ScalaATTR =>
@@ -897,7 +897,7 @@ abstract class ClassfileParser {
case Some(san: AnnotationInfo) =>
val bytes =
san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes
- unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.toString)
+ unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name)
case None =>
throw new RuntimeException("Scala class file does not contain Scala annotation")
}
@@ -1013,9 +1013,16 @@ abstract class ClassfileParser {
} catch {
case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
case ex: Throwable =>
- debuglog("dropping annotation on " + sym + ", an error occured during parsing (e.g. annotation class not found)")
-
- None // ignore malformed annotations ==> t1135
+ // We want to be robust when annotations are unavailable, so the very least
+ // we can do is warn the user about the exception
+ // There was a reference to ticket 1135, but that is outdated: a reference to a class not on
+ // the classpath would *not* end up here. A class not found is signaled
+ // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example),
+ // and that should never be swallowed silently.
+ warning("Caught: " + ex + " while parsing annotations in " + in.file)
+ if (settings.debug.value) ex.printStackTrace()
+
+ None // ignore malformed annotations
}
/**
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 1d820afe11..124d350385 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1355,8 +1355,19 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
}
+
+ def reportError[T](body: =>T)(handler: TypeError => T): T =
+ try body
+ catch {
+ case te: TypeError =>
+ reporter.error(te.pos, te.msg)
+ handler(te)
+ }
- override def transform(tree: Tree): Tree = {
+ override def transform(tree: Tree): Tree =
+ reportError { transform1(tree) } {_ => tree}
+
+ def transform1(tree: Tree) = {
val symbol = tree.symbol
/** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
@@ -1382,14 +1393,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else None
}
- def reportError[T](body: =>T)(handler: TypeError => T): T =
- try body
- catch {
- case te: TypeError =>
- reporter.error(tree.pos, te.msg)
- handler(te)
- }
-
curTree = tree
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
@@ -1501,13 +1504,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
// log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
- def reportTypeError(body: =>Tree) =
- try body
- catch {
- case te: TypeError =>
- reporter.error(te.pos, te.toString)
- ddef
- }
+ def reportTypeError(body: =>Tree) = reportError(body)(_ => ddef)
+
if (symbol.isConstructor) {
val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index a4d9cdaffe..69b27045ab 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -88,9 +88,11 @@ trait EtaExpansion { self: Analyzer =>
defs ++= stats
liftoutPrefix(fun)
case Apply(fn, args) =>
- val byName = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe))
- // zipAll: with repeated params, there might be more args than params
- val newArgs = args.zipAll(byName, EmptyTree, false) map { case (arg, byN) => liftout(arg, byN) }
+ val byName: Int => Option[Boolean] = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe)).lift
+ val newArgs = mapWithIndex(args) { (arg, i) =>
+ // with repeated params, there might be more or fewer args than params
+ liftout(arg, byName(i).getOrElse(false))
+ }
treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null
case TypeApply(fn, args) =>
treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index f7e00109ae..68782379a6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1125,7 +1125,7 @@ trait Implicits {
* such that some part of `tp` has C as one of its superclasses.
*/
private def implicitsOfExpectedType: Infoss = {
- Statistics.incCounter(implicitCacheHits)
+ Statistics.incCounter(implicitCacheAccs)
implicitsCache get pt match {
case Some(implicitInfoss) =>
Statistics.incCounter(implicitCacheHits)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 688dcd91ac..e99c31374e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1104,7 +1104,9 @@ trait Infer {
try {
// debuglog("TVARS "+ (tvars map (_.constr)))
// look at the argument types of the primary constructor corresponding to the pattern
- val variances = undetparams map varianceInType(ctorTp.paramTypes.headOption getOrElse ctorTp)
+ val variances =
+ if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp)
+ else undetparams map varianceInTypes(ctorTp.paramTypes)
val targs = solvedTypes(tvars, undetparams, variances, true, lubDepth(List(resTp, pt)))
// checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
// no checkBounds here. If we enable it, test bug602 fails.
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
index bde3ad98c9..3eff5ef024 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
@@ -86,10 +86,6 @@ trait Modes {
*/
final val TYPEPATmode = 0x10000
- /** RETmode is set when we are typing a return expression.
- */
- final val RETmode = 0x20000
-
final private val StickyModes = EXPRmode | PATTERNmode | TYPEmode | ALTmode
final def onlyStickyModes(mode: Int) =
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 9b8ddffb49..c466206192 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -123,7 +123,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def zero: M[Nothing]
def one[T](x: P[T]): M[T]
def guard[T](cond: P[Boolean], then: => P[T]): M[T]
- def isSuccess[T, U](x: P[T])(f: P[T] => M[U]): P[Boolean] // used for isDefinedAt
}
* P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
@@ -137,7 +136,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted
def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
- def isSuccess[T, U](x: T)(f: T => Option[U]): Boolean = !f(x).isEmpty
}
*/
@@ -2006,7 +2004,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private val uniques = new collection.mutable.HashMap[Tree, Var]
def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
}
- class Var(val path: Tree, fullTp: Type, checked: Boolean = true) extends AbsVar {
+ class Var(val path: Tree, fullTp: Type) extends AbsVar {
private[this] val id: Int = Var.nextId
// private[this] var canModify: Option[Array[StackTraceElement]] = None
@@ -2028,26 +2026,24 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// we enumerate the subtypes of the full type, as that allows us to filter out more types statically,
// once we go to run-time checks (on Const's), convert them to checkable types
// TODO: there seems to be bug for singleton domains (variable does not show up in model)
- lazy val domain: Option[Set[Const]] =
- if (!checked) None
- else {
- val subConsts = enumerateSubtypes(fullTp).map{ tps =>
- tps.toSet[Type].map{ tp =>
- val domainC = TypeConst(tp)
- registerEquality(domainC)
- domainC
- }
+ lazy val domain: Option[Set[Const]] = {
+ val subConsts = enumerateSubtypes(fullTp).map{ tps =>
+ tps.toSet[Type].map{ tp =>
+ val domainC = TypeConst(tp)
+ registerEquality(domainC)
+ domainC
}
+ }
- val allConsts =
- if (! _considerNull) subConsts
- else {
- registerEquality(NullConst)
- subConsts map (_ + NullConst)
- }
+ val allConsts =
+ if (! _considerNull) subConsts
+ else {
+ registerEquality(NullConst)
+ subConsts map (_ + NullConst)
+ }
- observed; allConsts
- }
+ observed; allConsts
+ }
// accessing after calling considerNull will result in inconsistencies
lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
@@ -2157,6 +2153,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// the equals inherited from AnyRef does just this
}
+ // find most precise super-type of tp that is a class
+ // we skip non-class types (singleton types, abstract types) so that we can
+ // correctly compute how types relate in terms of the values they rule out
+ // e.g., when we know some value must be of type T, can it still be of type S? (this is the positive formulation of what `excludes` on Const computes)
+ // since we're talking values, there must have been a class involved in creating it, so rephrase our types in terms of classes
+ // (At least conceptually: `true` is an instance of class `Boolean`)
+ private def widenToClass(tp: Type) = {
+ // getOrElse to err on the safe side -- all BTS should end in Any, right?
+ val wideTp = tp.widen
+ val clsTp =
+ if (wideTp.typeSymbol.isClass) wideTp
+ else wideTp.baseTypeSeq.toList.find(_.typeSymbol.isClass).getOrElse(AnyClass.tpe)
+ // patmatDebug("Widening to class: "+ (tp, clsTp, tp.widen, tp.widen.baseTypeSeq, tp.widen.baseTypeSeq.toList.find(_.typeSymbol.isClass)))
+ clsTp
+ }
object TypeConst {
def apply(tp: Type) = {
@@ -2172,7 +2183,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
assert(!(tp =:= NullTp))
private[this] val id: Int = Const.nextTypeId
- val wideTp = tp.widen
+ val wideTp = widenToClass(tp)
override def toString = tp.toString //+"#"+ id
}
@@ -2191,10 +2202,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val tp = p.tpe.normalize
if (tp =:= NullTp) NullConst
else {
- val wideTp = {
- if (p.hasSymbol && p.symbol.isStable) tp.asSeenFrom(tp.prefix, p.symbol.owner).widen
- else tp.widen
- }
+ val wideTp = widenToClass(tp)
val narrowTp =
if (tp.isInstanceOf[SingletonType]) tp
@@ -2354,14 +2362,19 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// exhaustivity
- // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
- // TODO: domain of feasibly enumerable built-in types (enums, char?)
+ // TODO: domain of other feasibly enumerable built-in types (char?)
def enumerateSubtypes(tp: Type): Option[List[Type]] =
tp.typeSymbol match {
+ // TODO case _ if tp.isTupleType => // recurse into component types?
+ case UnitClass =>
+ Some(List(UnitClass.tpe))
case BooleanClass =>
// patmatDebug("enum bool "+ tp)
Some(List(ConstantType(Constant(true)), ConstantType(Constant(false))))
// TODO case _ if tp.isTupleType => // recurse into component types
+ case modSym: ModuleClassSymbol =>
+ Some(List(tp))
+ // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
// patmatDebug("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
None
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 119bb0852c..44fd4e9afd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1150,7 +1150,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
nonSensiblyNew()
else if (isNew(args.head) && (receiver.isEffectivelyFinal || isReferenceOp)) // object X ; X == new Y
nonSensiblyNew()
- else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual)) { // object X, Y; X == Y
+ else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual) && !actual.isRefinementClass) { // object X, Y; X == Y
if (isEitherNullable)
nonSensible("non-null ", false)
else
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 04d9d67d68..a167180267 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1833,7 +1833,7 @@ trait Typers extends Modes with Adaptations with Tags {
val params = fn.tpe.params
val args2 = if (params.isEmpty || !isRepeatedParamType(params.last.tpe)) args
else args.take(params.length - 1) :+ EmptyTree
- assert(sameLength(args2, params), "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug
+ assert(sameLength(args2, params) || call.isErrorTyped, "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug
(superConstr, args1 ::: args2)
case Block(stats, expr) if !stats.isEmpty =>
decompose(stats.last)
@@ -2036,7 +2036,7 @@ trait Typers extends Modes with Adaptations with Tags {
transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe)
}
- if (meth.isClassConstructor && !isPastTyper && !reporter.hasErrors && !meth.owner.isSubClass(AnyValClass)) {
+ if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass)) {
// At this point in AnyVal there is no supercall, which will blow up
// in computeParamAliases; there's nothing to be computed for Anyval anyway.
if (meth.isPrimaryConstructor)
@@ -3103,66 +3103,67 @@ trait Typers extends Modes with Adaptations with Tags {
val otpe = fun.tpe
- if (args.length > MaxTupleArity)
- return duplErrorTree(TooManyArgsPatternError(fun))
-
- //
- def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
- case MethodType(param :: _, _) =>
- (Nil, param.tpe)
- case PolyType(tparams, restpe) =>
- createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
- // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
- case OverloadedType(_, _) =>
- OverloadedUnapplyError(fun)
- (Nil, ErrorType)
- case _ =>
- UnapplyWithSingleArgError(fun)
- (Nil, ErrorType)
- }
+ if (args.length > MaxTupleArity)
+ return duplErrorTree(TooManyArgsPatternError(fun))
+
+ //
+ def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
+ case MethodType(param :: _, _) =>
+ (Nil, param.tpe)
+ case PolyType(tparams, restpe) =>
+ createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
+ // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
+ case OverloadedType(_, _) =>
+ OverloadedUnapplyError(fun)
+ (Nil, ErrorType)
+ case _ =>
+ UnapplyWithSingleArgError(fun)
+ (Nil, ErrorType)
+ }
- val unapp = unapplyMember(otpe)
- val unappType = otpe.memberType(unapp)
- val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
- val arg = Ident(argDummy) setType pt
+ val unapp = unapplyMember(otpe)
+ val unappType = otpe.memberType(unapp)
+ val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
+ val arg = Ident(argDummy) setType pt
val uncheckedTypeExtractor =
if (unappType.paramTypes.nonEmpty)
extractorForUncheckedType(tree.pos, unappType.paramTypes.head)
else None
- if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
- //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
- val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
- val unapplyContext = context.makeNewScope(context.tree, context.owner)
- freeVars foreach unapplyContext.scope.enter
+ if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
+ //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
+ val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
+ val unapplyContext = context.makeNewScope(context.tree, context.owner)
+ freeVars foreach unapplyContext.scope.enter
- val typer1 = newTyper(unapplyContext)
+ val typer1 = newTyper(unapplyContext)
val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe, canRemedy = uncheckedTypeExtractor.nonEmpty)
- // turn any unresolved type variables in freevars into existential skolems
- val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
- arg.tpe = pattp.substSym(freeVars, skolems)
- argDummy setInfo arg.tpe
- }
+ // turn any unresolved type variables in freevars into existential skolems
+ val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
+ arg.tpe = pattp.substSym(freeVars, skolems)
+ argDummy setInfo arg.tpe
+ }
- // setType null is necessary so that ref will be stabilized; see bug 881
- val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
+ // setType null is necessary so that ref will be stabilized; see bug 881
+ val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
- if (fun1.tpe.isErroneous) {
- duplErrTree
- } else {
- val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe)
- val formals1 = formalTypes(formals0, args.length)
- if (sameLength(formals1, args)) {
- val args1 = typedArgs(args, mode, formals0, formals1)
- // This used to be the following (failing) assert:
- // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
- // I modified as follows. See SI-1048.
- val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
-
- val itype = glb(List(pt1, arg.tpe))
- arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
+ if (fun1.tpe.isErroneous) duplErrTree
+ else {
+ val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe)
+ val formals1 = formalTypes(formals0, args.length)
+
+ if (!sameLength(formals1, args)) duplErrorTree(WrongNumberArgsPatternError(tree, fun))
+ else {
+ val args1 = typedArgs(args, mode, formals0, formals1)
+ // This used to be the following (failing) assert:
+ // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
+ // I modified as follows. See SI-1048.
+ val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
+
+ val itype = glb(List(pt1, arg.tpe))
+ arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
// if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor
@@ -3170,9 +3171,8 @@ trait Typers extends Modes with Adaptations with Tags {
// also skip if we already wrapped a classtag extractor (so we don't keep doing that forever)
if (uncheckedTypeExtractor.isEmpty || fun1.symbol.owner.isNonBottomSubClass(ClassTagClass)) unapply
else wrapClassTagUnapply(unapply, uncheckedTypeExtractor.get, unappType.paramTypes.head)
- } else
- duplErrorTree(WrongNumberArgsPatternError(tree, fun))
- }
+ }
+ }
}
def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
@@ -4024,8 +4024,7 @@ trait Typers extends Modes with Adaptations with Tags {
ReturnWithoutTypeError(tree, enclMethod.owner)
} else {
context.enclMethod.returnsSeen = true
- val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe)
-
+ val expr1: Tree = typed(expr, EXPRmode | BYVALmode, restpt.tpe)
// Warn about returning a value if no value can be returned.
if (restpt.tpe.typeSymbol == UnitClass) {
// The typing in expr1 says expr is Unit (it has already been coerced if
@@ -4981,7 +4980,7 @@ trait Typers extends Modes with Adaptations with Tags {
typedTypeApply(tree, mode, fun1, args1)
case Apply(Block(stats, expr), args) =>
- typed1(atPos(tree.pos)(Block(stats, Apply(expr, args))), mode, pt)
+ typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
case Apply(fun, args) =>
typedApply(fun, args) match {
@@ -5134,7 +5133,7 @@ trait Typers extends Modes with Adaptations with Tags {
indentTyping()
var alreadyTyped = false
- val startByType = Statistics.pushTimerClass(byTypeNanos, tree.getClass)
+ val startByType = Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass))
Statistics.incCounter(visitsByType, tree.getClass)
try {
if (context.retyping &&
@@ -5190,7 +5189,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
finally {
deindentTyping()
- Statistics.popTimerClass(byTypeNanos, startByType)
+ Statistics.popTimer(byTypeStack, startByType)
}
}
@@ -5378,10 +5377,11 @@ object TypersStats {
val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
- val failedSilentNanos = Statistics.newSubTimer ("time spent in failed", typerNanos)
- val failedApplyNanos = Statistics.newSubTimer (" failed apply", typerNanos)
- val failedOpEqNanos = Statistics.newSubTimer (" failed op=", typerNanos)
- val isReferencedNanos = Statistics.newSubTimer ("time spent ref scanning", typerNanos)
- val visitsByType = Statistics.newByClass ("#visits by tree node", "typer")(Statistics.newCounter(""))
- val byTypeNanos = Statistics.newByClassTimerStack("time spent by tree node", typerNanos)
+ val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos)
+ val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos)
+ val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos)
+ val isReferencedNanos = Statistics.newSubTimer("time spent ref scanning", typerNanos)
+ val visitsByType = Statistics.newByClass("#visits by tree node", "typer")(Statistics.newCounter(""))
+ val byTypeNanos = Statistics.newByClass("time spent by tree node", "typer")(Statistics.newStackableTimer("", typerNanos))
+ val byTypeStack = Statistics.newTimerStack()
}
diff --git a/src/compiler/scala/tools/nsc/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
index 4f4db83339..f8ded56ec6 100644
--- a/src/compiler/scala/tools/nsc/ReflectGlobal.scala
+++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
@@ -1,12 +1,15 @@
-package scala.tools.nsc
+package scala.tools
+package reflect
-import reporters.Reporter
+import scala.tools.nsc.Global
+import scala.tools.nsc.reporters.Reporter
+import scala.tools.nsc.Settings
/** A version of Global that uses reflection to get class
* infos, instead of reading class or source files.
*/
class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader)
- extends Global(currentSettings, reporter) with scala.tools.nsc.ReflectSetup with scala.reflect.runtime.SymbolTable {
+ extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable {
override def transformedType(sym: Symbol) =
erasure.transformInfo(sym,
diff --git a/src/compiler/scala/tools/nsc/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
index 161391fc2c..116ae24cdd 100644
--- a/src/compiler/scala/tools/nsc/ReflectMain.scala
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -1,8 +1,12 @@
-package scala.tools.nsc
+package scala.tools
+package reflect
-import tools.util.PathResolver
-import util.ClassPath.DefaultJavaContext
-import util.ScalaClassLoader
+import scala.tools.nsc.Driver
+import scala.tools.nsc.Global
+import scala.tools.nsc.Settings
+import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.util.PathResolver
object ReflectMain extends Driver {
diff --git a/src/compiler/scala/tools/nsc/ReflectSetup.scala b/src/compiler/scala/tools/reflect/ReflectSetup.scala
index 26c720a10f..f18c114d62 100644
--- a/src/compiler/scala/tools/nsc/ReflectSetup.scala
+++ b/src/compiler/scala/tools/reflect/ReflectSetup.scala
@@ -1,7 +1,10 @@
-package scala.tools.nsc
+package scala.tools
+package reflect
+
+import scala.tools.nsc.Global
/** A helper trait to initialize things that need to be set before JavaMirrors and other
* reflect specific traits are initialized */
-private[nsc] trait ReflectSetup { this: Global =>
+private[reflect] trait ReflectSetup { this: Global =>
phase = new Run().typerPhase
}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 278f4e3ff7..b4178102b9 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -2,7 +2,6 @@ package scala.tools
package reflect
import scala.tools.nsc.reporters._
-import scala.tools.nsc.ReflectGlobal
import scala.tools.nsc.CompilerCommand
import scala.tools.nsc.Global
import scala.tools.nsc.typechecker.Modes
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 464ffc6fab..a20ff1667b 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -171,9 +171,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
vprintln("yes we can!! (byval)")
return true
}
- } else if ((mode & global.analyzer.RETmode) != 0) {
- vprintln("yes we can!! (return)")
- return true
}
}
false
@@ -187,7 +184,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val patMode = (mode & global.analyzer.PATTERNmode) != 0
val exprMode = (mode & global.analyzer.EXPRmode) != 0
val byValMode = (mode & global.analyzer.BYVALmode) != 0
- val retMode = (mode & global.analyzer.RETmode) != 0
val annotsTree = cpsParamAnnotation(tree.tpe)
val annotsExpected = cpsParamAnnotation(pt)
@@ -214,12 +210,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val res = tree modifyType addMinusMarker
vprintln("adapted annotations (by val) of " + tree + " to " + res.tpe)
res
- } else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) {
- // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
- // tree will look like having no annotation
- val res = tree modifyType (_ withAnnotations List(newPlusMarker()))
- vprintln("adapted annotations (return) of " + tree + " to " + res.tpe)
- res
} else tree
}
@@ -476,11 +466,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
}
tpe
- case ret @ Return(expr) =>
- if (hasPlusMarker(expr.tpe))
- ret setType expr.tpe
- ret.tpe
-
case _ =>
tpe
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index 765cde5a81..3a1dc87a6a 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -3,7 +3,6 @@
package scala.tools.selectivecps
import scala.tools.nsc.Global
-import scala.collection.mutable.ListBuffer
trait CPSUtils {
val global: Global
@@ -136,43 +135,4 @@ trait CPSUtils {
case _ => None
}
}
-
- def isTailReturn(retExpr: Tree, body: Tree): Boolean = {
- val removed = ListBuffer[Tree]()
- removeTailReturn(body, removed)
- removed contains retExpr
- }
-
- def removeTailReturn(tree: Tree, removed: ListBuffer[Tree]): Tree = tree match {
- case Block(stms, r @ Return(expr)) =>
- removed += r
- treeCopy.Block(tree, stms, expr)
-
- case Block(stms, expr) =>
- treeCopy.Block(tree, stms, removeTailReturn(expr, removed))
-
- case If(cond, r1 @ Return(thenExpr), r2 @ Return(elseExpr)) =>
- removed ++= Seq(r1, r2)
- treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed))
-
- case If(cond, thenExpr, elseExpr) =>
- treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed))
-
- case Try(block, catches, finalizer) =>
- treeCopy.Try(tree,
- removeTailReturn(block, removed),
- (catches map (t => removeTailReturn(t, removed))).asInstanceOf[List[CaseDef]],
- removeTailReturn(finalizer, removed))
-
- case CaseDef(pat, guard, r @ Return(expr)) =>
- removed += r
- treeCopy.CaseDef(tree, pat, guard, expr)
-
- case CaseDef(pat, guard, body) =>
- treeCopy.CaseDef(tree, pat, guard, removeTailReturn(body, removed))
-
- case _ =>
- tree
- }
-
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index fe465aad0d..017c8d24fd 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -9,8 +9,6 @@ import scala.tools.nsc.plugins._
import scala.tools.nsc.ast._
-import scala.collection.mutable.ListBuffer
-
/**
* In methods marked @cps, explicitly name results of calls to other @cps methods
*/
@@ -48,20 +46,10 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
// this would cause infinite recursion. But we could remove the
// ValDef case here.
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs0) =>
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
debuglog("transforming " + dd.symbol)
atOwner(dd.symbol) {
- val tailReturns = ListBuffer[Tree]()
- val rhs = removeTailReturn(rhs0, tailReturns)
- // throw an error if there is a Return tree which is not in tail position
- rhs0 foreach {
- case r @ Return(_) =>
- if (!tailReturns.contains(r))
- unit.error(r.pos, "return expressions in CPS code must be in tail position")
- case _ => /* do nothing */
- }
-
val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe))
debuglog("result "+rhs1)
@@ -165,6 +153,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
}
}
+
def transExpr(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): Tree = {
transTailValue(tree, cpsA, cpsR) match {
case (Nil, b) => b
diff --git a/src/eclipse/README b/src/eclipse/README
new file mode 100644
index 0000000000..58dbd83815
--- /dev/null
+++ b/src/eclipse/README
@@ -0,0 +1,23 @@
+Eclipse project files
+=====================
+
+Import all projects inside Eclipse by choosing File/Import Existing Projects
+and navigate to src/eclipse. Check all projects and click ok.
+
+IMPORTANT
+=========
+
+You need to define a `path variable` inside Eclipse. Define SCALA_BASEDIR in
+Preferences/General/Workspace/Linked Resources. The value should be the absolute
+path to your scala checkout. All paths in project files are relative to this one,
+so nothing will work before you do so.
+
+DETAILS
+=======
+
+The compiler project depends on the library, reflect, asm and fjbg projects. The
+builder will take care of the correct ordering, and changes in one project will
+be picked up by the dependent projects.
+
+The output directory is set to be build/quick, so the runner scripts in quick
+work as they are (run an ant build to have the generated once)
\ No newline at end of file
diff --git a/src/eclipse/asm/.classpath b/src/eclipse/asm/.classpath
new file mode 100644
index 0000000000..03d9e9788d
--- /dev/null
+++ b/src/eclipse/asm/.classpath
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="src"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="output" path="asm-quick-bin"/>
+</classpath>
diff --git a/src/eclipse/asm/.project b/src/eclipse/asm/.project
new file mode 100644
index 0000000000..c9051389af
--- /dev/null
+++ b/src/eclipse/asm/.project
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>asm</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>src</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/asm</locationURI>
+ </link>
+ <link>
+ <name>asm-quick-bin</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/asm/classes</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/fjbg/.classpath b/src/eclipse/fjbg/.classpath
new file mode 100644
index 0000000000..3e2f55f48a
--- /dev/null
+++ b/src/eclipse/fjbg/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="fjbg"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="output" path="libs-classes-fjbg"/>
+</classpath>
diff --git a/project.SAMPLE b/src/eclipse/fjbg/.project
index 0034c397ed..8acea9f5fe 100644
--- a/project.SAMPLE
+++ b/src/eclipse/fjbg/.project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>scala</name>
+ <name>fjbg</name>
<comment></comment>
<projects>
</projects>
@@ -15,4 +15,16 @@
<nature>org.scala-ide.sdt.core.scalanature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
+ <linkedResources>
+ <link>
+ <name>fjbg</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/fjbg</locationURI>
+ </link>
+ <link>
+ <name>libs-classes-fjbg</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/libs/classes/fjbg</locationURI>
+ </link>
+ </linkedResources>
</projectDescription>
diff --git a/src/eclipse/reflect/.classpath b/src/eclipse/reflect/.classpath
new file mode 100644
index 0000000000..2a764d5142
--- /dev/null
+++ b/src/eclipse/reflect/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="reflect"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="output" path="build-quick-reflect"/>
+</classpath>
diff --git a/src/eclipse/reflect/.project b/src/eclipse/reflect/.project
new file mode 100644
index 0000000000..1e5cbb4ed9
--- /dev/null
+++ b/src/eclipse/reflect/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>reflect</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-reflect</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/reflect</locationURI>
+ </link>
+ <link>
+ <name>reflect</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/reflect</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath
new file mode 100644
index 0000000000..ff3b63f3ca
--- /dev/null
+++ b/src/eclipse/scala-compiler/.classpath
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/fjbg"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="lib" path="lib/ant/ant.jar"/>
+ <classpathentry kind="lib" path="lib/jline.jar"/>
+ <classpathentry kind="lib" path="lib/msil.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry kind="output" path="build-quick-compiler"/>
+</classpath>
diff --git a/src/eclipse/scala-compiler/.project b/src/eclipse/scala-compiler/.project
new file mode 100644
index 0000000000..cf8a68c8b6
--- /dev/null
+++ b/src/eclipse/scala-compiler/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>scala-compiler</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-compiler</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/compiler</locationURI>
+ </link>
+ <link>
+ <name>compiler</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/compiler</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-library/.classpath b/src/eclipse/scala-library/.classpath
new file mode 100644
index 0000000000..a3a4933d34
--- /dev/null
+++ b/src/eclipse/scala-library/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="library"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="output" path="build-quick-lib"/>
+</classpath>
diff --git a/src/eclipse/scala-library/.project b/src/eclipse/scala-library/.project
new file mode 100644
index 0000000000..049cf75e0b
--- /dev/null
+++ b/src/eclipse/scala-library/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>scala-library</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/library</locationURI>
+ </link>
+ <link>
+ <name>library</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/library</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala
index d00414751a..d961bd84bb 100644
--- a/src/library/scala/collection/DefaultMap.scala
+++ b/src/library/scala/collection/DefaultMap.scala
@@ -27,7 +27,7 @@ import generic._
* @since 2.8
*/
trait DefaultMap[A, +B] extends Map[A, B] { self =>
-
+
/** A default implementation which creates a new immutable map.
*/
override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = {
@@ -41,7 +41,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
*/
override def - (key: A): Map[A, B] = {
val b = newBuilder
- b ++= this filter (key != _)
+ b ++= this filter (key != _._1)
b.result
}
}
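
The one-character fix above turns `-` from a silent no-op into an actual removal: the old predicate compared the key against whole (key, value) pairs, so it never matched. A minimal sketch of the observable difference, using a hypothetical single-entry DefaultMap (only `get` and `iterator` need to be supplied):

    import scala.collection.DefaultMap

    val m = new DefaultMap[Int, String] {
      def get(key: Int) = if (key == 1) Some("one") else None
      def iterator      = Iterator(1 -> "one")
    }

    m - 1   // Map() with the fix; previously the filter kept every entry, giving Map(1 -> one)
    m - 2   // Map(1 -> one) either way
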
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index eaec7a2a76..0d51230623 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -411,12 +411,3 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
def stringPrefix: String
}
-
-object GenTraversableLike {
- /** Manufacture a conversion from collection representation type `Repr` to
- * its corresponding `GenTraversableLike` given an implicitly available
- * instance of `FromRepr[Repr]`.
- * @see [[scala.collection.generic.FromRepr]]
- */
- implicit def fromRepr[Repr](implicit fr : FromRepr[Repr]) = fr.hasElem
-}
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index e475865391..25edcfe19c 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -565,10 +565,10 @@ trait GenTraversableOnce[+A] extends Any {
* @tparam Col The collection type to build.
* @return a new collection containing all elements of this $coll.
*
- * @usecase def convertTo[Col[_]]: Col[A]
+ * @usecase def to[Col[_]]: Col[A]
* @inheritdoc
* $willNotTerminateInf
* @return a new collection containing all elements of this $coll.
*/
- def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV]
+ def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV]
}
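
For reference, a hypothetical REPL-style sketch of the renamed conversion (formerly `convertTo`): it builds any target collection for which a `CanBuildFrom[Nothing, A, Col[A]]` is implicitly available.

    import scala.collection.immutable.SortedSet

    val xs = List(3, 1, 2)

    xs.to[Vector]      // Vector(3, 1, 2)
    xs.to[SortedSet]   // SortedSet(1, 2, 3) -- the Ordering[Int] is picked up by SortedSet's CanBuildFrom
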
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index c842475590..e0c8b21d09 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -44,7 +44,7 @@ trait IterableViewLike[+A,
}
/** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
- private[collection] abstract class AbstractTransformed[+B] extends super[TraversableViewLike].Transformed[B] with Transformed[B]
+ private[collection] abstract class AbstractTransformed[+B] extends Iterable[B] with super[TraversableViewLike].Transformed[B] with Transformed[B]
trait EmptyView extends Transformed[Nothing] with super[TraversableViewLike].EmptyView with super[GenIterableViewLike].EmptyView
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index 42a56a9c5a..19c24c9791 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -51,6 +51,7 @@ object Map extends MapFactory[Map] {
def iterator = underlying.iterator
override def default(key: A): B = d(key)
}
+
}
/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 75f9ff93db..55d482f6c8 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -226,7 +226,7 @@ self =>
*/
def default(key: A): B =
throw new NoSuchElementException("key not found: " + key)
-
+
protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
@@ -240,7 +240,7 @@ self =>
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p)
-
+
protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] {
override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index f64045c9f6..73f5dda11c 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -40,7 +40,7 @@ trait SeqViewLike[+A,
}
/** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
- private[collection] abstract class AbstractTransformed[+B] extends super[IterableViewLike].AbstractTransformed[B] with Transformed[B]
+ private[collection] abstract class AbstractTransformed[+B] extends Seq[B] with super[IterableViewLike].Transformed[B] with Transformed[B]
trait EmptyView extends Transformed[Nothing] with super[IterableViewLike].EmptyView with super[GenSeqViewLike].EmptyView
diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala
index 3f92908848..e32e0977df 100644
--- a/src/library/scala/collection/SortedMap.scala
+++ b/src/library/scala/collection/SortedMap.scala
@@ -30,9 +30,26 @@ trait SortedMap[A, +B] extends Map[A, B] with SortedMapLike[A, B, SortedMap[A, B
* @since 2.8
*/
object SortedMap extends SortedMapFactory[SortedMap] {
- def empty[A, B](implicit ord: Ordering[A]): immutable.SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord)
+ def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord)
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B]
+
+ private[collection] trait Default[A, +B] extends SortedMap[A, B] {
+ self =>
+ override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = {
+ val b = SortedMap.newBuilder[A, B1]
+ b ++= this
+ b += ((kv._1, kv._2))
+ b.result
+ }
+
+ override def - (key: A): SortedMap[A, B] = {
+ val b = newBuilder
+ for (kv <- this; if kv._1 != key) b += kv
+ b.result
+ }
+ }
+
}
diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala
index 4dc0820a62..f9e95230ea 100644
--- a/src/library/scala/collection/SortedMapLike.scala
+++ b/src/library/scala/collection/SortedMapLike.scala
@@ -72,4 +72,27 @@ self =>
for (e <- elems) m = m + e
m
}
+
+ override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+ }
+
+ override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+ }
+
+ /** Adds a number of elements provided by a traversable object
+ * and returns a new collection with the added elements.
+ *
+ * @param xs the traversable object.
+ */
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
+ ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
+
}
+
+
+
+
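
These overrides keep the static type of `filterKeys` and `mapValues` results at `SortedMap` (previously it degraded to plain `Map`), and the wrappers forward `rangeImpl` to the underlying map. A hedged usage sketch:

    import scala.collection.immutable.SortedMap

    val m = SortedMap(1 -> "a", 2 -> "b", 3 -> "c")

    // Both wrappers stay sorted and keep the original ordering,
    // so range queries still work on them.
    val even:  SortedMap[Int, String] = m.filterKeys(_ % 2 == 0)    // SortedMap(2 -> b)
    val upper: SortedMap[Int, String] = m.mapValues(_.toUpperCase)  // SortedMap(1 -> A, 2 -> B, 3 -> C)
    upper.from(2)                                                   // SortedMap(2 -> B, 3 -> C)
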
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index e5861f5760..9356832afd 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -617,7 +617,7 @@ trait TraversableLike[+A, +Repr] extends Any
def toIterator: Iterator[A] = toStream.iterator
def toStream: Stream[A] = toBuffer.toStream
// Override to provide size hint.
- override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
val b = cbf()
b.sizeHint(this)
b ++= thisCollection
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 8dc6184d88..fb73805cc5 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -240,21 +240,21 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def toTraversable: Traversable[A]
- def toList: List[A] = convertTo[List]
+ def toList: List[A] = to[List]
def toIterable: Iterable[A] = toStream
def toSeq: Seq[A] = toStream
- def toIndexedSeq: immutable.IndexedSeq[A] = convertTo[immutable.IndexedSeq]
+ def toIndexedSeq: immutable.IndexedSeq[A] = to[immutable.IndexedSeq]
- def toBuffer[B >: A]: mutable.Buffer[B] = convertTo[ArrayBuffer].asInstanceOf[mutable.Buffer[B]]
+ def toBuffer[B >: A]: mutable.Buffer[B] = to[ArrayBuffer].asInstanceOf[mutable.Buffer[B]]
- def toSet[B >: A]: immutable.Set[B] = convertTo[immutable.Set].asInstanceOf[immutable.Set[B]]
+ def toSet[B >: A]: immutable.Set[B] = to[immutable.Set].asInstanceOf[immutable.Set[B]]
- def toVector: Vector[A] = convertTo[Vector]
+ def toVector: Vector[A] = to[Vector]
- def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
val b = cbf()
b ++= seq
b.result
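
The same builder protocol is usable outside the collection hierarchy; a minimal standalone sketch (the helper name `convert` is illustrative) that is equivalent in spirit to the `to` implementation above:

    import scala.collection.generic.CanBuildFrom

    // The target type only needs a CanBuildFrom with Nothing as the source,
    // i.e. a builder factory that does not depend on the originating collection.
    def convert[A, Col[_]](xs: TraversableOnce[A])
                          (implicit cbf: CanBuildFrom[Nothing, A, Col[A]]): Col[A] = {
      val b = cbf()   // fresh builder
      b ++= xs
      b.result
    }

    convert[Int, Vector](List(1, 2, 3))   // Vector(1, 2, 3)
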
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index eb2091a5f3..bf4f8205d6 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -117,7 +117,7 @@ trait TraversableViewLike[+A,
}
/** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
- private[collection] abstract class AbstractTransformed[+B] extends Transformed[B]
+ private[collection] abstract class AbstractTransformed[+B] extends Traversable[B] with Transformed[B]
trait EmptyView extends Transformed[Nothing] with super.EmptyView
diff --git a/src/library/scala/collection/generic/FromRepr.scala b/src/library/scala/collection/generic/IsTraversableLike.scala
index c08761332c..7288322903 100644
--- a/src/library/scala/collection/generic/FromRepr.scala
+++ b/src/library/scala/collection/generic/IsTraversableLike.scala
@@ -18,14 +18,12 @@ package generic
*
* Example usage,
* {{{
- * import scala.collection.generic.{ CanBuildFrom, FromRepr, HasElem }
- *
- * class FilterMapImpl[A, Repr](val r : Repr)(implicit hasElem : HasElem[Repr, A]) {
- * def filterMap[B, That](f : A => Option[B])
- * (implicit cbf : CanBuildFrom[Repr, B, That]) : That = r.flatMap(f(_).toSeq)
+ * class FilterMapImpl[A, Repr](val r: GenTraversableLike[A, Repr]) {
+ * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
+ * r.flatMap(f(_).toSeq)
* }
- *
- * implicit def filterMap[Repr : FromRepr](r : Repr) = new FilterMapImpl(r)
+ * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] =
+ * new FilterMapImpl(fr.conversion(r))
*
* val l = List(1, 2, 3, 4, 5)
* List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None)
@@ -33,24 +31,28 @@ package generic
* }}}
*
* @author Miles Sabin
+ * @author J. Suereth
* @since 2.10
*/
-trait FromRepr[Repr] {
+trait IsTraversableLike[Repr] {
+ /** The type of elements we can traverse over. */
type A
- val hasElem: HasElem[Repr, A]
+ /** A conversion from the representation type `Repr` to a `GenTraversableLike[A,Repr]`. */
+ val conversion: Repr => GenTraversableLike[A, Repr]
}
-object FromRepr {
+object IsTraversableLike {
import language.higherKinds
- implicit val stringFromRepr : FromRepr[String] { type A = Char } = new FromRepr[String] {
- type A = Char
- val hasElem = implicitly[HasElem[String, Char]]
- }
+ implicit val stringRepr: IsTraversableLike[String] { type A = Char } =
+ new IsTraversableLike[String] {
+ type A = Char
+ val conversion = implicitly[String => GenTraversableLike[Char, String]]
+ }
- implicit def genTraversableLikeFromRepr[C[_], A0]
- (implicit hasElem0: HasElem[C[A0], A0]) : FromRepr[C[A0]] { type A = A0 } = new FromRepr[C[A0]] {
+ implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableLike[A0,C[A0]]): IsTraversableLike[C[A0]] { type A = A0 } =
+ new IsTraversableLike[C[A0]] {
type A = A0
- val hasElem = hasElem0
+ val conversion = conv
}
}
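
The scaladoc example above is abridged; a self-contained sketch of the same enrichment pattern (the names `FilterMapOps`/`toFilterMapOps` are illustrative) that leans on 2.10's dependent method types to recover the element type `fr.A` from a bare `Repr`:

    object FilterMapDemo {
      import scala.collection.GenTraversableLike
      import scala.collection.generic.{ CanBuildFrom, IsTraversableLike }
      import scala.language.implicitConversions

      class FilterMapOps[A, Repr](val r: GenTraversableLike[A, Repr]) {
        def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
          r.flatMap(f(_).toSeq)
      }

      // The element type is not a parameter of the conversion: IsTraversableLike
      // recovers it, so the same enrichment works for List[Int], String, ...
      implicit def toFilterMapOps[Repr](r: Repr)(implicit fr: IsTraversableLike[Repr]): FilterMapOps[fr.A, Repr] =
        new FilterMapOps(fr.conversion(r))

      def main(args: Array[String]) {
        println(List(1, 2, 3, 4, 5) filterMap (i => if (i % 2 == 0) Some(i) else None))  // List(2, 4)
        println("hello" filterMap (c => if (c != 'l') Some(c.toUpper) else None))        // HEO
      }
    }
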
diff --git a/src/library/scala/collection/generic/IsTraversableOnce.scala b/src/library/scala/collection/generic/IsTraversableOnce.scala
new file mode 100644
index 0000000000..b336553231
--- /dev/null
+++ b/src/library/scala/collection/generic/IsTraversableOnce.scala
@@ -0,0 +1,62 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+/** Type class witnessing that a collection representation type `Repr` has
+ * elements of type `A` and has a conversion to `GenTraversableOnce[A]`.
+ *
+ * This type enables simple enrichment of `GenTraversableOnce`s with extension
+ * methods which can make full use of the mechanics of the Scala collections
+ * framework in their implementation.
+ *
+ * Example usage,
+ * {{{
+ * class FilterMapImpl[A, Repr](val r: GenTraversableOnce[A]) {
+ * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = {
+ * val b = cbf()
+ * for(e <- r.seq) f(e) foreach (b +=)
+ * b.result
+ * }
+ * }
+ * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] =
+ * new FilterMapImpl[fr.A, Repr](fr.conversion(r))
+ *
+ * val l = List(1, 2, 3, 4, 5)
+ * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None)
+ * // == List(2, 4)
+ * }}}
+ *
+ * @author Miles Sabin
+ * @author J. Suereth
+ * @since 2.10
+ */
+trait IsTraversableOnce[Repr] {
+ /** The type of elements we can traverse over. */
+ type A
+ /** A conversion from the representation type `Repr` to a `GenTraversableOnce[A]`. */
+ val conversion: Repr => GenTraversableOnce[A]
+}
+
+object IsTraversableOnce {
+ import language.higherKinds
+
+ implicit val stringRepr: IsTraversableOnce[String] { type A = Char } =
+ new IsTraversableOnce[String] {
+ type A = Char
+ val conversion = implicitly[String => GenTraversableOnce[Char]]
+ }
+
+ implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableOnce[A0]): IsTraversableOnce[C[A0]] { type A = A0 } =
+ new IsTraversableOnce[C[A0]] {
+ type A = A0
+ val conversion = conv
+ }
+}
+
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index 85b9995f2e..6eecb5e3ff 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -6,12 +6,6 @@ import language.higherKinds
package object generic {
type CanBuild[-Elem, +To] = CanBuildFrom[Nothing, Elem, To]
- /** The type of conversions from a collection representation type
- * `Repr` to its corresponding GenTraversableLike.
- * @see [[scala.collection.generic.FromRepr]]
- */
- type HasElem[Repr, A] = Repr => GenTraversableLike[A, Repr]
-
@deprecated("use ClassTagTraversableFactory instead", "2.10.0")
type ClassManifestTraversableFactory[CC[X] <: Traversable[X] with GenericClassManifestTraversableTemplate[X, CC]] = ClassTagTraversableFactory[CC]
@@ -20,4 +14,4 @@ package object generic {
@deprecated("use GenericClassTagTraversableTemplate instead", "2.10.0")
type GenericClassManifestTraversableTemplate[+A, +CC[X] <: Traversable[X]] = GenericClassTagTraversableTemplate[A, CC]
-} \ No newline at end of file
+}
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index 6ae2d78188..2dc08fff24 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -48,7 +48,8 @@ import parallel.immutable.ParMap
trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
extends scala.collection.MapLike[A, B, This]
with Parallelizable[(A, B), ParMap[A, B]]
-{ self =>
+{
+self =>
protected[this] override def parCombiner = ParMap.newCombiner[A, B]
@@ -84,31 +85,20 @@ trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
*/
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] =
((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _)
-
+
/** Filters this map by retaining only keys satisfying a predicate.
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- override def filterKeys(p: A => Boolean): Map[A, B] = new AbstractMap[A, B] with DefaultMap[A, B] {
- override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
- def iterator = self.iterator.filter(kv => p(kv._1))
- override def contains(key: A) = self.contains(key) && p(key)
- def get(key: A) = if (!p(key)) None else self.get(key)
- }
-
+ override def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) with DefaultMap[A, B]
+
/** Transforms this map by applying a function to every retrieved value.
* @param f the function used to transform values of this map.
* @return a map view which maps every key of this map
* to `f(this(key))`. The resulting map wraps the original map without copying any elements.
*/
- override def mapValues[C](f: B => C): Map[A, C] = new AbstractMap[A, C] with DefaultMap[A, C] {
- override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
- def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
- override def size = self.size
- override def contains(key: A) = self.contains(key)
- def get(key: A) = self.get(key).map(f)
- }
+ override def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f) with DefaultMap[A, C]
/** Collects all keys of this map in a set.
* @return a set containing all keys of this map.
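
The rewritten overrides simply reuse the generic `FilteredKeys`/`MappedValues` wrappers instead of duplicating their bodies; as before, both results wrap the receiver without copying any elements. A small reminder sketch:

    val base = Map("a" -> 1, "b" -> 2, "c" -> 3)

    // Entries are filtered / transformed on access, not copied into a new map.
    val noB     = base.filterKeys(_ != "b")   // Map(a -> 1, c -> 3)
    val doubled = base.mapValues(_ * 2)       // Map(a -> 2, b -> 4, c -> 6)

    doubled + ("d" -> 8)   // materialises a real map via the DefaultMap fallback for +
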
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 0f28c4997b..4b573511d1 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -43,7 +43,7 @@ object RedBlackTree {
}
def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count
- def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v))
+ def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v, overwrite))
def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k))
def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match {
case (Some(from), Some(until)) => this.range(tree, from, until)
@@ -122,17 +122,18 @@ object RedBlackTree {
else
mkTree(isBlack, x, xv, a, r)
}
- private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) {
+ private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) {
RedTree(k, v, null, null)
} else {
val cmp = ordering.compare(k, tree.key)
- if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v), tree.right)
- else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v))
- else mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v, overwrite), tree.right)
+ else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v, overwrite))
+ else if (overwrite || k != tree.key) mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ else tree
}
- // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
- // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html
+ /* Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
+ * http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html */
private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) {
if (isRedTree(tr)) {
@@ -216,7 +217,7 @@ object RedBlackTree {
if (ordering.lt(tree.key, from)) return doFrom(tree.right, from)
val newLeft = doFrom(tree.left, from)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) upd(tree.right, tree.key, tree.value)
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
else rebalance(tree, newLeft, tree.right)
}
private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -224,7 +225,7 @@ object RedBlackTree {
if (ordering.lt(to, tree.key)) return doTo(tree.left, to)
val newRight = doTo(tree.right, to)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -232,7 +233,7 @@ object RedBlackTree {
if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until)
val newRight = doUntil(tree.right, until)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -242,8 +243,8 @@ object RedBlackTree {
val newLeft = doFrom(tree.left, from)
val newRight = doUntil(tree.right, until)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value);
- else if (newRight eq null) upd(newLeft, tree.key, tree.value);
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value, false);
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value, false);
else rebalance(tree, newLeft, newRight)
}
@@ -254,7 +255,7 @@ object RedBlackTree {
if (n > count) return doDrop(tree.right, n - count - 1)
val newLeft = doDrop(tree.left, n)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) upd(tree.right, tree.key, tree.value)
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
else rebalance(tree, newLeft, tree.right)
}
private[this] def doTake[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
@@ -264,7 +265,7 @@ object RedBlackTree {
if (n <= count) return doTake(tree.left, n)
val newRight = doTake(tree.right, n - count - 1)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doSlice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
@@ -275,8 +276,8 @@ object RedBlackTree {
val newLeft = doDrop(tree.left, from)
val newRight = doTake(tree.right, until - count - 1)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value)
- else if (newRight eq null) upd(newLeft, tree.key, tree.value)
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value, false)
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value, false)
else rebalance(tree, newLeft, newRight)
}
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 526f7a1ffe..f147b673f7 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -30,7 +30,9 @@ import annotation.unchecked.uncheckedVariance
trait SortedMap[A, +B] extends Map[A, B]
with scala.collection.SortedMap[A, B]
with MapLike[A, B, SortedMap[A, B]]
- with SortedMapLike[A, B, SortedMap[A, B]] { self =>
+ with SortedMapLike[A, B, SortedMap[A, B]]
+{
+self =>
override protected[this] def newBuilder : Builder[(A, B), SortedMap[A, B]] =
SortedMap.newBuilder[A, B]
@@ -76,6 +78,17 @@ trait SortedMap[A, +B] extends Map[A, B]
*/
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
+
+ override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+ }
+
+ override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+ }
+
}
/** $factoryInfo
@@ -86,4 +99,20 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] {
/** $sortedMapCanBuildFromInfo */
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B]
def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B]
+
+ private[collection] trait Default[A, +B] extends SortedMap[A, B] with collection.SortedMap.Default[A, B] {
+ self =>
+ override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = {
+ val b = SortedMap.newBuilder[A, B1]
+ b ++= this
+ b += ((kv._1, kv._2))
+ b.result
+ }
+
+ override def - (key: A): SortedMap[A, B] = {
+ val b = newBuilder
+ for (kv <- this; if kv._1 != key) b += kv
+ b.result
+ }
+ }
}
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 4c1a5f2e03..51bc76efc3 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -131,7 +131,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
* @param value the value to be associated with `key`
* @return a new $coll with the updated binding
*/
- override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value))
+ override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, true))
/** Add a key/value pair to this map.
* @tparam B1 type of the value of the new binding, a supertype of `B`
@@ -171,7 +171,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
*/
def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
assert(!RB.contains(tree, key))
- new TreeMap(RB.update(tree, key, value))
+ new TreeMap(RB.update(tree, key, value, true))
}
def - (key:A): TreeMap[A, B] =
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 882e828c5b..697da2bc4b 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -112,7 +112,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
* @param elem a new element to add.
* @return a new $coll containing `elem` and all the elements of this $coll.
*/
- def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, ()))
+ def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), false))
/** A new `TreeSet` with the entry added is returned,
* assuming that elem is <em>not</em> in the TreeSet.
@@ -122,7 +122,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
*/
def insert(elem: A): TreeSet[A] = {
assert(!RB.contains(tree, elem))
- newSet(RB.update(tree, elem, ()))
+ newSet(RB.update(tree, elem, (), false))
}
/** Creates a new `TreeSet` with the entry removed.
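
The new `overwrite` flag of `RedBlackTree.update` threads through from these callers: `TreeMap.updated`/`insert` pass `true`, so an existing value is replaced, while `TreeSet.+`/`insert` pass `false`, so re-adding an element that is already `==` to the stored one keeps the existing node instead of allocating a replacement. A hedged sketch of the visible behaviour:

    import scala.collection.immutable.{ TreeMap, TreeSet }

    val m = TreeMap(1 -> "a")
    m.updated(1, "b")   // TreeMap(1 -> b): overwrite = true, the stored value is replaced

    val s = TreeSet(1, 2, 3)
    s + 2               // TreeSet(1, 2, 3): overwrite = false, the existing entry is kept as-is
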
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index a7ec833193..82498f9b63 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -841,7 +841,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toBuffer[U >: T]: collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers?
- override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] // TODO add ParTraversable[T]
+ override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]]
override def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]]
@@ -850,13 +850,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U])
override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V])
-
- // TODO(@alex22): make these better
- override def toVector: Vector[T] = seq.toVector
-
- override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = seq.convertTo[Col]
+ override def toVector: Vector[T] = to[Vector]
+ override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) {
+ toParCollection[T, Col[T]](() => cbf().asCombiner)
+ } else seq.to(cbf)
+
/* tasks */
protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] {
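
The parallel override first checks whether the requested builder is in fact a `Combiner`; if so, the conversion itself runs in parallel via `toParCollection`, otherwise it falls back to the sequential `seq.to`. Illustrative sketch:

    import scala.collection.parallel.immutable.ParVector

    val pxs = (1 to 100).par

    val pv: ParVector[Int] = pxs.to[ParVector]   // ParVector's builder is a Combiner: built in parallel
    val v:  Vector[Int]    = pxs.to[Vector]      // plain builder: sequential fallback through seq.to
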
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index d8c42d74b0..349f4fa44c 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -34,6 +34,7 @@ extends collection/*.immutable*/.GenIterable[T]
with collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]]
+ with Immutable
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index 700d21d0bb..b5747a31cf 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -29,7 +29,8 @@ import scala.collection.GenIterable
trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
with collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], Iterable[T]] {
+ with ParIterableLike[T, ParIterable[T], Iterable[T]]
+ with Mutable {
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
//protected[this] override def newBuilder = ParIterable.newBuilder[T]
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index f753dfbcbb..860e7bac28 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -52,7 +52,7 @@ trait ClassTag[T] extends Equals with Serializable {
* `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)`
* is uncheckable, but we have an instance of `ClassTag[T]`.
*/
- def unapply(x: Any): Option[T] = if (runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[T]) else None
+ def unapply(x: Any): Option[T] = if (x != null && runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[T]) else None
/** case class accessories */
override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]]
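
With the added guard, matching a `null` scrutinee now simply fails instead of throwing a `NullPointerException` from `getClass`. A direct sketch of the changed method:

    import scala.reflect.ClassTag

    val StringTag = implicitly[ClassTag[String]]

    StringTag.unapply("abc")   // Some(abc)
    StringTag.unapply(3)       // None: Integer is not assignable to String
    StringTag.unapply(null)    // None with this change; previously threw a NullPointerException
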
diff --git a/src/library/scala/reflect/compat.scala b/src/library/scala/reflect/compat.scala
deleted file mode 100644
index fc0e5fbf9c..0000000000
--- a/src/library/scala/reflect/compat.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-// [Eugene++] delete this once we merge with trunk and have a working IDE
-
-package scala.reflect {
- trait ArrayTag[T]
- trait ErasureTag[T]
- trait ConcreteTypeTag[T]
-}
-
-package scala.reflect.api {
- trait TypeTags {
- trait TypeTag[T]
- trait ConcreteTypeTag[T]
- }
-}
-
-package scala {
- import scala.reflect.base.{Universe => BaseUniverse}
-
- trait reflect_compat {
- lazy val mirror: BaseUniverse = ???
- }
-}
-
-package scala.reflect {
- import language.experimental.macros
- import scala.reflect.base.{Universe => BaseUniverse}
-
- trait internal_compat {
- private[scala] def materializeArrayTag[T](u: BaseUniverse): ArrayTag[T] = ???
- private[scala] def materializeErasureTag[T](u: BaseUniverse): ErasureTag[T] = ???
- private[scala] def materializeConcreteTypeTag[T](u: BaseUniverse): ConcreteTypeTag[T] = ???
- }
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/makro/internal/package.scala b/src/library/scala/reflect/makro/internal/package.scala
index d31a0f0d14..78cb0ffb10 100644
--- a/src/library/scala/reflect/makro/internal/package.scala
+++ b/src/library/scala/reflect/makro/internal/package.scala
@@ -9,7 +9,7 @@ import scala.reflect.base.{Universe => BaseUniverse}
//
// todo. once we have implicit macros for tag generation, we can remove these anchors
// [Eugene++] how do I hide this from scaladoc?
-package object internal extends scala.reflect.internal_compat {
+package object internal {
private[scala] def materializeClassTag[T](u: BaseUniverse): ClassTag[T] = macro ???
private[scala] def materializeAbsTypeTag[T](u: BaseUniverse): u.AbsTypeTag[T] = macro ???
private[scala] def materializeTypeTag[T](u: BaseUniverse): u.TypeTag[T] = macro ???
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index 0ee58df2cd..2ebc82875e 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -1,6 +1,6 @@
package scala
-package object reflect extends reflect_compat {
+package object reflect {
lazy val basis: base.Universe = new base.Base
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
index 2223a6db0f..2aa9a99054 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
@@ -336,7 +336,6 @@ import ILGenerator._
emitSpecialLabel(Label.Try)
val endExc: Label = new Label.NormalLabel() // new Label(lastLabel) ???
excStack.push(Label.Try, endExc)
- return endExc
}
/** Begins a catch block. */
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 18adab7c68..5ae8f22c64 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -129,11 +129,15 @@ abstract class SymbolTable extends makro.Universe
// sigh, this has to be public or atPhase doesn't inline.
var phStack: List[Phase] = Nil
- private var ph: Phase = NoPhase
- private var per = NoPeriod
+ private[this] var ph: Phase = NoPhase
+ private[this] var per = NoPeriod
final def atPhaseStack: List[Phase] = phStack
- final def phase: Phase = ph
+ final def phase: Phase = {
+ if (Statistics.hotEnabled)
+ Statistics.incCounter(SymbolTableStats.phaseCounter)
+ ph
+ }
def atPhaseStackMessage = atPhaseStack match {
case Nil => ""
@@ -330,3 +334,7 @@ abstract class SymbolTable extends makro.Universe
*/
def isCompilerUniverse = false
}
+
+object SymbolTableStats {
+ val phaseCounter = Statistics.newCounter("#phase calls")
+}
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 4b0ceeb86b..a3893a0236 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -645,6 +645,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
final def flags: Long = {
+ if (Statistics.hotEnabled) Statistics.incCounter(flagsCount)
val fs = _rawflags & phase.flagMask
(fs | ((fs & LateFlags) >>> LateShift)) & ~(fs >>> AntiShift)
}
@@ -936,7 +937,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ owner attribute --------------------------------------------------------------
- def owner: Symbol = rawowner
+ def owner: Symbol = {
+ Statistics.incCounter(ownerCount)
+ rawowner
+ }
+
// TODO - don't allow the owner to be changed without checking invariants, at least
// when under some flag. Define per-phase invariants for owner/owned relationships,
// e.g. after flatten all classes are owned by package classes, there are lots and
@@ -2324,7 +2329,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private[this] var _rawname: TermName = initName
def rawname = _rawname
- def name = _rawname
+ def name = {
+ Statistics.incCounter(nameCount)
+ _rawname
+ }
def name_=(name: Name) {
if (name != rawname) {
log("Renaming %s %s %s to %s".format(shortSymbolClass, debugFlagString, rawname, name))
@@ -2493,11 +2501,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def companionClass =
flatOwnerInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this)
- override def owner = (
+ override def owner = {
+ Statistics.incCounter(ownerCount)
if (!isMethod && needsFlatClasses) rawowner.owner
else rawowner
- )
- override def name: TermName = (
+ }
+ override def name: TermName = {
+ Statistics.incCounter(nameCount)
if (!isMethod && needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname)
@@ -2505,7 +2515,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
flatname
}
else rawname
- )
+ }
}
implicit val ModuleSymbolTag = ClassTag[ModuleSymbol](classOf[ModuleSymbol])
@@ -2576,7 +2586,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// cloneSymbolImpl still abstract in TypeSymbol.
def rawname = _rawname
- def name = _rawname
+ def name = {
+ Statistics.incCounter(nameCount)
+ _rawname
+ }
final def asNameType(n: Name) = n.toTypeName
override def isNonClassType = true
@@ -2888,10 +2901,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
thisTypeCache
}
- override def owner: Symbol =
+ override def owner: Symbol = {
+ Statistics.incCounter(ownerCount)
if (needsFlatClasses) rawowner.owner else rawowner
+ }
- override def name: TypeName = (
+ override def name: TypeName = {
+ Statistics.incCounter(nameCount)
if (needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname).toTypeName
@@ -2899,7 +2915,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
flatname
}
else rawname
- )
+ }
/** A symbol carrying the self type of the class as its type */
override def thisSym: Symbol = thissym
@@ -3194,4 +3210,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
object SymbolsStats {
val typeSymbolCount = Statistics.newCounter("#type symbols")
val classSymbolCount = Statistics.newCounter("#class symbols")
+ val flagsCount = Statistics.newCounter("#flags ops")
+ val ownerCount = Statistics.newCounter("#owner ops")
+ val nameCount = Statistics.newCounter("#name ops")
}
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index d4b895bcb4..4cf2cceb81 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -679,7 +679,7 @@ trait Types extends api.Types { self: SymbolTable =>
else {
// scala.tools.nsc.util.trace.when(pre.isInstanceOf[ExistentialType])("X "+this+".asSeenfrom("+pre+","+clazz+" = ") {
Statistics.incCounter(asSeenFromCount)
- val start = Statistics.startTimer(asSeenFromNanos)
+ val start = Statistics.pushTimer(typeOpsStack, asSeenFromNanos)
val m = new AsSeenFromMap(pre.normalize, clazz)
val tp = m apply this
val tp1 = existentialAbstraction(m.capturedParams, tp)
@@ -687,7 +687,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (m.capturedSkolems.isEmpty) tp1
else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1)
- Statistics.stopTimer(asSeenFromNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
result
}
}
@@ -826,12 +826,12 @@ trait Types extends api.Types { self: SymbolTable =>
def stat_<:<(that: Type): Boolean = {
Statistics.incCounter(subtypeCount)
- val start = Statistics.startTimer(subtypeNanos)
+ val start = Statistics.pushTimer(typeOpsStack, subtypeNanos)
val result =
(this eq that) ||
(if (explainSwitch) explain("<:", isSubType, this, that)
else isSubType(this, that, AnyDepth))
- Statistics.stopTimer(subtypeNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
result
}
@@ -839,12 +839,12 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def weak_<:<(that: Type): Boolean = {
Statistics.incCounter(subtypeCount)
- val start = Statistics.startTimer(subtypeNanos)
+ val start = Statistics.pushTimer(typeOpsStack, subtypeNanos)
val result =
((this eq that) ||
(if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
else isWeakSubType(this, that)))
- Statistics.stopTimer(subtypeNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
result
}
@@ -1018,7 +1018,7 @@ trait Types extends api.Types { self: SymbolTable =>
val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
Statistics.incCounter(findMemberCount)
- val start = Statistics.startTimer(findMemberNanos)
+ val start = Statistics.pushTimer(typeOpsStack, findMemberNanos)
//Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
var members: Scope = null
@@ -1045,7 +1045,7 @@ trait Types extends api.Types { self: SymbolTable =>
!sym.isPrivateLocal ||
(bcs0.head.hasTransOwner(bcs.head)))) {
if (name.isTypeName || stableOnly && sym.isStable) {
- Statistics.stopTimer(findMemberNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
if (suspension ne null) suspension foreach (_.suspended = false)
return sym
} else if (member == NoSymbol) {
@@ -1091,7 +1091,7 @@ trait Types extends api.Types { self: SymbolTable =>
} // while (!bcs.isEmpty)
excluded = excludedFlags
} // while (continue)
- Statistics.stopTimer(findMemberNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
if (suspension ne null) suspension foreach (_.suspended = false)
if (members eq null) {
if (member == NoSymbol) Statistics.incCounter(noMemberCount)
@@ -1534,11 +1534,17 @@ trait Types extends api.Types { self: SymbolTable =>
tpe.baseTypeSeqCache = bts lateMap paramToVar
} else {
Statistics.incCounter(compoundBaseTypeSeqCount)
- tpe.baseTypeSeqCache = undetBaseTypeSeq
- tpe.baseTypeSeqCache = if (tpe.typeSymbol.isRefinementClass)
- tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
- else
- compoundBaseTypeSeq(tpe)
+ val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos)
+ try {
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache =
+ if (tpe.typeSymbol.isRefinementClass)
+ tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
+ else
+ compoundBaseTypeSeq(tpe)
+ } finally {
+ Statistics.popTimer(typeOpsStack, start)
+ }
// [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors
// when compiling with
// scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala
@@ -2390,8 +2396,13 @@ trait Types extends api.Types { self: SymbolTable =>
tpe.baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
Statistics.incCounter(typerefBaseTypeSeqCount)
- tpe.baseTypeSeqCache = undetBaseTypeSeq
- tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
+ val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos)
+ try {
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
+ } finally {
+ Statistics.popTimer(typeOpsStack, start)
+ }
}
}
if (tpe.baseTypeSeqCache == undetBaseTypeSeq)
@@ -6306,13 +6317,13 @@ trait Types extends api.Types { self: SymbolTable =>
case List(t) => t
case _ =>
Statistics.incCounter(lubCount)
- val start = Statistics.startTimer(lubNanos)
+ val start = Statistics.pushTimer(typeOpsStack, lubNanos)
try {
lub(ts, lubDepth(ts))
} finally {
lubResults.clear()
glbResults.clear()
- Statistics.stopTimer(lubNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
}
}
@@ -6454,13 +6465,13 @@ trait Types extends api.Types { self: SymbolTable =>
case List(t) => t
case ts0 =>
Statistics.incCounter(lubCount)
- val start = Statistics.startTimer(lubNanos)
+ val start = Statistics.pushTimer(typeOpsStack, lubNanos)
try {
glbNorm(ts0, lubDepth(ts0))
} finally {
lubResults.clear()
glbResults.clear()
- Statistics.stopTimer(lubNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
}
}
@@ -6884,11 +6895,13 @@ object TypesStats {
val noMemberCount = Statistics.newSubCounter(" of which not found", findMemberCount)
val multMemberCount = Statistics.newSubCounter(" of which multiple overloaded", findMemberCount)
val typerNanos = Statistics.newTimer ("time spent typechecking", "typer")
- val lubNanos = Statistics.newSubTimer ("time spent in lubs", typerNanos)
- val subtypeNanos = Statistics.newSubTimer ("time spent in <:<", typerNanos)
- val findMemberNanos = Statistics.newSubTimer ("time spent in findmember", typerNanos)
- val asSeenFromNanos = Statistics.newSubTimer ("time spent in asSeenFrom", typerNanos)
+ val lubNanos = Statistics.newStackableTimer("time spent in lubs", typerNanos)
+ val subtypeNanos = Statistics.newStackableTimer("time spent in <:<", typerNanos)
+ val findMemberNanos = Statistics.newStackableTimer("time spent in findmember", typerNanos)
+ val asSeenFromNanos = Statistics.newStackableTimer("time spent in asSeenFrom", typerNanos)
+ val baseTypeSeqNanos = Statistics.newStackableTimer("time spent in baseTypeSeq", typerNanos)
val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
+ val typeOpsStack = Statistics.newTimerStack()
}
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index 57c9e98174..3a31c2858b 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -4,6 +4,8 @@ import collection.mutable
object Statistics {
+ type TimerSnapshot = (Long, Long)
+
/** If enabled, increment counter by one */
@inline final def incCounter(c: Counter) {
if (_enabled && c != null) c.value += 1
@@ -30,20 +32,20 @@ object Statistics {
}
/** If enabled, start timer */
- @inline final def startTimer(tm: Timer): (Long, Long) =
+ @inline final def startTimer(tm: Timer): TimerSnapshot =
if (_enabled && tm != null) tm.start() else null
/** If enabled, stop timer */
- @inline final def stopTimer(tm: Timer, start: (Long, Long)) {
+ @inline final def stopTimer(tm: Timer, start: TimerSnapshot) {
if (_enabled && tm != null) tm.stop(start)
}
/** If enabled, push and start a new timer in timer stack */
- @inline final def pushTimerClass(timers: ByClassTimerStack, cls: Class[_]): (Long, Long) =
- if (_enabled && timers != null) timers.push(cls) else null
+ @inline final def pushTimer(timers: TimerStack, timer: => StackableTimer): TimerSnapshot =
+ if (_enabled && timers != null) timers.push(timer) else null
/** If enabled, stop and pop timer from timer stack */
- @inline final def popTimerClass(timers: ByClassTimerStack, prev: (Long, Long)) {
+ @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot) {
if (_enabled && timers != null) timers.pop(prev)
}
@@ -73,6 +75,13 @@ object Statistics {
*/
def newSubTimer(prefix: String, timer: Timer): Timer = new SubTimer(prefix, timer)
+ /** Create a new stackable timer that shows as `prefix` and is active
+ * in the same phases as its base timer. Stackable timers are subtimers
+ * that can be stacked in a timer stack, and that print aggregate as well as specific
+ * durations.
+ */
+ def newStackableTimer(prefix: String, timer: Timer): StackableTimer = new StackableTimer(prefix, timer)
+
/** Create a new view that shows as `prefix` and is active in given phases.
* The view always reflects the current value of `quant` as a quantity.
*/
@@ -86,20 +95,27 @@ quant)
/** Same as newQuantMap, where the key type is fixed to be Class[_] */
def newByClass[V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue)
- /** Create a new timer stack map, indexed by Class[_]. */
- def newByClassTimerStack(prefix: String, underlying: Timer) = new ByClassTimerStack(prefix, underlying)
+ /** Create a new timer stack */
+ def newTimerStack() = new TimerStack()
def allQuantities: Iterable[Quantity] =
- for ((q, _) <- qs if !q.isInstanceOf[SubQuantity];
+ for ((_, q) <- qs if q.underlying == q;
r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
private def showPercent(x: Double, base: Double) =
if (base == 0) "" else f" (${x / base * 100}%2.1f%)"
+ /** The base trait for quantities.
+ * Quantities with non-empty prefix are printed in the statistics info.
+ */
trait Quantity {
- qs += (this -> ())
+ if (prefix.nonEmpty) {
+ val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix"
+ qs(key) = this
+ }
val prefix: String
val phases: Seq[String]
+ def underlying: Quantity = this
def showAt(phase: String) = phases.isEmpty || (phases contains phase)
def line = f"$prefix%-30s: ${this}"
val children = new mutable.ListBuffer[Quantity]
@@ -123,7 +139,7 @@ quant)
override def toString = quant.toString
}
- private class RelCounter(prefix: String, val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
+ private class RelCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
override def toString =
if (value == 0) "0"
else {
@@ -142,33 +158,39 @@ quant)
value + showPercent(value, underlying.value)
}
- class Timer(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Timer] {
+ class Timer(val prefix: String, val phases: Seq[String]) extends Quantity {
var nanos: Long = 0
var timings = 0
- def compare(that: Timer): Int =
- if (this.nanos < that.nanos) -1
- else if (this.nanos > that.nanos) 1
- else 0
def start() = {
(nanos, System.nanoTime())
}
- def stop(prev: (Long, Long)) {
+ def stop(prev: TimerSnapshot) {
val (nanos0, start) = prev
nanos = nanos0 + System.nanoTime() - start
timings += 1
}
- override def toString = s"$timings spans, ${nanos/1000}ms"
+ protected def show(ns: Long) = s"${ns/1000}ms"
+ override def toString = s"$timings spans, ${show(nanos)}"
}
- private class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity {
- override def toString: String = super.toString + showPercent(nanos, underlying.nanos)
+ class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity {
+ override protected def show(ns: Long) = super.show(ns) + showPercent(ns, underlying.nanos)
+ }
+
+ class StackableTimer(prefix: String, underlying: Timer) extends SubTimer(prefix, underlying) with Ordered[StackableTimer] {
+ var specificNanos: Long = 0
+ def compare(that: StackableTimer): Int =
+ if (this.specificNanos < that.specificNanos) -1
+ else if (this.specificNanos > that.specificNanos) 1
+ else 0
+ override def toString = s"${super.toString} aggregate, ${show(specificNanos)} specific"
}
/** A mutable map quantity where missing elements are automatically inserted
* on access by executing `initValue`.
*/
class QuantMap[K, V <% Ordered[V]](val prefix: String, val phases: Seq[String], initValue: => V)
- extends scala.collection.mutable.HashMap[K, V] with Quantity {
+ extends mutable.HashMap[K, V] with mutable.SynchronizedMap[K, V] with Quantity {
override def default(key: K) = {
val elem = initValue
this(key) = elem
@@ -183,23 +205,25 @@ quant)
}.mkString(", ")
}
- /** A mutable map quantity that takes class keys to subtimer values, relative to
- * some `underlying` timer. In addition, class timers can be pushed and popped.
- * Pushing the timer for a class means stopping the currently active timer.
+ /** A stack of timers, all active, where a timer's specific "clock"
+ * is stopped as long as it is buried by some other timer in the stack, but
+ * its aggregate clock keeps on ticking.
*/
- class ByClassTimerStack(prefix: String, val underlying: Timer)
- extends QuantMap[Class[_], Timer](prefix, underlying.phases, new SubTimer("", underlying)) with SubQuantity {
- private var elems: List[(Timer, Long)] = Nil
- def push(cls: Class[_]): (Long, Long) = {
- val topTimer = this(cls)
- elems = (topTimer, 0L) :: elems
- topTimer.start()
+ class TimerStack {
+ private var elems: List[(StackableTimer, Long)] = Nil
+ /** Start given timer and push it onto the stack */
+ def push(t: StackableTimer): TimerSnapshot = {
+ elems = (t, 0L) :: elems
+ t.start()
}
- def pop(prev: (Long, Long)) = {
+ /** Stop and pop top timer in stack
+ */
+ def pop(prev: TimerSnapshot) = {
val (nanos0, start) = prev
val duration = System.nanoTime() - start
val (topTimer, nestedNanos) :: rest = elems
- topTimer.nanos = nanos0 + duration - nestedNanos
+ topTimer.nanos = nanos0 + duration
+ topTimer.specificNanos += duration - nestedNanos
topTimer.timings += 1
elems = rest match {
case (outerTimer, outerNested) :: elems1 =>
@@ -211,7 +235,7 @@ quant)
}
private var _enabled = false
- private val qs = new mutable.WeakHashMap[Quantity, Unit]
+ private val qs = new mutable.HashMap[String, Quantity]
def enabled = _enabled
def enabled_=(cond: Boolean) = {
@@ -229,4 +253,9 @@ quant)
_enabled = true
}
}
+
+ /** Replace the rhs with `enabled` and rebuild to also count tiny but super-hot methods
+ * such as phase, flags, owner, name.
+ */
+ final val hotEnabled = false
}
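
For reference, the aggregate-versus-specific accounting performed by the TimerStack hunk above can be illustrated with a small, self-contained sketch. This is a hedged approximation, not the compiler's Statistics code: the names Clock, push, pop and the sleep-based driver are invented, but the bookkeeping in pop mirrors the patch, in that the whole duration since push is added to the aggregate clock, the time already claimed by nested timers is subtracted from the specific clock, and the duration is then charged to the enclosing timer's nested total.

object TimerStackSketch extends App {
  final class Clock(val name: String) {
    var aggregateNanos = 0L // time between push and pop, nested timers included
    var specificNanos  = 0L // time during which this timer sat on top of the stack
  }

  // Each stack entry pairs a clock with the nanos attributed to timers nested inside it.
  private var stack: List[(Clock, Long)] = Nil

  def push(c: Clock): Long = { stack = (c, 0L) :: stack; System.nanoTime() }

  def pop(start: Long): Unit = {
    val duration = System.nanoTime() - start
    val (top, nested) :: rest = stack
    top.aggregateNanos += duration          // everything since push counts as aggregate time
    top.specificNanos  += duration - nested // minus what nested timers already claimed
    stack = rest match {                    // charge this duration to the enclosing timer
      case (outer, outerNested) :: more => (outer, outerNested + duration) :: more
      case Nil                          => Nil
    }
  }

  val outer = new Clock("outer"); val inner = new Clock("inner")
  val t0 = push(outer)
  val t1 = push(inner); Thread.sleep(5); pop(t1)
  Thread.sleep(5); pop(t0)
  println(s"${outer.name}: aggregate=${outer.aggregateNanos} specific=${outer.specificNanos}")
  println(s"${inner.name}: aggregate=${inner.aggregateNanos} specific=${inner.specificNanos}")
}

Run as written, outer's aggregate clock covers both sleeps while its specific clock covers only the second one, which is exactly the distinction the new StackableTimer.toString reports as "aggregate" versus "specific".
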
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index c90665508b..5b9090dae5 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -4,7 +4,7 @@ package runtime
/**
* This symbol table trait fills in the definitions so that class information is obtained by reflection.
* It can be used either from a reflective universe (class scala.reflect.runtime.JavaUniverse), or else from
- * a runtime compiler that uses reflection to get class information (class scala.tools.nsc.ReflectGlobal)
+ * a runtime compiler that uses reflection to get class information (class scala.tools.reflect.ReflectGlobal)
*/
trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps {
diff --git a/test/files/continuations-neg/ts-1681-nontail-return.check b/test/files/continuations-neg/ts-1681-nontail-return.check
deleted file mode 100644
index 8fe15f154b..0000000000
--- a/test/files/continuations-neg/ts-1681-nontail-return.check
+++ /dev/null
@@ -1,4 +0,0 @@
-ts-1681-nontail-return.scala:10: error: return expressions in CPS code must be in tail position
- return v
- ^
-one error found
diff --git a/test/files/continuations-neg/ts-1681-nontail-return.scala b/test/files/continuations-neg/ts-1681-nontail-return.scala
deleted file mode 100644
index af86ad304f..0000000000
--- a/test/files/continuations-neg/ts-1681-nontail-return.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-import scala.util.continuations._
-
-class ReturnRepro {
- def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
- def caller = reset { println(p(3)) }
-
- def p(i: Int): Int @cpsParam[Unit, Any] = {
- val v= s1 + 3
- if (v == 8)
- return v
- v + 1
- }
-}
-
-object Test extends App {
- val repro = new ReturnRepro
- repro.caller
-}
diff --git a/test/files/continuations-run/ts-1681-2.check b/test/files/continuations-run/ts-1681-2.check
deleted file mode 100644
index 35b3c93780..0000000000
--- a/test/files/continuations-run/ts-1681-2.check
+++ /dev/null
@@ -1,5 +0,0 @@
-8
-hi
-8
-from try
-8
diff --git a/test/files/continuations-run/ts-1681-2.scala b/test/files/continuations-run/ts-1681-2.scala
deleted file mode 100644
index 8a896dec2c..0000000000
--- a/test/files/continuations-run/ts-1681-2.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-import scala.util.continuations._
-
-class ReturnRepro {
- def s1: Int @cps[Any] = shift { k => k(5) }
- def caller = reset { println(p(3)) }
- def caller2 = reset { println(p2(3)) }
- def caller3 = reset { println(p3(3)) }
-
- def p(i: Int): Int @cps[Any] = {
- val v= s1 + 3
- return v
- }
-
- def p2(i: Int): Int @cps[Any] = {
- val v = s1 + 3
- if (v > 0) {
- println("hi")
- return v
- } else {
- println("hi")
- return 8
- }
- }
-
- def p3(i: Int): Int @cps[Any] = {
- val v = s1 + 3
- try {
- println("from try")
- return v
- } catch {
- case e: Exception =>
- println("from catch")
- return 7
- }
- }
-
-}
-
-object Test extends App {
- val repro = new ReturnRepro
- repro.caller
- repro.caller2
- repro.caller3
-}
diff --git a/test/files/continuations-run/ts-1681-3.check b/test/files/continuations-run/ts-1681-3.check
deleted file mode 100644
index 71489f097c..0000000000
--- a/test/files/continuations-run/ts-1681-3.check
+++ /dev/null
@@ -1,4 +0,0 @@
-enter return expr
-8
-hi
-8
diff --git a/test/files/continuations-run/ts-1681-3.scala b/test/files/continuations-run/ts-1681-3.scala
deleted file mode 100644
index 62c547f5a2..0000000000
--- a/test/files/continuations-run/ts-1681-3.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-import scala.util.continuations._
-
-class ReturnRepro {
- def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
- def caller = reset { println(p(3)) }
- def caller2 = reset { println(p2(3)) }
-
- def p(i: Int): Int @cpsParam[Unit, Any] = {
- val v= s1 + 3
- return { println("enter return expr"); v }
- }
-
- def p2(i: Int): Int @cpsParam[Unit, Any] = {
- val v = s1 + 3
- if (v > 0) {
- return { println("hi"); v }
- } else {
- return { println("hi"); 8 }
- }
- }
-}
-
-object Test extends App {
- val repro = new ReturnRepro
- repro.caller
- repro.caller2
-}
diff --git a/test/files/continuations-run/ts-1681.check b/test/files/continuations-run/ts-1681.check
deleted file mode 100644
index 85176d8e66..0000000000
--- a/test/files/continuations-run/ts-1681.check
+++ /dev/null
@@ -1,3 +0,0 @@
-8
-hi
-8
diff --git a/test/files/continuations-run/ts-1681.scala b/test/files/continuations-run/ts-1681.scala
deleted file mode 100644
index efb1abae15..0000000000
--- a/test/files/continuations-run/ts-1681.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-import scala.util.continuations._
-
-class ReturnRepro {
- def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
- def caller = reset { println(p(3)) }
- def caller2 = reset { println(p2(3)) }
-
- def p(i: Int): Int @cpsParam[Unit, Any] = {
- val v= s1 + 3
- return v
- }
-
- def p2(i: Int): Int @cpsParam[Unit, Any] = {
- val v = s1 + 3
- if (v > 0) {
- println("hi")
- return v
- } else {
- println("hi")
- return 8
- }
- }
-}
-
-object Test extends App {
- val repro = new ReturnRepro
- repro.caller
- repro.caller2
-}
diff --git a/test/files/neg/macro-invalidret-nonuniversetree.check b/test/files/neg/macro-invalidret-nonuniversetree.check
index 4fc06b5ceb..1b9487982f 100644
--- a/test/files/neg/macro-invalidret-nonuniversetree.check
+++ b/test/files/neg/macro-invalidret-nonuniversetree.check
@@ -1,7 +1,7 @@
Macros_Test_2.scala:2: error: macro implementation has wrong shape:
required: (c: scala.reflect.makro.Context): c.Expr[Any]
- found : (c: scala.reflect.makro.Context): reflect.mirror.Literal
-type mismatch for return type: reflect.mirror.Literal does not conform to c.Expr[Any]
+ found : (c: scala.reflect.makro.Context): reflect.basis.Literal
+type mismatch for return type: reflect.basis.Literal does not conform to c.Expr[Any]
def foo = macro Impls.foo
^
one error found
diff --git a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
index 86b7c8d8d0..da0eb0ac83 100644
--- a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
+++ b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
@@ -1,5 +1,5 @@
import scala.reflect.makro.{Context => Ctx}
object Impls {
- def foo(c: Ctx) = scala.reflect.mirror.Literal(scala.reflect.mirror.Constant(42))
+ def foo(c: Ctx) = scala.reflect.basis.Literal(scala.reflect.basis.Constant(42))
}
diff --git a/test/files/neg/t2442.check b/test/files/neg/t2442.check
new file mode 100644
index 0000000000..714816fd62
--- /dev/null
+++ b/test/files/neg/t2442.check
@@ -0,0 +1,9 @@
+t2442.scala:4: error: match may not be exhaustive.
+It would fail on the following input: THREE
+ def f(e: MyEnum) = e match {
+ ^
+t2442.scala:11: error: match may not be exhaustive.
+It would fail on the following input: BLUE
+ def g(e: MySecondEnum) = e match {
+ ^
+two errors found
diff --git a/test/files/neg/t2442.flags b/test/files/neg/t2442.flags
new file mode 100644
index 0000000000..32cf036c3d
--- /dev/null
+++ b/test/files/neg/t2442.flags
@@ -0,0 +1 @@
+-Xexperimental -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t2442/MyEnum.java b/test/files/neg/t2442/MyEnum.java
new file mode 100644
index 0000000000..3ffbbb31b8
--- /dev/null
+++ b/test/files/neg/t2442/MyEnum.java
@@ -0,0 +1,3 @@
+public enum MyEnum {
+ ONE, TWO, THREE;
+} \ No newline at end of file
diff --git a/test/files/neg/t2442/MySecondEnum.java b/test/files/neg/t2442/MySecondEnum.java
new file mode 100644
index 0000000000..0f841286de
--- /dev/null
+++ b/test/files/neg/t2442/MySecondEnum.java
@@ -0,0 +1,6 @@
+public enum MySecondEnum {
+ RED(1), BLUE(2) { public void foo() {} };
+ MySecondEnum(int i) {}
+
+ public void foo() {}
+} \ No newline at end of file
diff --git a/test/files/neg/t2442/t2442.scala b/test/files/neg/t2442/t2442.scala
new file mode 100644
index 0000000000..b0a0f3cd41
--- /dev/null
+++ b/test/files/neg/t2442/t2442.scala
@@ -0,0 +1,15 @@
+class Test {
+ import MyEnum._
+
+ def f(e: MyEnum) = e match {
+ case ONE => println("one")
+ case TWO => println("two")
+ // missing case --> exhaustivity warning!
+ }
+
+ import MySecondEnum._
+ def g(e: MySecondEnum) = e match {
+ case RED => println("red")
+ // missing case --> exhaustivity warning!
+ }
+} \ No newline at end of file
diff --git a/test/files/neg/t4541.check b/test/files/neg/t4541.check
index c01226685f..7bd8ff78f9 100644
--- a/test/files/neg/t4541.check
+++ b/test/files/neg/t4541.check
@@ -1,7 +1,7 @@
-t4541.scala:11: error: scala.reflect.internal.Types$TypeError: variable data in class Sparse cannot be accessed in Sparse[Int]
+t4541.scala:11: error: variable data in class Sparse cannot be accessed in Sparse[Int]
Access to protected method data not permitted because
prefix type Sparse[Int] does not conform to
class Sparse$mcI$sp where the access take place
that.data
^
-one error found \ No newline at end of file
+one error found
diff --git a/test/files/neg/t4541b.check b/test/files/neg/t4541b.check
index 54d9c3d1ee..8a52fd97f4 100644
--- a/test/files/neg/t4541b.check
+++ b/test/files/neg/t4541b.check
@@ -1,7 +1,7 @@
-t4541b.scala:13: error: scala.reflect.internal.Types$TypeError: variable data in class SparseArray cannot be accessed in SparseArray[Int]
+t4541b.scala:13: error: variable data in class SparseArray cannot be accessed in SparseArray[Int]
Access to protected method data not permitted because
prefix type SparseArray[Int] does not conform to
class SparseArray$mcI$sp where the access take place
use(that.data.clone)
^
-one error found \ No newline at end of file
+one error found
diff --git a/test/files/neg/t4842.check b/test/files/neg/t4842.check
new file mode 100644
index 0000000000..b53bbdbd15
--- /dev/null
+++ b/test/files/neg/t4842.check
@@ -0,0 +1,7 @@
+t4842.scala:2: error: self constructor arguments cannot reference unconstructed `this`
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+ ^
+t4842.scala:6: error: self constructor arguments cannot reference unconstructed `this`
+ def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
+ ^
+two errors found
diff --git a/test/files/neg/t4842b.scala b/test/files/neg/t4842.scala
index a7996cc061..c6244efda7 100644
--- a/test/files/neg/t4842b.scala
+++ b/test/files/neg/t4842.scala
@@ -1,3 +1,7 @@
+class Foo (x: AnyRef) {
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+}
+
class TypeArg[X](val x: X)(a: AnyRef) {
def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
}
diff --git a/test/files/neg/t4842a.check b/test/files/neg/t4842a.check
deleted file mode 100644
index 39d77bfc48..0000000000
--- a/test/files/neg/t4842a.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t4842a.scala:2: error: self constructor arguments cannot reference unconstructed `this`
- def this(x: Int) = this(new { println(Foo.this)}) // error
- ^
-one error found
diff --git a/test/files/neg/t4842a.scala b/test/files/neg/t4842a.scala
deleted file mode 100644
index 78360effb4..0000000000
--- a/test/files/neg/t4842a.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class Foo (x: AnyRef) {
- def this(x: Int) = this(new { println(Foo.this)}) // error
-}
diff --git a/test/files/neg/t4842b.check b/test/files/neg/t4842b.check
deleted file mode 100644
index c7ccd5e059..0000000000
--- a/test/files/neg/t4842b.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t4842b.scala:2: error: self constructor arguments cannot reference unconstructed `this`
- def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
- ^
-one error found
diff --git a/test/files/neg/t5148.check b/test/files/neg/t5148.check
new file mode 100644
index 0000000000..96eb1fd364
--- /dev/null
+++ b/test/files/neg/t5148.check
@@ -0,0 +1,2 @@
+error: bad reference while unpickling Imports.class: term memberHandlers not found in scala.tools.nsc.interpreter.IMain
+one error found
diff --git a/test/files/neg/t5148.scala b/test/files/neg/t5148.scala
new file mode 100644
index 0000000000..fca64e57df
--- /dev/null
+++ b/test/files/neg/t5148.scala
@@ -0,0 +1,4 @@
+package scala.tools.nsc
+package interpreter
+
+class IMain extends Imports
diff --git a/test/files/neg/t5761.check b/test/files/neg/t5761.check
new file mode 100644
index 0000000000..89d766fe34
--- /dev/null
+++ b/test/files/neg/t5761.check
@@ -0,0 +1,16 @@
+t5761.scala:4: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]{}) // crash
+ ^
+t5761.scala:8: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]()) // no crash
+ ^
+t5761.scala:9: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]{}) // crash
+ ^
+t5761.scala:13: error: not found: type Tread
+ new Tread("sth") { }.run()
+ ^
+four errors found
diff --git a/test/files/neg/t5761.scala b/test/files/neg/t5761.scala
new file mode 100644
index 0000000000..040c4eb75a
--- /dev/null
+++ b/test/files/neg/t5761.scala
@@ -0,0 +1,16 @@
+class D[-A](x: A) { }
+
+object Test1 {
+ println(new D[Int]{}) // crash
+}
+
+object Test2 {
+ println(new D[Int]()) // no crash
+ println(new D[Int]{}) // crash
+}
+
+object Test3 {
+ new Tread("sth") { }.run()
+}
+
+
diff --git a/test/files/neg/t5839.check b/test/files/neg/t5839.check
new file mode 100644
index 0000000000..d4b125bd1e
--- /dev/null
+++ b/test/files/neg/t5839.check
@@ -0,0 +1,6 @@
+t5839.scala:5: error: type mismatch;
+ found : (x: String => String)Int <and> (x: Int)Int
+ required: Int => String
+ val x: String = goo(foo _)
+ ^
+one error found
diff --git a/test/files/neg/t5839.scala b/test/files/neg/t5839.scala
new file mode 100644
index 0000000000..d3a5d4b25f
--- /dev/null
+++ b/test/files/neg/t5839.scala
@@ -0,0 +1,7 @@
+object Test {
+ def goo[T](x: Int => T): T = x(1)
+ implicit def f(x: Int): String = ""
+ def foo(x: Int): Int = x + 1
+ val x: String = goo(foo _)
+ def foo(x: String => String) = 1
+}
diff --git a/test/files/pos/rangepos-anonapply.flags b/test/files/pos/rangepos-anonapply.flags
new file mode 100644
index 0000000000..281f0a10cd
--- /dev/null
+++ b/test/files/pos/rangepos-anonapply.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/rangepos-anonapply.scala b/test/files/pos/rangepos-anonapply.scala
new file mode 100644
index 0000000000..2f3e4ad6cd
--- /dev/null
+++ b/test/files/pos/rangepos-anonapply.scala
@@ -0,0 +1,9 @@
+class Test {
+ trait PropTraverser {
+ def apply(x: Int): Unit = {}
+ }
+
+ def gather(x: Int) {
+ (new PropTraverser {})(x)
+ }
+}
diff --git a/test/files/pos/t5846.scala b/test/files/pos/t5846.scala
new file mode 100644
index 0000000000..b06f5ac39c
--- /dev/null
+++ b/test/files/pos/t5846.scala
@@ -0,0 +1,10 @@
+
+
+
+
+/** Return the most general sorted map type. */
+object Test extends App {
+
+ val empty: collection.SortedMap[String, String] = collection.SortedMap.empty[String, String]
+
+}
diff --git a/test/files/pos/t5899.flags b/test/files/pos/t5899.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t5899.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t5899.scala b/test/files/pos/t5899.scala
new file mode 100644
index 0000000000..b16f1f84fe
--- /dev/null
+++ b/test/files/pos/t5899.scala
@@ -0,0 +1,19 @@
+import scala.tools.nsc._
+
+trait Foo {
+ val global: Global
+ import global.{Name, Symbol, nme}
+
+ case class Bippy(name: Name)
+
+ def f(x: Bippy, sym: Symbol): Int = {
+ // no warning (!) for
+ // val Stable = sym.name.toTermName
+
+ val Stable = sym.name
+ Bippy(Stable) match {
+ case Bippy(nme.WILDCARD) => 1
+ case Bippy(Stable) => 2 // should not be considered unreachable
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t5932.flags b/test/files/pos/t5932.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t5932.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t5932.scala b/test/files/pos/t5932.scala
new file mode 100644
index 0000000000..d824523d5b
--- /dev/null
+++ b/test/files/pos/t5932.scala
@@ -0,0 +1,15 @@
+class A
+
+case object B extends A
+
+object Test {
+ val x1 = (B: A)
+
+ println(x1 == B) // no warning
+ println(B == x1) // no warning
+
+ val x2 = (B: A with Product)
+
+ println(x2 == B) // no warning
+ println(B == x2) // spurious warning: "always returns false"
+}
diff --git a/test/files/pos/t5953.scala b/test/files/pos/t5953.scala
index 90e7d84646..7ba035ec3b 100644
--- a/test/files/pos/t5953.scala
+++ b/test/files/pos/t5953.scala
@@ -10,7 +10,7 @@ package object foo {
package foo {
object Test {
- def f1[T](xs: Traversable[T]) = xs.convertTo[immutable.Vector]
+ def f1[T](xs: Traversable[T]) = xs.to[immutable.Vector]
def f2[T](xs: Traversable[T]) = xs.build[immutable.Vector]
}
}
diff --git a/test/files/pos/t5968.flags b/test/files/pos/t5968.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t5968.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t5968.scala b/test/files/pos/t5968.scala
new file mode 100644
index 0000000000..0093f84fc0
--- /dev/null
+++ b/test/files/pos/t5968.scala
@@ -0,0 +1,8 @@
+object X {
+ def f(e: Either[Int, X.type]) = e match {
+ case Left(i) => i
+ case Right(X) => 0
+ // SI-5986 spurious exhaustivity warning here
+ }
+}
+
diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check
index 9d4674d7c7..4be98a6b21 100644
--- a/test/files/presentation/ide-bug-1000531.check
+++ b/test/files/presentation/ide-bug-1000531.check
@@ -25,7 +25,6 @@ retrieved 126 members
[accessible: true] `method collectFirst[B](pf: PartialFunction[B,B])Option[B]`
[accessible: true] `method collect[B](pf: PartialFunction[B,B])Iterator[B]`
[accessible: true] `method contains(elem: Any)Boolean`
-[accessible: true] `method convertTo[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]])Col[B]`
[accessible: true] `method copyToArray[B >: B](xs: Array[B])Unit`
[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int)Unit`
[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int, len: Int)Unit`
@@ -111,6 +110,7 @@ retrieved 126 members
[accessible: true] `method toString()String`
[accessible: true] `method toTraversable=> Traversable[B]`
[accessible: true] `method toVector=> Vector[B]`
+[accessible: true] `method to[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]])Col[B]`
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
diff --git a/test/files/run/collection-conversions.check b/test/files/run/collection-conversions.check
index 08d0fa32c5..5e43d25f7e 100644
--- a/test/files/run/collection-conversions.check
+++ b/test/files/run/collection-conversions.check
@@ -11,6 +11,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing Vector ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -24,6 +25,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing List ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -37,6 +39,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing Buffer ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -50,6 +53,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing ParVector ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -63,6 +67,21 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing ParArray ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing Set ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -76,6 +95,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing SetView ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -89,6 +109,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing BufferView ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -102,3 +123,4 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK \ No newline at end of file
diff --git a/test/files/run/collection-conversions.scala b/test/files/run/collection-conversions.scala
index b5c4d8e261..d842742230 100644
--- a/test/files/run/collection-conversions.scala
+++ b/test/files/run/collection-conversions.scala
@@ -1,6 +1,7 @@
import collection._
import mutable.Buffer
import parallel.immutable.ParVector
+import parallel.mutable.ParArray
import reflect.ClassTag
object Test {
@@ -29,30 +30,33 @@ object Test {
val testStream = Stream(1,2,3)
val testArray = Array(1,2,3)
val testParVector = ParVector(1,2,3)
+ val testParArray = ParArray(1,2,3)
def testConversion[A: ClassTag](name: String, col: => GenTraversableOnce[A]): Unit = {
val tmp = col
println("-- Testing " + name + " ---")
printResult("[Direct] Vector ", col.toVector, testVector)
- printResult("[Copy] Vector ", col.convertTo[Vector], testVector)
+ printResult("[Copy] Vector ", col.to[Vector], testVector)
printResult("[Direct] Buffer ", col.toBuffer, testBuffer)
- printResult("[Copy] Buffer ", col.convertTo[Buffer], testBuffer)
+ printResult("[Copy] Buffer ", col.to[Buffer], testBuffer)
printResult("[Direct] GenSeq ", col.toSeq, testGenSeq)
- printResult("[Copy] GenSeq ", col.convertTo[GenSeq], testGenSeq)
- printResult("[Copy] Seq ", col.convertTo[Seq], testSeq)
+ printResult("[Copy] GenSeq ", col.to[GenSeq], testGenSeq)
+ printResult("[Copy] Seq ", col.to[Seq], testSeq)
printResult("[Direct] Stream ", col.toStream, testStream)
- printResult("[Copy] Stream ", col.convertTo[Stream], testStream)
+ printResult("[Copy] Stream ", col.to[Stream], testStream)
printResult("[Direct] Array ", col.toArray, testArray)
- printResult("[Copy] Array ", col.convertTo[Array], testArray)
- printResult("[Copy] ParVector", col.convertTo[ParVector], testParVector)
+ printResult("[Copy] Array ", col.to[Array], testArray)
+ printResult("[Copy] ParVector", col.to[ParVector], testParVector)
+ printResult("[Copy] ParArray ", col.to[ParArray], testParArray)
}
def main(args: Array[String]): Unit = {
- testConversion("iterator", (1 to 3).iterator)
+ testConversion("iterator", (1 to 3).iterator)
testConversion("Vector", Vector(1,2,3))
testConversion("List", List(1,2,3))
testConversion("Buffer", Buffer(1,2,3))
testConversion("ParVector", ParVector(1,2,3))
+ testConversion("ParArray", ParArray(1,2,3))
testConversion("Set", Set(1,2,3))
testConversion("SetView", Set(1,2,3).view)
testConversion("BufferView", Buffer(1,2,3).view)
diff --git a/test/files/run/enrich-gentraversable.check b/test/files/run/enrich-gentraversable.check
index 348b38d6a4..94c66e3692 100644
--- a/test/files/run/enrich-gentraversable.check
+++ b/test/files/run/enrich-gentraversable.check
@@ -2,3 +2,7 @@ List(2, 4)
Array(2, 4)
HW
Vector(72, 108, 108, 32, 114, 108, 100)
+List(2, 4)
+Array(2, 4)
+HW
+Vector(72, 108, 108, 32, 114, 108, 100)
diff --git a/test/files/run/enrich-gentraversable.scala b/test/files/run/enrich-gentraversable.scala
index c9320ff985..52eded55fd 100644
--- a/test/files/run/enrich-gentraversable.scala
+++ b/test/files/run/enrich-gentraversable.scala
@@ -1,30 +1,67 @@
object Test extends App {
- import scala.collection.generic.{ CanBuildFrom, FromRepr, HasElem }
+ import scala.collection.{GenTraversableOnce,GenTraversableLike}
+ import scala.collection.generic._
def typed[T](t : => T) {}
-
- class FilterMapImpl[A, Repr](val r : Repr)(implicit hasElem : HasElem[Repr, A]) {
- def filterMap[B, That](f : A => Option[B])(implicit cbf : CanBuildFrom[Repr, B, That]) : That = r.flatMap(f(_).toSeq)
+ def testTraversableLike = {
+ class FilterMapImpl[A, Repr](val r: GenTraversableLike[A, Repr]) /* extends AnyVal */ {
+ final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
+ r.flatMap(f(_).toSeq)
+ }
+ implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableLike[Repr]): FilterMapImpl[fr.A,Repr] =
+ new FilterMapImpl[fr.A, Repr](fr.conversion(r))
+
+ val l = List(1, 2, 3, 4, 5)
+ val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[List[Int]](fml)
+ println(fml)
+
+ val a = Array(1, 2, 3, 4, 5)
+ val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[Array[Int]](fma)
+ println(fma.deep)
+
+ val s = "Hello World"
+ val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None)
+ typed[String](fms1)
+ println(fms1)
+
+ val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None)
+ typed[IndexedSeq[Int]](fms2)
+ println(fms2)
}
+ def testTraversableOnce = {
+ class FilterMapImpl[A, Repr](val r: GenTraversableOnce[A]) /* extends AnyVal */ {
+ final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = {
+ val b = cbf()
+ for(e <- r.seq) f(e) foreach (b +=)
- implicit def filterMap[Repr : FromRepr](r : Repr) = new FilterMapImpl(r)
-
- val l = List(1, 2, 3, 4, 5)
- val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None)
- typed[List[Int]](fml)
- println(fml)
-
- val a = Array(1, 2, 3, 4, 5)
- val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None)
- typed[Array[Int]](fma)
- println(fma.deep)
+ b.result
+ }
+ }
+ implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] =
+ new FilterMapImpl[fr.A, Repr](fr.conversion(r))
- val s = "Hello World"
- val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None)
- typed[String](fms1)
- println(fms1)
+ val l = List(1, 2, 3, 4, 5)
+ val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[List[Int]](fml)
+ println(fml)
- val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None)
- typed[IndexedSeq[Int]](fms2)
- println(fms2)
+ val a = Array(1, 2, 3, 4, 5)
+ val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[Array[Int]](fma)
+ println(fma.deep)
+
+ val s = "Hello World"
+ val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None)
+ typed[String](fms1)
+ println(fms1)
+
+ val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None)
+ typed[IndexedSeq[Int]](fms2)
+ println(fms2)
+ }
+
+ testTraversableLike
+ testTraversableOnce
}
diff --git a/test/files/run/macro-reify-splice-splice/Macros_1.scala b/test/files/run/macro-reify-splice-splice/Macros_1.scala
index 030a0a217e..4f1b600f63 100644
--- a/test/files/run/macro-reify-splice-splice/Macros_1.scala
+++ b/test/files/run/macro-reify-splice-splice/Macros_1.scala
@@ -1,5 +1,4 @@
import scala.reflect.makro.{Context => Ctx}
-import scala.reflect.{mirror => mr}
object Macros {
def foo = macro Impls.foo
diff --git a/test/files/run/t3326.check b/test/files/run/t3326.check
new file mode 100644
index 0000000000..d0e11cebf7
--- /dev/null
+++ b/test/files/run/t3326.check
@@ -0,0 +1,8 @@
+Map(2 -> Hello, 1 -> World)
+Map(5 -> Foo, 4 -> Bar)
+Map(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World)
+Map(3 -> ?, 2 -> Hello, 1 -> World)
+Map(2 -> Hello, 1 -> World)
+Map(5 -> Foo, 4 -> Bar)
+Map(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World)
+Map(3 -> ?, 2 -> Hello, 1 -> World) \ No newline at end of file
diff --git a/test/files/run/t3326.scala b/test/files/run/t3326.scala
new file mode 100644
index 0000000000..f70cb01504
--- /dev/null
+++ b/test/files/run/t3326.scala
@@ -0,0 +1,74 @@
+
+
+
+import scala.math.Ordering
+
+
+
+/** The heart of the problem: we want to retain the ordering when
+ * using `++` on sorted maps.
+ *
+ * There are two `++` overloads: a generic one in traversables and
+ * a map-specific one in `MapLike`, which knows about the ordering.
+ *
+ * The problem is that the expected return type of the expression
+ * in which `++` appears drives the choice of the overload that is
+ * taken.
+ * `collection.SortedMap` did not have `++` overridden to return
+ * `SortedMap`; it returned `immutable.Map` instead.
+ * This is why `collection.SortedMap` used to resort to the generic
+ * `TraversableLike.++`, which knows nothing about the ordering.
+ *
+ * To keep `collection.SortedMap` from resorting to the more generic `TraversableLike.++`,
+ * we override the `MapLike.++` overload in `collection.SortedMap` to return
+ * the proper type, `SortedMap`.
+ */
+object Test {
+
+ def main(args: Array[String]) {
+ testCollectionSorted()
+ testImmutableSorted()
+ }
+
+ def testCollectionSorted() {
+ import collection._
+ val order = implicitly[Ordering[Int]].reverse
+ var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+ var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+
+ m1 += (1 -> "World")
+ m1 += (2 -> "Hello")
+
+ m2 += (4 -> "Bar")
+ m2 += (5 -> "Foo")
+
+ val m3: SortedMap[Int, String] = m1 ++ m2
+
+ println(m1)
+ println(m2)
+ println(m3)
+
+ println(m1 + (3 -> "?"))
+ }
+
+ def testImmutableSorted() {
+ import collection.immutable._
+ val order = implicitly[Ordering[Int]].reverse
+ var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+ var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+
+ m1 += (1 -> "World")
+ m1 += (2 -> "Hello")
+
+ m2 += (4 -> "Bar")
+ m2 += (5 -> "Foo")
+
+ val m3: SortedMap[Int, String] = m1 ++ m2
+
+ println(m1)
+ println(m2)
+ println(m3)
+
+ println(m1 + (3 -> "?"))
+ }
+}
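
The hunks that change collection.SortedMap itself are not part of this section, so as a hedged sketch only (written against the 2.10-era collections API; the helper name concatSorted and the fold are illustrative, not copied from the patch), the shape of a concatenation that keeps both the ordering and the SortedMap result type is roughly:

import scala.collection.{GenTraversableOnce, SortedMap}

object SortedMapConcatSketch extends App {
  // Illustrative stand-in for an overload whose static result type stays SortedMap,
  // so an expected type of SortedMap no longer forces the generic TraversableLike.++.
  def concatSorted[A, B](m: SortedMap[A, B], xs: GenTraversableOnce[(A, B)]): SortedMap[A, B] =
    xs.seq.foldLeft(m)(_ + _)

  val rev = implicitly[Ordering[Int]].reverse
  val m1  = SortedMap(1 -> "World", 2 -> "Hello")(rev)
  val m2  = SortedMap(4 -> "Bar", 5 -> "Foo")(rev)
  val m3: SortedMap[Int, String] = concatSorted(m1, m2)
  println(m3) // keys stay in reverse order: 5, 4, 2, 1
}

Because every intermediate value is typed as SortedMap, the reversed ordering supplied at construction time survives the concatenation, which is what the t3326 test above checks for both collection.SortedMap and immutable.SortedMap.
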
diff --git a/test/files/run/t4027.check b/test/files/run/t4027.check
new file mode 100644
index 0000000000..bdacfc1c06
--- /dev/null
+++ b/test/files/run/t4027.check
@@ -0,0 +1,12 @@
+Map(2 -> true, 4 -> true)
+Map(1 -> false!, 2 -> true!, 3 -> false!, 4 -> true!)
+Map(2 -> 4, 4 -> 4)
+Map(1 -> 6, 2 -> 5, 3 -> 6, 4 -> 5)
+Map()
+Map(1 -> false!)
+Map(2 -> true, 4 -> true)
+Map(1 -> false!, 2 -> true!, 3 -> false!, 4 -> true!)
+Map(2 -> 4, 4 -> 4)
+Map(1 -> 6, 2 -> 5, 3 -> 6, 4 -> 5)
+Map()
+Map(1 -> false!) \ No newline at end of file
diff --git a/test/files/run/t4027.scala b/test/files/run/t4027.scala
new file mode 100644
index 0000000000..d70ca0cc3a
--- /dev/null
+++ b/test/files/run/t4027.scala
@@ -0,0 +1,27 @@
+
+
+import collection._
+
+
+/** Sorted maps should have `filterKeys` and `mapValues` which return sorted maps.
+ * Mapping, filtering, etc. on these views should return sorted maps again.
+ */
+object Test extends App {
+
+ val sortedmap = SortedMap(1 -> false, 2 -> true, 3 -> false, 4 -> true)
+ println(sortedmap.filterKeys(_ % 2 == 0): SortedMap[Int, Boolean])
+ println(sortedmap.mapValues(_ + "!"): SortedMap[Int, String])
+ println(sortedmap.filterKeys(_ % 2 == 0).map(t => (t._1, t._2.toString.length)): SortedMap[Int, Int])
+ println(sortedmap.mapValues(_ + "!").map(t => (t._1, t._2.toString.length)): SortedMap[Int, Int])
+ println(sortedmap.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): SortedMap[Int, Boolean])
+ println(sortedmap.mapValues(_ + "!").filter(t => t._1 < 2): SortedMap[Int, String])
+
+ val immsortedmap = immutable.SortedMap(1 -> false, 2 -> true, 3 -> false, 4 -> true)
+ println(immsortedmap.filterKeys(_ % 2 == 0): immutable.SortedMap[Int, Boolean])
+ println(immsortedmap.mapValues(_ + "!"): immutable.SortedMap[Int, String])
+ println(immsortedmap.filterKeys(_ % 2 == 0).map(t => (t._1, t._2.toString.length)): immutable.SortedMap[Int, Int])
+ println(immsortedmap.mapValues(_ + "!").map(t => (t._1, t._2.toString.length)): immutable.SortedMap[Int, Int])
+ println(immsortedmap.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): immutable.SortedMap[Int, Boolean])
+ println(immsortedmap.mapValues(_ + "!").filter(t => t._1 < 2): immutable.SortedMap[Int, String])
+
+}
diff --git a/test/files/run/t4935.check b/test/files/run/t4935.check
new file mode 100644
index 0000000000..ef0493b275
--- /dev/null
+++ b/test/files/run/t4935.check
@@ -0,0 +1 @@
+hello
diff --git a/test/files/run/t4935.flags b/test/files/run/t4935.flags
new file mode 100644
index 0000000000..ac14fe5dbd
--- /dev/null
+++ b/test/files/run/t4935.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/run/t4935.scala b/test/files/run/t4935.scala
new file mode 100644
index 0000000000..18631e2041
--- /dev/null
+++ b/test/files/run/t4935.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+ for (i <- 0 to 1) {
+ val a = Foo
+ }
+}
+
+object Foo {
+ println("hello")
+}
diff --git a/test/files/run/t5914.check b/test/files/run/t5914.check
new file mode 100644
index 0000000000..818e321255
--- /dev/null
+++ b/test/files/run/t5914.check
@@ -0,0 +1 @@
+correct
diff --git a/test/files/run/t5914.scala b/test/files/run/t5914.scala
new file mode 100644
index 0000000000..45d8815738
--- /dev/null
+++ b/test/files/run/t5914.scala
@@ -0,0 +1,23 @@
+import scala.reflect.ClassTag
+
+trait Trees {
+ class Tree
+ implicit val ttTag: ClassTag[TypeTree]
+ type TypeTree <: Tree
+ val TypeTree: TypeTreeExtractor
+ abstract class TypeTreeExtractor {
+ def unapply(t: TypeTree): Option[String]
+ }
+ def test(tree: Tree) =
+ tree match {
+ case TypeTree(_) => println("lolwut")
+ case null => println("correct")
+ }
+}
+
+object Test extends App with Trees {
+ val ttTag = implicitly[ClassTag[TypeTree]]
+ case class TypeTree(meh: String) extends Tree
+ object TypeTree extends TypeTreeExtractor
+ test(null) // should not crash
+} \ No newline at end of file
diff --git a/test/files/run/t5966.check b/test/files/run/t5966.check
new file mode 100644
index 0000000000..bfe8358a77
--- /dev/null
+++ b/test/files/run/t5966.check
@@ -0,0 +1,3 @@
+(o()_)("") = List()
+(o("a1")_)("") = WrappedArray(a1)
+(o("a1", "a2")_)("") = WrappedArray(a1, a2)
diff --git a/test/files/run/t5966.scala b/test/files/run/t5966.scala
new file mode 100644
index 0000000000..bbe1a6e874
--- /dev/null
+++ b/test/files/run/t5966.scala
@@ -0,0 +1,9 @@
+object o { def apply(i: AnyRef*)(j: String) = i }
+
+object Test {
+ def main(args: Array[String]) {
+ println("(o()_)(\"\") = " + (o()_)(""))
+ println("(o(\"a1\")_)(\"\") = " + (o("a1")_)(""))
+ println("(o(\"a1\", \"a2\")_)(\"\") = " + (o("a1", "a2")_)(""))
+ }
+}
diff --git a/test/files/run/t5971.check b/test/files/run/t5971.check
new file mode 100644
index 0000000000..0c36a1ff02
--- /dev/null
+++ b/test/files/run/t5971.check
@@ -0,0 +1,4 @@
+r,b
+r
+a,b
+r,a,b \ No newline at end of file
diff --git a/test/files/run/t5971.scala b/test/files/run/t5971.scala
new file mode 100644
index 0000000000..dbd9beebb3
--- /dev/null
+++ b/test/files/run/t5971.scala
@@ -0,0 +1,23 @@
+
+
+
+
+
+/** When using the `AbstractTransformed` abstract inner class in views in order
+ * to force the generation of bridges, one must take care to push the corresponding
+ * collection trait (such as `Iterable` or `Seq`) as far as possible to the
+ * left in the linearization order -- otherwise, methods inherited from these
+ * traits can override the methods already overridden in the view. This was the
+ * case with `takeWhile`.
+ * Mind blowing, I know.
+ */
+object Test {
+
+ def main(args: Array[String]) {
+ println("bar".view.reverse.filter(_ > 'a').mkString(","))
+ println("bar".view.reverse.take(1).mkString(","))
+ println("bar".view.reverse.dropWhile(_ > 'a').mkString(","))
+ println("bar".view.reverse.takeWhile(_ => true).mkString(","))
+ }
+
+}
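
The linearization point made in the comment inside t5971.scala above can be seen in isolation with a small, hypothetical sketch (the trait names below are invented and stand in for a view class and a collection trait, not the real ones): the trait mixed in furthest to the right wins, so a collection trait mixed in to the right of the view clobbers the view's override, while pushing it to the left preserves it.

object LinearizationSketch extends App {
  trait Base                       { def takeWhileish: String = "Base" }
  trait ViewOverride extends Base  { override def takeWhileish = "ViewOverride" }
  trait CollTrait    extends Base  { override def takeWhileish = "CollTrait" }

  // CollTrait sits rightmost here, so its definition wins over the view's override...
  val clobbered = new ViewOverride with CollTrait {}
  // ...whereas pushing the collection trait to the left keeps the view's override on top.
  val preserved = new CollTrait with ViewOverride {}

  println(clobbered.takeWhileish) // prints "CollTrait"
  println(preserved.takeWhileish) // prints "ViewOverride"
}
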
diff --git a/test/files/run/t5986.check b/test/files/run/t5986.check
new file mode 100644
index 0000000000..4101770c6d
--- /dev/null
+++ b/test/files/run/t5986.check
@@ -0,0 +1,15 @@
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4) \ No newline at end of file
diff --git a/test/files/run/t5986.scala b/test/files/run/t5986.scala
new file mode 100644
index 0000000000..8cf7086f98
--- /dev/null
+++ b/test/files/run/t5986.scala
@@ -0,0 +1,36 @@
+
+
+
+import scala.collection._
+
+
+
+/** A sorted set should not replace an existing element
+ * when an equal element is added to the set.
+ */
+object Test {
+
+ class Foo(val name: String, val n: Int) {
+ override def equals(obj: Any): Boolean = obj match { case other: Foo => name == other.name; case _ => false }
+ override def hashCode = name.##
+ override def toString = "Foo(" + name + ", " + n + ")"
+ }
+
+ implicit val ordering: Ordering[Foo] = Ordering.fromLessThan[Foo] { (a, b) => a.name.compareTo(b.name) < 0 }
+
+ def check[S <: Set[Foo]](set: S) {
+ def output(s: Set[Foo]) = println(s.toList.sorted.mkString(","))
+ output(set + new Foo("bar", 2))
+ output(set ++ List(new Foo("bar", 2), new Foo("bar", 3), new Foo("bar", 4)))
+ output(set union Set(new Foo("bar", 2), new Foo("baz", 3), new Foo("bazz", 4)))
+ }
+
+ def main(args: Array[String]) {
+ check(Set(new Foo("bar", 1)))
+ check(immutable.Set(new Foo("bar", 1)))
+ check(mutable.Set(new Foo("bar", 1)))
+ check(immutable.SortedSet(new Foo("bar", 1)))
+ check(mutable.SortedSet(new Foo("bar", 1)))
+ }
+
+}
diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala
index e4b356c889..e2609fa200 100644
--- a/test/files/scalacheck/redblacktree.scala
+++ b/test/files/scalacheck/redblacktree.scala
@@ -121,7 +121,7 @@ package scala.collection.immutable.redblacktree {
override type ModifyParm = Int
override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1)
- override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0, true)
def generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match {
case Some((key, _)) => key.init.mkString + "MN"
@@ -144,7 +144,7 @@ package scala.collection.immutable.redblacktree {
override type ModifyParm = Int
override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
- case (key, _) => update(tree, key, newValue)
+ case (key, _) => update(tree, key, newValue, true)
} getOrElse tree
property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) =>