Diffstat (limited to 'src')
-rw-r--r--  src/build/genprod.scala                                                          13
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala                            20
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala           251
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala  265
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala        282
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala                 37
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala                     38
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala                       14
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala        240
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala                         8
-rw-r--r--  src/library/scala/Function0.scala                                                 8
-rw-r--r--  src/library/scala/Function1.scala                                                 7
-rw-r--r--  src/library/scala/Function2.scala                                                 6
-rw-r--r--  src/library/scala/collection/immutable/List.scala                                 2
-rw-r--r--  src/library/scala/collection/immutable/NumericRange.scala                         8
-rw-r--r--  src/library/scala/collection/immutable/Queue.scala                                9
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala                               6
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala                             2
-rw-r--r--  src/library/scala/math/BigDecimal.scala                                           4
-rw-r--r--  src/library/scala/math/Numeric.scala                                              6
-rw-r--r--  src/library/scala/util/Sorting.scala                                            712
-rw-r--r--  src/library/scala/util/Try.scala                                                  2
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala                               1
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala                                 48
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Imports.scala                               24
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala                         1
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala          8
27 files changed, 1452 insertions, 570 deletions
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index ed436fe2e4..b470348e8c 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -123,7 +123,10 @@ object FunctionOne extends Function(1) {
* def apply(x: Int): Int = x + 1
* }
* assert(succ(0) == anonfun1(0))
- * """)
+ * """) + """
+ *
+ * Note that the difference between `Function1` and [[scala.PartialFunction]]
+ * is that the latter can specify inputs which it will not handle."""
override def moreMethods = """
/** Composes two instances of Function1 in a new Function1, with this function applied last.
@@ -178,13 +181,7 @@ class Function(val i: Int) extends Group("Function") with Arity {
*
* {{{
* object Main extends App {%s}
- * }}}
- *
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-"""
+ * }}}"""
def toStr() = "\"" + ("<function%d>" format i) + "\""
def apply() = {
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index f866c0d038..76af40b330 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -617,18 +617,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self =>
val internalName = cachedJN.toString()
val trackedSym = jsymbol(sym)
reverseJavaName.get(internalName) match {
- case Some(oldsym) if oldsym.exists && trackedSym.exists =>
- assert(
- // In contrast, neither NothingClass nor NullClass show up bytecode-level.
- (oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)),
- s"""|Different class symbols have the same bytecode-level internal name:
- | name: $internalName
- | oldsym: ${oldsym.fullNameString}
- | tracked: ${trackedSym.fullNameString}
- """.stripMargin
- )
- case _ =>
+ case None =>
reverseJavaName.put(internalName, trackedSym)
+ case Some(oldsym) =>
+ // TODO: `duplicateOk` seems pretty ad-hoc (a more aggressive version caused SI-9356 because it called oldSym.exists, which failed in the unpickler; see also SI-5031)
+ def duplicateOk = oldsym == NoSymbol || trackedSym == NoSymbol || (syntheticCoreClasses contains oldsym) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule))
+ if (oldsym != trackedSym && !duplicateOk)
+ devWarning(s"""|Different class symbols have the same bytecode-level internal name:
+ | name: $internalName
+ | oldsym: ${oldsym.fullNameString}
+ | tracked: ${trackedSym.fullNameString}""".stripMargin)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
new file mode 100644
index 0000000000..7bbe1e2a49
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
@@ -0,0 +1,251 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import scala.annotation.switch
+import scala.collection.{mutable, immutable}
+import scala.tools.asm.Opcodes
+import scala.tools.asm.tree._
+import scala.tools.asm.tree.analysis.{Analyzer, Value, Frame, Interpreter}
+import opt.BytecodeUtils._
+
+object AliasingFrame {
+ private var _idCounter: Long = 0l
+ private def nextId = { _idCounter += 1; _idCounter }
+}
+
+class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLocals, nStack) {
+ import Opcodes._
+
+ // Auxiliary constructor required for implementing `AliasingAnalyzer.newFrame`
+ def this(src: Frame[_ <: V]) {
+ this(src.getLocals, src.getMaxStackSize)
+ init(src)
+ }
+
+ /**
+ * For each slot (entry in the `values` array of the frame), an id that uniquely represents
+ * the object stored in it. If two values have the same id, they are aliases of the same
+ * object.
+ */
+ private val aliasIds: Array[Long] = Array.fill(nLocals + nStack)(AliasingFrame.nextId)
+
+ /**
+ * The object alias id for a value index.
+ */
+ def aliasId(entry: Int) = aliasIds(entry)
+
+ /**
+ * Returns the indices of the values array which are aliases of the object `id`.
+ */
+ def valuesWithAliasId(id: Long): Set[Int] = immutable.BitSet.empty ++ aliasIds.indices.iterator.filter(i => aliasId(i) == id)
+
+ /**
+ * The set of aliased values for a given entry in the `values` array.
+ */
+ def aliasesOf(entry: Int): Set[Int] = valuesWithAliasId(aliasIds(entry))
+
+ /**
+ * Define a new alias. For example, given
+ * var a = this // this, a have the same aliasId
+ * then an assignment
+ * b = a
+ * will set the same aliasId for `b`.
+ */
+ private def newAlias(assignee: Int, source: Int): Unit = {
+ aliasIds(assignee) = aliasIds(source)
+ }
+
+ /**
+ * An assignment
+ * a = someUnknownValue()
+ * sets a fresh alias id for `a`.
+ * A stack value is also removed from its alias set when being consumed.
+ */
+ private def removeAlias(assignee: Int): Unit = {
+ aliasIds(assignee) = AliasingFrame.nextId
+ }
+
+ override def execute(insn: AbstractInsnNode, interpreter: Interpreter[V]): Unit = {
+ // Make the extension methods easier to use (otherwise we have to repeat `this`.stackTop)
+ def stackTop: Int = this.stackTop
+ def peekStack(n: Int): V = this.peekStack(n)
+
+ // the val pattern `val (p, c) = f` still allocates a tuple (https://github.com/scala-opt/scala/issues/28)
+ val prodCons = InstructionStackEffect(insn, this) // needs to be called before super.execute, see its doc
+ val consumed = prodCons._1
+ val produced = prodCons._2
+
+ super.execute(insn, interpreter)
+
+ (insn.getOpcode: @switch) match {
+ case ALOAD =>
+ newAlias(assignee = stackTop, source = insn.asInstanceOf[VarInsnNode].`var`)
+
+ case DUP =>
+ val top = stackTop
+ newAlias(assignee = top, source = top - 1)
+
+ case DUP_X1 =>
+ val top = stackTop
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ newAlias(assignee = top - 2, source = top)
+
+ case DUP_X2 =>
+ // Check if the second element on the stack is size 2
+ // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup_x2
+ val isSize2 = peekStack(1).getSize == 2
+ val top = stackTop
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ if (isSize2) {
+ // Size 2 values on the stack only take one slot in the `values` array
+ newAlias(assignee = top - 2, source = top)
+ } else {
+ newAlias(assignee = top - 2, source = top - 3)
+ newAlias(assignee = top - 3, source = top)
+ }
+
+ case DUP2 =>
+ val isSize2 = peekStack(0).getSize == 2
+ val top = stackTop
+ if (isSize2) {
+ newAlias(assignee = top, source = top - 1)
+ } else {
+ newAlias(assignee = top - 1, source = top - 3)
+ newAlias(assignee = top, source = top - 2)
+ }
+
+ case DUP2_X1 =>
+ val isSize2 = peekStack(0).getSize == 2
+ val top = stackTop
+ if (isSize2) {
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ newAlias(assignee = top - 2, source = top)
+ } else {
+ newAlias(assignee = top, source = top - 2)
+ newAlias(assignee = top - 1, source = top - 3)
+ newAlias(assignee = top - 2, source = top - 4)
+ newAlias(assignee = top - 4, source = top)
+ newAlias(assignee = top - 5, source = top - 1)
+ }
+
+ case DUP2_X2 =>
+ val top = stackTop
+ // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup2_x2
+ val v1isSize2 = peekStack(0).getSize == 2
+ if (v1isSize2) {
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ val v2isSize2 = peekStack(1).getSize == 2
+ if (v2isSize2) {
+ // Form 4
+ newAlias(assignee = top - 2, source = top)
+ } else {
+ // Form 2
+ newAlias(assignee = top - 2, source = top - 3)
+ newAlias(assignee = top - 3, source = top)
+ }
+ } else {
+ newAlias(assignee = top, source = top - 2)
+ newAlias(assignee = top - 1, source = top - 3)
+ newAlias(assignee = top - 2, source = top - 4)
+ val v3isSize2 = peekStack(2).getSize == 2
+ if (v3isSize2) {
+ // Form 3
+ newAlias(assignee = top - 3, source = top)
+ newAlias(assignee = top - 4, source = top - 1)
+ } else {
+ // Form 1
+ newAlias(assignee = top - 3, source = top - 5)
+ newAlias(assignee = top - 4, source = top)
+ newAlias(assignee = top - 5, source = top - 1)
+ }
+ }
+
+ case SWAP =>
+ val top = stackTop
+ val idTop = aliasIds(top)
+ aliasIds(top) = aliasIds(top - 1)
+ aliasIds(top - 1) = idTop
+
+ case opcode =>
+ if (opcode == ASTORE) {
+ // Not a separate case because we need to remove the consumed stack value from alias sets after.
+ val stackTopBefore = stackTop - produced + consumed
+ val local = insn.asInstanceOf[VarInsnNode].`var`
+ newAlias(assignee = local, source = stackTopBefore)
+ // if the value written is size 2, it overwrites the subsequent slot, which is then no
+ // longer an alias of anything. see the corresponding case in `Frame.execute`.
+ if (getLocal(local).getSize == 2)
+ removeAlias(local + 1)
+
+ // if the value at the preceding index is size 2, it is no longer valid, so we remove its
+ // aliasing. see corresponding case in `Frame.execute`
+ if (local > 0) {
+ val precedingValue = getLocal(local - 1)
+ if (precedingValue != null && precedingValue.getSize == 2)
+ removeAlias(local - 1)
+ }
+ }
+
+ // Remove consumed stack values from aliasing sets.
+ // Example: iadd
+ // - before: local1, local2, stack1, consumed1, consumed2
+ // - after: local1, local2, stack1, produced1 // stackTop = 3
+ val firstConsumed = stackTop - produced + 1 // firstConsumed = 3
+ for (i <- 0 until consumed)
+ removeAlias(firstConsumed + i) // remove aliases for 3 and 4
+
+ // We don't need to set the alias ids for the produced values: the aliasIds array already
+ // contains fresh ids for unused stack values (ensured by removeAlias).
+ }
+ }
+
+ /**
+ * Merge the AliasingFrame `other` into this AliasingFrame.
+ *
+ * Aliases that are common in both frames are kept. Example:
+ *
+ * var x, y = null
+ * if (...) {
+ * x = a
+ * y = a // (x, y, a) are aliases
+ * } else {
+ * x = a
+ * y = b // (x, a) and (y, b)
+ * }
+ * [...] // (x, a)
+ */
+ override def merge(other: Frame[_ <: V], interpreter: Interpreter[V]): Boolean = {
+ val valuesChanged = super.merge(other, interpreter)
+ var aliasesChanged = false
+ val aliasingOther = other.asInstanceOf[AliasingFrame[_]]
+ for (i <- aliasIds.indices) {
+ val thisAliases = aliasesOf(i)
+ val thisNotOther = thisAliases diff (thisAliases intersect aliasingOther.aliasesOf(i))
+ if (thisNotOther.nonEmpty) {
+ aliasesChanged = true
+ thisNotOther foreach removeAlias
+ }
+ }
+ valuesChanged || aliasesChanged
+ }
+
+ override def init(src: Frame[_ <: V]): Frame[V] = {
+ super.init(src)
+ compat.Platform.arraycopy(src.asInstanceOf[AliasingFrame[_]].aliasIds, 0, aliasIds, 0, aliasIds.length)
+ this
+ }
+}
+
+/**
+ * An analyzer that uses AliasingFrames instead of bare Frames. This can be used when an analysis
+ * needs to track aliases, but doesn't require a more specific Frame subclass.
+ */
+class AliasingAnalyzer[V <: Value](interpreter: Interpreter[V]) extends Analyzer[V](interpreter) {
+ override def newFrame(nLocals: Int, nStack: Int): AliasingFrame[V] = new AliasingFrame(nLocals, nStack)
+ override def newFrame(src: Frame[_ <: V]): AliasingFrame[V] = new AliasingFrame(src)
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala
new file mode 100644
index 0000000000..98e93c125b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala
@@ -0,0 +1,265 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import scala.annotation.switch
+import scala.tools.asm.Opcodes._
+import scala.tools.asm.Type
+import scala.tools.asm.tree.{MultiANewArrayInsnNode, InvokeDynamicInsnNode, MethodInsnNode, AbstractInsnNode}
+import scala.tools.asm.tree.analysis.{Frame, Value}
+import opt.BytecodeUtils._
+import collection.immutable
+
+object InstructionStackEffect {
+ private var cache: immutable.IntMap[(Int, Int)] = immutable.IntMap.empty
+ private def t(x: Int, y: Int): (Int, Int) = {
+ // x can go up to 255 (number of parameters of a method, dimensions in multianewarray); we cache
+ // x up to 10, which covers most cases and limits the cache size. y doesn't go above 6 (see cases).
+ if (x > 10 || y > 6) (x, y)
+ else {
+ val key = (x << 8) + y // this would work for any x < 256
+ if (cache contains key) {
+ cache(key)
+ } else {
+ val r = (x, y)
+ cache += key -> r
+ r
+ }
+ }
+ }
+
+ /**
+ * Returns a pair with the number of stack values consumed and produced by `insn`.
+ * This method requires the `frame` to be in the state **before** executing / interpreting
+ * the `insn`.
+ */
+ def apply[V <: Value](insn: AbstractInsnNode, frame: Frame[V]): (Int, Int) = {
+ def peekStack(n: Int): V = frame.peekStack(n)
+
+ (insn.getOpcode: @switch) match {
+ // The order of opcodes is the same as in Frame.execute.
+ case NOP => t(0, 0)
+
+ case ACONST_NULL |
+ ICONST_M1 |
+ ICONST_0 |
+ ICONST_1 |
+ ICONST_2 |
+ ICONST_3 |
+ ICONST_4 |
+ ICONST_5 |
+ LCONST_0 |
+ LCONST_1 |
+ FCONST_0 |
+ FCONST_1 |
+ FCONST_2 |
+ DCONST_0 |
+ DCONST_1 |
+ BIPUSH |
+ SIPUSH |
+ LDC |
+ ILOAD |
+ LLOAD |
+ FLOAD |
+ DLOAD |
+ ALOAD => t(0, 1)
+
+ case IALOAD |
+ LALOAD |
+ FALOAD |
+ DALOAD |
+ AALOAD |
+ BALOAD |
+ CALOAD |
+ SALOAD => t(2, 1)
+
+ case ISTORE |
+ LSTORE |
+ FSTORE |
+ DSTORE |
+ ASTORE => t(1, 0)
+
+ case IASTORE |
+ LASTORE |
+ FASTORE |
+ DASTORE |
+ AASTORE |
+ BASTORE |
+ CASTORE |
+ SASTORE => t(3, 0)
+
+ case POP => t(1, 0)
+
+ case POP2 =>
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(1, 0) else t(2, 0)
+
+ case DUP => t(0, 1)
+
+ case DUP_X1 => t(2, 3)
+
+ case DUP_X2 =>
+ val isSize2 = peekStack(1).getSize == 2
+ if (isSize2) t(2, 3) else t(3, 4)
+
+ case DUP2 =>
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(0, 1) else t(0, 2)
+
+ case DUP2_X1 =>
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(2, 3) else t(3, 4)
+
+ case DUP2_X2 =>
+ val v1isSize2 = peekStack(0).getSize == 2
+ if (v1isSize2) {
+ val v2isSize2 = peekStack(1).getSize == 2
+ if (v2isSize2) t(2, 3) else t(3, 4)
+ } else {
+ val v3isSize2 = peekStack(2).getSize == 2
+ if (v3isSize2) t(3, 5) else t(4, 6)
+ }
+
+ case SWAP => t(2, 2)
+
+ case IADD |
+ LADD |
+ FADD |
+ DADD |
+ ISUB |
+ LSUB |
+ FSUB |
+ DSUB |
+ IMUL |
+ LMUL |
+ FMUL |
+ DMUL |
+ IDIV |
+ LDIV |
+ FDIV |
+ DDIV |
+ IREM |
+ LREM |
+ FREM |
+ DREM => t(2, 1)
+
+ case INEG |
+ LNEG |
+ FNEG |
+ DNEG => t(1, 1)
+
+ case ISHL |
+ LSHL |
+ ISHR |
+ LSHR |
+ IUSHR |
+ LUSHR |
+ IAND |
+ LAND |
+ IOR |
+ LOR |
+ IXOR |
+ LXOR => t(2, 1)
+
+ case IINC => t(0, 0)
+
+ case I2L |
+ I2F |
+ I2D |
+ L2I |
+ L2F |
+ L2D |
+ F2I |
+ F2L |
+ F2D |
+ D2I |
+ D2L |
+ D2F |
+ I2B |
+ I2C |
+ I2S => t(1, 1)
+
+ case LCMP |
+ FCMPL |
+ FCMPG |
+ DCMPL |
+ DCMPG => t(2, 1)
+
+ case IFEQ |
+ IFNE |
+ IFLT |
+ IFGE |
+ IFGT |
+ IFLE => t(1, 0)
+
+ case IF_ICMPEQ |
+ IF_ICMPNE |
+ IF_ICMPLT |
+ IF_ICMPGE |
+ IF_ICMPGT |
+ IF_ICMPLE |
+ IF_ACMPEQ |
+ IF_ACMPNE => t(2, 0)
+
+ case GOTO => t(0, 0)
+
+ case JSR => t(0, 1)
+
+ case RET => t(0, 0)
+
+ case TABLESWITCH |
+ LOOKUPSWITCH => t(1, 0)
+
+ case IRETURN |
+ LRETURN |
+ FRETURN |
+ DRETURN |
+ ARETURN => t(1, 0) // Frame.execute consumes one stack value
+
+ case RETURN => t(0, 0) // Frame.execute does not change the stack
+
+ case GETSTATIC => t(0, 1)
+
+ case PUTSTATIC => t(1, 0)
+
+ case GETFIELD => t(1, 1)
+
+ case PUTFIELD => t(2, 0)
+
+ case INVOKEVIRTUAL |
+ INVOKESPECIAL |
+ INVOKESTATIC |
+ INVOKEINTERFACE =>
+ val desc = insn.asInstanceOf[MethodInsnNode].desc
+ val cons = Type.getArgumentTypes(desc).length + (if (insn.getOpcode == INVOKESTATIC) 0 else 1)
+ val prod = if (Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1
+ t(cons, prod)
+
+ case INVOKEDYNAMIC =>
+ val desc = insn.asInstanceOf[InvokeDynamicInsnNode].desc
+ val cons = Type.getArgumentTypes(desc).length
+ val prod = if (Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1
+ t(cons, prod)
+
+ case NEW => t(0, 1)
+
+ case NEWARRAY |
+ ANEWARRAY |
+ ARRAYLENGTH => t(1, 1)
+
+ case ATHROW => t(1, 0) // Frame.execute consumes one stack value
+
+ case CHECKCAST |
+ INSTANCEOF => t(1, 1) // Frame.execute does push(pop()) for both of them
+
+ case MONITORENTER |
+ MONITOREXIT => t(1, 0)
+
+ case MULTIANEWARRAY => t(insn.asInstanceOf[MultiANewArrayInsnNode].dims, 1)
+
+ case IFNULL |
+ IFNONNULL => t(1, 0)
+ }
+ }
+
+}
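As a usage sketch (assuming `insn` and the frame `frame` just before it come from a running analysis, as in `AliasingFrame.execute` above), the result can be unpacked into the consumed and produced slot counts:

{{{
val (consumed, produced) = InstructionStackEffect(insn, frame)
// e.g. IADD yields consumed == 2, produced == 1; for POP2 or the DUP forms the
// result depends on the size of the values currently on the stack.
}}}

Note that `AliasingFrame.execute` deliberately avoids this tuple-destructuring `val` pattern because it allocates an intermediate tuple.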
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala
new file mode 100644
index 0000000000..31710dcbee
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala
@@ -0,0 +1,282 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import java.util
+
+import scala.annotation.switch
+import scala.tools.asm.{Type, Opcodes}
+import scala.tools.asm.tree.{MethodInsnNode, LdcInsnNode, AbstractInsnNode}
+import scala.tools.asm.tree.analysis.{Frame, Analyzer, Interpreter, Value}
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils
+import BytecodeUtils._
+
+/**
+ * Some notes on the ASM analyzer framework.
+ *
+ * Value
+ * - Abstract, needs to be implemented for each analysis.
+ * - Represents the desired information about local variables and stack values, for example:
+ * - Is this value known to be null / not null?
+ * - What are the instructions that could potentially have produced this value?
+ *
+ * Interpreter
+ * - Abstract, needs to be implemented for each analysis. Sometimes one can subclass an existing
+ * interpreter, e.g., SourceInterpreter or BasicInterpreter.
+ * - Multiple abstract methods that receive an instruction and the instruction's input values, and
+ * return a value representing the result of that instruction.
+ * - Note: due to control flow, the interpreter can be invoked multiple times for the same
+ * instruction, until reaching a fixed point.
+ * - Abstract `merge` function that computes the least upper bound of two values. Used by
+ * Frame.merge (see below).
+ *
+ * Frame
+ * - Can be used directly for many analyses, no subclass required.
+ * - Every frame has an array of values: one for each local variable and for each stack slot.
+ * - A `top` index stores the index of the current stack top
+ * - NOTE: for a size-2 local variable at index i, the local variable at i+1 is set to an empty
+ * value. However, for a size-2 value at index i on the stack, the value at i+1 holds the next
+ * stack value.
+ * - Defines the `execute(instruction)` method.
+ * - executing mutates the state of the frame according to the effect of the instruction
+ * - pop consumed values from the stack
+ * - pass them to the interpreter together with the instruction
+ * - if applicable, push the resulting value on the stack
+ * - Defines the `merge(otherFrame)` method
+ * - called by the analyzer when multiple control flow paths lead to an instruction
+ * - the frame at the branching instruction is merged into the current frame of the
+ * instruction (held by the analyzer)
+ * - mutates the values of the current frame, merges all values using interpreter.merge.
+ *
+ * Analyzer
+ * - Stores a frame for each instruction
+ * - `merge` function takes an instruction and a frame, merges the existing frame for that instr
+ * (from the frames array) with the new frame passed as argument.
+ * if the frame changed, puts the instruction on the work queue (fixpoint).
+ * - initial frame: initialized for first instr by calling interpreter.new[...]Value
+ * for each slot (locals and params), stored in frames[firstInstr] by calling `merge`
+ * - work queue of instructions (`queue` array, `top` index for next instruction to analyze)
+ * - analyze(method): simulate control flow. while work queue non-empty:
+ * - copy the state of `frames[instr]` into a local frame `current`
+ * - call `current.execute(instr, interpreter)`, mutating the `current` frame
+ * - if it's a branching instruction
+ * - for all potential destination instructions
+ * - merge the destination instruction frame with the `current` frame
+ * (this enqueues the destination instr if its frame changed)
+ * - invoke `newControlFlowEdge` (see below)
+ * - the analyzer also tracks active exception handlers at each instruction
+ * - the empty method `newControlFlowEdge` can be overridden to track control flow if required
+ *
+ *
+ * Some notes on nullness analysis.
+ *
+ * For an instance method, `this` is non-null at entry. So we have to return a NotNull value when
+ * the analyzer is initializing the first frame of a method (see above). This required a change to
+ * the analyzer: previously it would simply call `interpreter.newValue`, where we don't have the
+ * required context. See https://github.com/scala/scala-asm/commit/8133d75032.
+ *
+ * After some operations we know that a certain value is not null (e.g. the receiver of an instance
+ * call). However, the receiver is a value on the stack and consumed while interpreting the
+ * instruction - so we can only gain some knowledge if we know that the receiver was an alias of
+ * some other local variable or stack slot. Therefore we use the AliasingFrame class.
+ *
+ * TODO:
+ * Finally, we'd also like to exploit the knowledge gained from `if (x == null)` tests: x is known
+ * to be null in one branch, not null in the other. This will make use of alias tracking as well.
+ * We still have to figure out how to do this exactly in the analyzer framework.
+ */
+
+/**
+ * Type to represent nullness of values.
+ */
+sealed trait Nullness {
+ final def merge(other: Nullness) = if (this == other) this else Unknown
+}
+case object NotNull extends Nullness
+case object Unknown extends Nullness
+case object Null extends Nullness
+
+/**
+ * Represents the nullness state for a local variable or stack value.
+ *
+ * Note that nullness of primitive values is not tracked; it will always be [[Unknown]].
+ */
+sealed trait NullnessValue extends Value {
+ /**
+ * The nullness of this value.
+ */
+ def nullness: Nullness
+
+ /**
+ * True if this value is a long or double. The Analyzer framework needs to know
+ * the size of each value when interpreting instructions, see `Frame.execute`.
+ */
+ def isSize2: Boolean
+ /**
+ * The size of the slot described by this value. Cannot be 0 because no values are allocated
+ * for void-typed slots, see NullnessInterpreter.newValue.
+ **/
+ def getSize: Int = if (isSize2) 2 else 1
+
+ def merge(other: NullnessValue) = NullnessValue(nullness merge other.nullness, isSize2)
+}
+
+object NullValue extends NullnessValue { def nullness = Null; def isSize2 = false; override def toString = "Null" }
+object UnknownValue1 extends NullnessValue { def nullness = Unknown; def isSize2 = false; override def toString = "Unknown1" }
+object UnknownValue2 extends NullnessValue { def nullness = Unknown; def isSize2 = true; override def toString = "Unknown2" }
+object NotNullValue extends NullnessValue { def nullness = NotNull; def isSize2 = false; override def toString = "NotNull" }
+
+object NullnessValue {
+ def apply(nullness: Nullness, isSize2: Boolean): NullnessValue = {
+ if (nullness == Null) NullValue
+ else if (nullness == NotNull) NotNullValue
+ else if (isSize2) UnknownValue2
+ else UnknownValue1
+ }
+
+ def apply(nullness: Nullness, insn: AbstractInsnNode): NullnessValue = {
+ apply(nullness, isSize2 = BytecodeUtils.instructionResultSize(insn) == 2)
+ }
+}
+
+final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5) {
+ def newValue(tp: Type): NullnessValue = {
+ // ASM loves giving semantics to null. The behavior here is the same as in SourceInterpreter,
+ // which is provided by the framework.
+ //
+ // (1) For the void type, the ASM framework expects newValue to return `null`.
+ // Also, the Frame.returnValue field is `null` for methods with return type void.
+ // Example callsite passing VOID_TYPE: in Analyzer, `newValue(Type.getReturnType(m.desc))`.
+ //
+ // (2) `tp` may also be `null`. When creating the initial frame, the analyzer invokes
+ // `newValue(null)` for each local variable. We have to return a value of size 1.
+ if (tp == Type.VOID_TYPE) null // (1)
+ else NullnessValue(Unknown, isSize2 = tp != null /*(2)*/ && tp.getSize == 2 )
+ }
+
+ override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): NullnessValue = {
+ // For instance methods, the `this` parameter is known to be not null.
+ if (isInstanceMethod && local == 0) NullnessValue(NotNull, isSize2 = false)
+ else super.newParameterValue(isInstanceMethod, local, tp)
+ }
+
+ def newOperation(insn: AbstractInsnNode): NullnessValue = {
+ val nullness = (insn.getOpcode: @switch) match {
+ case Opcodes.ACONST_NULL => Null
+
+ case Opcodes.LDC => insn.asInstanceOf[LdcInsnNode].cst match {
+ case _: String | _: Type => NotNull
+ case _ => Unknown
+ }
+
+ case _ => Unknown
+ }
+
+ // for Opcodes.NEW, we use Unknown. The value will become NotNull after the constructor call.
+ NullnessValue(nullness, insn)
+ }
+
+ def copyOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = value
+
+ def unaryOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = (insn.getOpcode: @switch) match {
+ case Opcodes.CHECKCAST => value
+
+ case Opcodes.NEWARRAY |
+ Opcodes.ANEWARRAY => NullnessValue(NotNull, isSize2 = false)
+
+ case _ => NullnessValue(Unknown, insn)
+ }
+
+ def binaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue): NullnessValue = {
+ NullnessValue(Unknown, insn)
+ }
+
+ def ternaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue, value3: NullnessValue): NullnessValue = {
+ NullnessValue(Unknown, isSize2 = false)
+ }
+
+ def naryOperation(insn: AbstractInsnNode, values: util.List[_ <: NullnessValue]): NullnessValue = (insn.getOpcode: @switch) match {
+ case Opcodes.MULTIANEWARRAY =>
+ NullnessValue(NotNull, isSize2 = false)
+
+ case _ =>
+ // TODO: use a list of methods that are known to return non-null values
+ NullnessValue(Unknown, insn)
+ }
+
+ def returnOperation(insn: AbstractInsnNode, value: NullnessValue, expected: NullnessValue): Unit = ()
+
+ def merge(a: NullnessValue, b: NullnessValue): NullnessValue = a merge b
+}
+
+class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessValue](nLocals, nStack) {
+ // Auxiliary constructor required for implementing `NullnessAnalyzer.newFrame`
+ def this(src: Frame[_ <: NullnessValue]) {
+ this(src.getLocals, src.getMaxStackSize)
+ init(src)
+ }
+
+ override def execute(insn: AbstractInsnNode, interpreter: Interpreter[NullnessValue]): Unit = {
+ import Opcodes._
+
+ // get the object id of the object that is known to be not-null after this operation
+ val nullCheckedAliasId: Long = (insn.getOpcode: @switch) match {
+ case IALOAD |
+ LALOAD |
+ FALOAD |
+ DALOAD |
+ AALOAD |
+ BALOAD |
+ CALOAD |
+ SALOAD =>
+ aliasId(this.stackTop - 1)
+
+ case IASTORE |
+ FASTORE |
+ AASTORE |
+ BASTORE |
+ CASTORE |
+ SASTORE |
+ LASTORE |
+ DASTORE =>
+ aliasId(this.stackTop - 2)
+
+ case GETFIELD =>
+ aliasId(this.stackTop)
+
+ case PUTFIELD =>
+ aliasId(this.stackTop - 1)
+
+ case INVOKEVIRTUAL |
+ INVOKESPECIAL |
+ INVOKEINTERFACE =>
+ val desc = insn.asInstanceOf[MethodInsnNode].desc
+ val numArgs = Type.getArgumentTypes(desc).length
+ aliasId(this.stackTop - numArgs)
+
+ case ARRAYLENGTH |
+ MONITORENTER |
+ MONITOREXIT =>
+ aliasId(this.stackTop)
+
+ case _ =>
+ -1
+ }
+
+ super.execute(insn, interpreter)
+
+ if (nullCheckedAliasId != -1) {
+ for (i <- valuesWithAliasId(nullCheckedAliasId))
+ this.setValue(i, NotNullValue)
+ }
+ }
+}
+
+/**
+ * This class is required to override the `newFrame` methods, which makes sure the analyzer
+ * uses NullnessFrames.
+ */
+class NullnessAnalyzer extends Analyzer[NullnessValue](new NullnessInterpreter) {
+ override def newFrame(nLocals: Int, nStack: Int): NullnessFrame = new NullnessFrame(nLocals, nStack)
+ override def newFrame(src: Frame[_ <: NullnessValue]): NullnessFrame = new NullnessFrame(src)
+}
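A hedged sketch of how the nullness result might be queried at a call site, mirroring what `CallGraph` does below (`classInternalName`, `methodNode` and `call` are assumed to be in scope; `frameAt` and `peekStack` are the extension methods added to `BytecodeUtils` in this change):

{{{
import scala.tools.asm.Type
import scala.tools.asm.tree.{MethodInsnNode, MethodNode}
import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._

def receiverIsKnownNotNull(classInternalName: String, methodNode: MethodNode, call: MethodInsnNode): Boolean = {
  val analyzer = new NullnessAnalyzer
  analyzer.analyze(classInternalName, methodNode)
  val frame   = analyzer.frameAt(call, methodNode)
  val numArgs = Type.getArgumentTypes(call.desc).length
  // at the call site the receiver sits `numArgs` slots below the stack top
  frame.peekStack(numArgs).nullness == NotNull
}
}}}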
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
index 201ab15177..9bd016f964 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
@@ -170,6 +170,8 @@ object BytecodeUtils {
new InsnNode(op)
}
+ def instructionResultSize(instruction: AbstractInsnNode) = InstructionResultSize(instruction)
+
def labelReferences(method: MethodNode): Map[LabelNode, Set[AnyRef]] = {
val res = mutable.Map.empty[LabelNode, Set[AnyRef]]
def add(l: LabelNode, ref: AnyRef) = if (res contains l) res(l) = res(l) + ref else res(l) = Set(ref)
@@ -328,13 +330,38 @@ object BytecodeUtils {
class AsmAnalyzer[V <: Value](methodNode: MethodNode, classInternalName: InternalName, interpreter: Interpreter[V] = new BasicInterpreter) {
val analyzer = new Analyzer(interpreter)
analyzer.analyze(classInternalName, methodNode)
- def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.getFrames()(methodNode.instructions.indexOf(instruction))
+ def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode)
+ }
+
+ implicit class AnalyzerExtensions[V <: Value](val analyzer: Analyzer[V]) extends AnyVal {
+ def frameAt(instruction: AbstractInsnNode, methodNode: MethodNode): Frame[V] = analyzer.getFrames()(methodNode.instructions.indexOf(instruction))
}
- implicit class `frame extensions`[V <: Value](val frame: Frame[V]) extends AnyVal {
- def peekDown(n: Int): V = {
- val topIndex = frame.getStackSize - 1
- frame.getStack(topIndex - n)
+ implicit class FrameExtensions[V <: Value](val frame: Frame[V]) extends AnyVal {
+ /**
+ * The value `n` positions down the stack.
+ */
+ def peekStack(n: Int): V = frame.getStack(frame.getStackSize - 1 - n)
+
+ /**
+ * The index of the current stack top.
+ */
+ def stackTop = frame.getLocals + frame.getStackSize - 1
+
+ /**
+ * Gets the value at slot i, where i may be a local or a stack index.
+ */
+ def getValue(i: Int): V = {
+ if (i < frame.getLocals) frame.getLocal(i)
+ else frame.getStack(i - frame.getLocals)
+ }
+
+ /**
+ * Sets the value at slot i, where i may be a local or a stack index.
+ */
+ def setValue(i: Int, value: V): Unit = {
+ if (i < frame.getLocals) frame.setLocal(i, value)
+ else frame.setStack(i - frame.getLocals, value)
}
}
}
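A small illustration of the unified slot addressing introduced by these extensions (assuming `frame` has a non-empty stack): indices below `frame.getLocals` address local variables, higher indices address the stack, so `getValue(stackTop)` and `peekStack(0)` denote the same value:

{{{
val top     = frame.peekStack(0)             // value on top of the stack
val sameTop = frame.getValue(frame.stackTop) // same value via local/stack indexing
assert(top == sameTop)
}}}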
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
index 028f0f8fa6..0932564b1f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
@@ -8,12 +8,15 @@ package backend.jvm
package opt
import scala.reflect.internal.util.{NoPosition, Position}
+import scala.tools.asm.tree.analysis.{Value, Analyzer, BasicInterpreter}
+import scala.tools.asm.{Opcodes, Type}
import scala.tools.asm.tree._
import scala.collection.convert.decorateAsScala._
-import scala.tools.nsc.backend.jvm.BTypes.{MethodInlineInfo, InternalName}
+import scala.tools.nsc.backend.jvm.BTypes.InternalName
import scala.tools.nsc.backend.jvm.BackendReporting._
-import scala.tools.nsc.backend.jvm.opt.BytecodeUtils.AsmAnalyzer
+import scala.tools.nsc.backend.jvm.analysis.{NotNull, NullnessAnalyzer}
import ByteCodeRepository.{Source, CompilationUnit}
+import BytecodeUtils._
class CallGraph[BT <: BTypes](val btypes: BT) {
import btypes._
@@ -93,12 +96,25 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
// TODO: run dataflow analyses to make the call graph more precise
// - producers to get forwarded parameters (ForwardedParam)
// - typeAnalysis for more precise argument types, more precise callee
- // - nullAnalysis to skip emitting the receiver-null-check when inlining
- // TODO: for now we run a basic analyzer to get the stack height at the call site.
- // once we run a more elaborate analyzer (types, nullness), we can get the stack height out of there.
+ // For now we run a NullnessAnalyzer. It is used to determine if the receiver of an instance
+ // call is known to be not-null, in which case we don't have to emit a null check when inlining.
+ // It is also used to get the stack height at the call site.
localOpt.minimalRemoveUnreachableCode(methodNode, definingClass.internalName)
- val analyzer = new AsmAnalyzer(methodNode, definingClass.internalName)
+
+ val analyzer: Analyzer[_ <: Value] = {
+ if (compilerSettings.YoptNullnessTracking) new NullnessAnalyzer
+ else new Analyzer(new BasicInterpreter)
+ }
+ analyzer.analyze(definingClass.internalName, methodNode)
+
+ def receiverNotNullByAnalysis(call: MethodInsnNode, numArgs: Int) = analyzer match {
+ case nullnessAnalyzer: NullnessAnalyzer =>
+ val frame = nullnessAnalyzer.frameAt(call, methodNode)
+ frame.getStack(frame.getStackSize - 1 - numArgs).nullness == NotNull
+
+ case _ => false
+ }
methodNode.instructions.iterator.asScala.collect({
case call: MethodInsnNode =>
@@ -126,13 +142,19 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
Nil
}
+ val receiverNotNull = call.getOpcode == Opcodes.INVOKESTATIC || {
+ val numArgs = Type.getArgumentTypes(call.desc).length
+ receiverNotNullByAnalysis(call, numArgs)
+ }
+
Callsite(
callsiteInstruction = call,
callsiteMethod = methodNode,
callsiteClass = definingClass,
callee = callee,
argInfos = argInfos,
- callsiteStackHeight = analyzer.frameAt(call).getStackSize,
+ callsiteStackHeight = analyzer.frameAt(call, methodNode).getStackSize,
+ receiverKnownNotNull = receiverNotNull,
callsitePosition = callsitePositions.getOrElse(call, NoPosition)
)
}).toList
@@ -154,7 +176,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
*/
final case class Callsite(callsiteInstruction: MethodInsnNode, callsiteMethod: MethodNode, callsiteClass: ClassBType,
callee: Either[OptimizerWarning, Callee], argInfos: List[ArgInfo],
- callsiteStackHeight: Int, callsitePosition: Position) {
+ callsiteStackHeight: Int, receiverKnownNotNull: Boolean, callsitePosition: Position) {
override def toString =
"Invocation of" +
s" ${callee.map(_.calleeDeclarationClass.internalName).getOrElse("?")}.${callsiteInstruction.name + callsiteInstruction.desc}" +
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
index ac5c9ce2e6..814c78b69c 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
@@ -49,7 +49,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
if (callGraph.callsites contains request.callsiteInstruction) {
val r = inline(request.callsiteInstruction, request.callsiteStackHeight, request.callsiteMethod, request.callsiteClass,
callee.callee, callee.calleeDeclarationClass,
- receiverKnownNotNull = false, keepLineNumbers = false)
+ request.receiverKnownNotNull, keepLineNumbers = false)
for (warning <- r) {
if ((callee.annotatedInline && btypes.compilerSettings.YoptWarningEmitAtInlineFailed) || warning.emitWarning(compilerSettings)) {
@@ -89,7 +89,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
*/
def selectCallsitesForInlining: List[Callsite] = {
callsites.valuesIterator.filter({
- case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, pos) =>
+ case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, _, pos) =>
val res = doInlineCallsite(callsite)
if (!res) {
@@ -112,7 +112,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
res
- case Callsite(ins, _, _, Left(warning), _, _, pos) =>
+ case Callsite(ins, _, _, Left(warning), _, _, _, pos) =>
if (warning.emitWarning(compilerSettings))
backendReporting.inlinerWarning(pos, s"failed to determine if ${ins.name} should be inlined:\n$warning")
false
@@ -123,7 +123,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
* The current inlining heuristics are simple: inline calls to methods annotated @inline.
*/
def doInlineCallsite(callsite: Callsite): Boolean = callsite match {
- case Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, pos) =>
+ case Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, _, pos) =>
if (compilerSettings.YoptInlineHeuristics.value == "everything") safeToInline
else annotatedInline && safeToInline
@@ -189,7 +189,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
// there's no need to run eliminateUnreachableCode here. building the call graph does that
// already, no code can become unreachable in the meantime.
val analyzer = new AsmAnalyzer(callsite.callsiteMethod, callsite.callsiteClass.internalName, new SourceInterpreter)
- val receiverValue = analyzer.frameAt(callsite.callsiteInstruction).peekDown(traitMethodArgumentTypes.length)
+ val receiverValue = analyzer.frameAt(callsite.callsiteInstruction).peekStack(traitMethodArgumentTypes.length)
for (i <- receiverValue.insns.asScala) {
val cast = new TypeInsnNode(CHECKCAST, selfParamType.internalName)
callsite.callsiteMethod.instructions.insert(i, cast)
@@ -215,6 +215,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
calleeInfoWarning = infoWarning)),
argInfos = Nil,
callsiteStackHeight = callsite.callsiteStackHeight,
+ receiverKnownNotNull = callsite.receiverKnownNotNull,
callsitePosition = callsite.callsitePosition
)
callGraph.callsites(newCallsiteInstruction) = staticCallsite
@@ -400,7 +401,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
val inlinedReturn = instructionMap(originalReturn)
val returnReplacement = new InsnList
- def drop(slot: Int) = returnReplacement add getPop(frame.peekDown(slot).getSize)
+ def drop(slot: Int) = returnReplacement add getPop(frame.peekStack(slot).getSize)
// for non-void methods, store the stack top into the return local variable
if (hasReturnValue) {
@@ -444,6 +445,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
callee = originalCallsite.callee,
argInfos = Nil, // TODO: re-compute argInfos for new destination (once we actually compute them)
callsiteStackHeight = callsiteStackHeight + originalCallsite.callsiteStackHeight,
+ receiverKnownNotNull = originalCallsite.receiverKnownNotNull,
callsitePosition = originalCallsite.callsitePosition
)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala
new file mode 100644
index 0000000000..8d744f6d13
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala
@@ -0,0 +1,240 @@
+package scala.tools.nsc.backend.jvm.opt
+
+import scala.annotation.switch
+import scala.tools.asm.{Handle, Type, Opcodes}
+import scala.tools.asm.tree._
+
+object InstructionResultSize {
+ import Opcodes._
+ def apply(instruction: AbstractInsnNode): Int = (instruction.getOpcode: @switch) match {
+ // The order of opcodes is (almost) the same as in Opcodes.java
+ case ACONST_NULL => 1
+
+ case ICONST_M1 |
+ ICONST_0 |
+ ICONST_1 |
+ ICONST_2 |
+ ICONST_3 |
+ ICONST_4 |
+ ICONST_5 => 1
+
+ case LCONST_0 |
+ LCONST_1 => 2
+
+ case FCONST_0 |
+ FCONST_1 |
+ FCONST_2 => 1
+
+ case DCONST_0 |
+ DCONST_1 => 2
+
+ case BIPUSH |
+ SIPUSH => 1
+
+ case LDC =>
+ instruction.asInstanceOf[LdcInsnNode].cst match {
+ case _: java.lang.Integer |
+ _: java.lang.Float |
+ _: String |
+ _: Type |
+ _: Handle => 1
+
+ case _: java.lang.Long |
+ _: java.lang.Double => 2
+ }
+
+ case ILOAD |
+ FLOAD |
+ ALOAD => 1
+
+ case LLOAD |
+ DLOAD => 2
+
+ case IALOAD |
+ FALOAD |
+ AALOAD |
+ BALOAD |
+ CALOAD |
+ SALOAD => 1
+
+ case LALOAD |
+ DALOAD => 2
+
+ case ISTORE |
+ LSTORE |
+ FSTORE |
+ DSTORE |
+ ASTORE => 0
+
+ case IASTORE |
+ LASTORE |
+ FASTORE |
+ DASTORE |
+ AASTORE |
+ BASTORE |
+ CASTORE |
+ SASTORE => 0
+
+ case POP |
+ POP2 => 0
+
+ case DUP |
+ DUP_X1 |
+ DUP_X2 |
+ DUP2 |
+ DUP2_X1 |
+ DUP2_X2 |
+ SWAP => throw new IllegalArgumentException("Can't compute the size of DUP/SWAP without knowing what's on stack top")
+
+ case IADD |
+ FADD => 1
+
+ case LADD |
+ DADD => 2
+
+ case ISUB |
+ FSUB => 1
+
+ case LSUB |
+ DSUB => 2
+
+ case IMUL |
+ FMUL => 1
+
+ case LMUL |
+ DMUL => 2
+
+ case IDIV |
+ FDIV => 1
+
+ case LDIV |
+ DDIV => 2
+
+ case IREM |
+ FREM => 1
+
+ case LREM |
+ DREM => 2
+
+ case INEG |
+ FNEG => 1
+
+ case LNEG |
+ DNEG => 2
+
+ case ISHL |
+ ISHR => 1
+
+ case LSHL |
+ LSHR => 2
+
+ case IUSHR => 1
+
+ case LUSHR => 2
+
+ case IAND |
+ IOR |
+ IXOR => 1
+
+ case LAND |
+ LOR |
+ LXOR => 2
+
+ case IINC => 1
+
+ case I2F |
+ L2I |
+ L2F |
+ F2I |
+ D2I |
+ D2F |
+ I2B |
+ I2C |
+ I2S => 1
+
+ case I2L |
+ I2D |
+ L2D |
+ F2L |
+ F2D |
+ D2L => 2
+
+ case LCMP |
+ FCMPL |
+ FCMPG |
+ DCMPL |
+ DCMPG => 1
+
+ case IFEQ |
+ IFNE |
+ IFLT |
+ IFGE |
+ IFGT |
+ IFLE => 0
+
+ case IF_ICMPEQ |
+ IF_ICMPNE |
+ IF_ICMPLT |
+ IF_ICMPGE |
+ IF_ICMPGT |
+ IF_ICMPLE |
+ IF_ACMPEQ |
+ IF_ACMPNE => 0
+
+ case GOTO => 0
+
+ case JSR => throw new IllegalArgumentException("Subroutines are not supported.")
+
+ case RET => 0
+
+ case TABLESWITCH |
+ LOOKUPSWITCH => 0
+
+ case IRETURN |
+ FRETURN |
+ ARETURN => 1
+
+ case LRETURN |
+ DRETURN => 2
+
+ case RETURN => 0
+
+ case GETSTATIC => Type.getType(instruction.asInstanceOf[FieldInsnNode].desc).getSize
+
+ case PUTSTATIC => 0
+
+ case GETFIELD => Type.getType(instruction.asInstanceOf[FieldInsnNode].desc).getSize
+
+ case PUTFIELD => 0
+
+ case INVOKEVIRTUAL |
+ INVOKESPECIAL |
+ INVOKESTATIC |
+ INVOKEINTERFACE =>
+ val desc = instruction.asInstanceOf[MethodInsnNode].desc
+ Type.getReturnType(desc).getSize
+
+ case INVOKEDYNAMIC =>
+ val desc = instruction.asInstanceOf[InvokeDynamicInsnNode].desc
+ Type.getReturnType(desc).getSize
+
+ case NEW => 1
+
+ case NEWARRAY |
+ ANEWARRAY |
+ ARRAYLENGTH => 1
+
+ case ATHROW => 0
+
+ case CHECKCAST |
+ INSTANCEOF => 1
+
+ case MONITORENTER |
+ MONITOREXIT => 0
+
+ case MULTIANEWARRAY => 1
+
+ case IFNULL |
+ IFNONNULL => 0
+ }
+}
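A small hedged example of the result-size computation for a call instruction (the callee chosen here is arbitrary): a method returning `long` produces a size-2 value:

{{{
import scala.tools.asm.Opcodes
import scala.tools.asm.tree.MethodInsnNode

val call = new MethodInsnNode(Opcodes.INVOKESTATIC, "java/lang/System", "nanoTime", "()J", false)
assert(InstructionResultSize(call) == 2)
}}}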
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 35ee889c58..953e43eaca 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -234,15 +234,16 @@ trait ScalaSettings extends AbsScalaSettings
val emptyLineNumbers = Choice("empty-line-numbers", "Eliminate unnecessary line number information.")
val emptyLabels = Choice("empty-labels", "Eliminate and collapse redundant labels in the bytecode.")
val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.")
- val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled")
- val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath")
+ val nullnessTracking = Choice("nullness-tracking", "Track nullness / non-nullness of local variables and apply optimizations.")
+ val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled.")
+ val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath.")
val lNone = Choice("l:none", "Don't enable any optimizations.")
private val defaultChoices = List(unreachableCode)
val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString(","), expandsTo = defaultChoices)
- private val methodChoices = List(unreachableCode, simplifyJumps, emptyLineNumbers, emptyLabels, compactLocals)
+ private val methodChoices = List(unreachableCode, simplifyJumps, emptyLineNumbers, emptyLabels, compactLocals, nullnessTracking)
val lMethod = Choice("l:method", "Enable intra-method optimizations: "+ methodChoices.mkString(","), expandsTo = methodChoices)
private val projectChoices = List(lMethod, inlineProject)
@@ -264,6 +265,7 @@ trait ScalaSettings extends AbsScalaSettings
def YoptEmptyLineNumbers = Yopt.contains(YoptChoices.emptyLineNumbers)
def YoptEmptyLabels = Yopt.contains(YoptChoices.emptyLabels)
def YoptCompactLocals = Yopt.contains(YoptChoices.compactLocals)
+ def YoptNullnessTracking = Yopt.contains(YoptChoices.nullnessTracking)
def YoptInlineProject = Yopt.contains(YoptChoices.inlineProject)
def YoptInlineGlobal = Yopt.contains(YoptChoices.inlineGlobal)
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index e13aaad7bc..15d0f14938 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
// GENERATED CODE: DO NOT EDIT.
-// genprod generated these sources at: Sun Sep 15 20:42:00 CEST 2013
+// genprod generated these sources at: Mon Jun 08 18:05:40 CEST 2015
package scala
@@ -26,12 +26,6 @@ package scala
* assert(javaVersion() == anonfun0())
* }
* }}}
- *
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-
*/
trait Function0[@specialized(Specializable.Primitives) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 620dcc19aa..572901c6f3 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -25,11 +25,8 @@ package scala
* }
* }}}
*
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-
+ * Note that the difference between `Function1` and [[scala.PartialFunction]]
+ * is that the latter can specify inputs which it will not handle.
*/
@annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.")
trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index 5690adb56a..e2c094ea40 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -25,12 +25,6 @@ package scala
* assert(max(0, 1) == anonfun2(0, 1))
* }
* }}}
- *
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-
*/
trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 254f14f13c..82e38d3549 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -462,6 +462,7 @@ object List extends SeqFactory[List] {
private class SerializationProxy[A](@transient private var orig: List[A]) extends Serializable {
private def writeObject(out: ObjectOutputStream) {
+ out.defaultWriteObject()
var xs: List[A] = orig
while (!xs.isEmpty) {
out.writeObject(xs.head)
@@ -473,6 +474,7 @@ object List extends SeqFactory[List] {
// Java serialization calls this before readResolve during de-serialization.
// Read the whole list and store it in `orig`.
private def readObject(in: ObjectInputStream) {
+ in.defaultReadObject()
val builder = List.newBuilder[A]
while (true) in.readObject match {
case ListSerializeEnd =>
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index f1ac161e9a..28e56a6d87 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -12,6 +12,9 @@ package immutable
import mutable.{ Builder, ListBuffer }
+// TODO: Now the specialization exists there is no clear reason to have
+// separate classes for Range/NumericRange. Investigate and consolidate.
+
/** `NumericRange` is a more generic version of the
* `Range` class which works with arbitrary types.
* It must be supplied with an `Integral` implementation of the
@@ -28,9 +31,6 @@ import mutable.{ Builder, ListBuffer }
* assert(r1 sameElements r2.map(_ - veryBig))
* }}}
*
- * TODO: Now the specialization exists there is no clear reason to have
- * separate classes for Range/NumericRange. Investigate and consolidate.
- *
* @author Paul Phillips
* @version 2.8
* @define Coll `NumericRange`
@@ -266,7 +266,7 @@ object NumericRange {
// Numbers may be big.
val one = num.one
val limit = num.fromInt(Int.MaxValue)
- def check(t: T): T =
+ def check(t: T): T =
if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.")
else t
// If the range crosses zero, it might overflow when subtracted
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 98266716cc..53af3ce158 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -56,11 +56,12 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
* @throws java.util.NoSuchElementException if the queue is too short.
*/
override def apply(n: Int): A = {
- val len = out.length
- if (n < len) out.apply(n)
+ val olen = out.length
+ if (n < olen) out.apply(n)
else {
- val m = n - len
- if (m < in.length) in.reverse.apply(m)
+ val m = n - olen
+ val ilen = in.length
+ if (m < ilen) in.apply(ilen - m - 1)
else throw new NoSuchElementException("index out of range")
}
}
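The rewritten branch avoids materializing `in.reverse` for a single lookup; for an in-range index the two expressions select the same element, as a small check with hypothetical values shows:

{{{
val in = List(5, 4, 3)   // rear of the queue, stored in reverse insertion order
val m  = 1
assert(in.reverse.apply(m) == in.apply(in.length - m - 1))   // both yield 4
}}}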
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index cf7b7e272a..7edd36dc22 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -1180,7 +1180,13 @@ object Stream extends SeqFactory[Stream] {
* to streams.
*/
class ConsWrapper[A](tl: => Stream[A]) {
+ /** Construct a stream consisting of a given first element followed by elements
+ * from a lazily evaluated Stream.
+ */
def #::(hd: A): Stream[A] = cons(hd, tl)
+ /** Construct a stream consisting of the concatenation of the given stream and
+ * a lazily evaluated Stream.
+ */
def #:::(prefix: Stream[A]): Stream[A] = prefix append tl
}
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 8faaf97741..f9bab40a1e 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -15,7 +15,7 @@ import immutable.{List, Nil, ::}
import java.io._
import scala.annotation.migration
-/** A `Buffer` implementation back up by a list. It provides constant time
+/** A `Buffer` implementation backed by a list. It provides constant time
* prepend and append. Most other operations are linear.
*
* @author Matthias Zenger
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index d6e2963ad8..6bb35606a6 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -49,7 +49,7 @@ object BigDecimal {
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`, rounding if necessary. */
def decimal(d: Double, mc: MathContext): BigDecimal =
- new BigDecimal(new BigDec(java.lang.Double.toString(d), mc))
+ new BigDecimal(new BigDec(java.lang.Double.toString(d), mc), mc)
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`. */
def decimal(d: Double): BigDecimal = decimal(d, defaultMathContext)
@@ -59,7 +59,7 @@ object BigDecimal {
* `0.1 != 0.1f`.
*/
def decimal(f: Float, mc: MathContext): BigDecimal =
- new BigDecimal(new BigDec(java.lang.Float.toString(f), mc))
+ new BigDecimal(new BigDec(java.lang.Float.toString(f), mc), mc)
/** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`.
* Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index eafbf96993..9245798c17 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -134,7 +134,7 @@ object Numeric {
def div(x: Float, y: Float): Float = x / y
}
trait FloatAsIfIntegral extends FloatIsConflicted with Integral[Float] {
- def quot(x: Float, y: Float): Float = (BigDecimal(x) / BigDecimal(y)).floatValue
+ def quot(x: Float, y: Float): Float = (BigDecimal(x) quot BigDecimal(y)).floatValue
def rem(x: Float, y: Float): Float = (BigDecimal(x) remainder BigDecimal(y)).floatValue
}
implicit object FloatIsFractional extends FloatIsFractional with Ordering.FloatOrdering
@@ -158,7 +158,7 @@ object Numeric {
def div(x: Double, y: Double): Double = x / y
}
trait DoubleAsIfIntegral extends DoubleIsConflicted with Integral[Double] {
- def quot(x: Double, y: Double): Double = (BigDecimal(x) / BigDecimal(y)).doubleValue
+ def quot(x: Double, y: Double): Double = (BigDecimal(x) quot BigDecimal(y)).doubleValue
def rem(x: Double, y: Double): Double = (BigDecimal(x) remainder BigDecimal(y)).doubleValue
}
@@ -178,7 +178,7 @@ object Numeric {
def div(x: BigDecimal, y: BigDecimal): BigDecimal = x / y
}
trait BigDecimalAsIfIntegral extends BigDecimalIsConflicted with Integral[BigDecimal] {
- def quot(x: BigDecimal, y: BigDecimal): BigDecimal = x / y
+ def quot(x: BigDecimal, y: BigDecimal): BigDecimal = x quot y
def rem(x: BigDecimal, y: BigDecimal): BigDecimal = x remainder y
}
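The switch from `/` to `quot` matters here because the two operations differ on `BigDecimal`: `/` performs decimal division, while `quot` truncates to the integer quotient and is consistent with `remainder`, for example:

{{{
assert(BigDecimal(7) / BigDecimal(2) == BigDecimal("3.5"))
assert((BigDecimal(7) quot BigDecimal(2)) == BigDecimal(3))
assert((BigDecimal(7) remainder BigDecimal(2)) == BigDecimal(1))
}}}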
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index 2e021ad9d9..ee2bdbc4a7 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, Ross Judson **
+** / __/ __// _ | / / / _ | (c) 2006-2015, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,518 +9,276 @@
package scala
package util
-import scala.reflect.{ ClassTag, classTag }
-import scala.math.{ Ordering, max, min }
+import scala.reflect.ClassTag
+import scala.math.Ordering
-/** The Sorting object provides functions that can sort various kinds of
- * objects. You can provide a comparison function, or you can request a sort
- * of items that are viewable as [[scala.math.Ordered]]. Some sorts that
- * operate directly on a subset of value types are also provided. These
- * implementations are derived from those in the Sun JDK.
+/** The `Sorting` object provides convenience wrappers for `java.util.Arrays.sort`.
+ * Methods that defer to `java.util.Arrays.sort` say so explicitly, and state
+ * under what conditions they do.
*
- * Note that stability doesn't matter for value types, so use the `quickSort`
- * variants for those. `stableSort` is intended to be used with
- * objects when the prior ordering should be preserved, where possible.
+ * `Sorting` also implements a general-purpose quicksort and stable (merge) sort
+ * for those cases where `java.util.Arrays.sort` could only be used at the cost
+ * of a large memory penalty. If performance rather than memory usage is the
+ * primary concern, one may prefer strategies that use `java.util.Arrays.sort`
+ * directly, e.g. by boxing primitives so that a custom ordering can be
+ * applied to them.
+ *
+ * `Sorting` provides methods that take a comparison function, or that sort
+ * items that are [[scala.math.Ordered]] or that otherwise have an implicit
+ * or explicit [[scala.math.Ordering]].
+ *
+ * Note also that high-performance non-default sorts for numeric types
+ * are not provided. If this is required, it is advisable to investigate
+ * other libraries that cover this use case.
*
* @author Ross Judson
- * @version 1.0
+ * @author Adriaan Moors
+ * @author Rex Kerr
+ * @version 1.1
*/
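The comment above suggests boxing primitives when `java.util.Arrays.sort` with a custom ordering is needed for performance; a hypothetical sketch of that strategy (descending sort of an `Array[Int]`):

    val xs: Array[Int] = Array(3, 1, 2)
    // Box to java.lang.Integer so the comparator-based overload of Arrays.sort applies.
    val boxed: Array[Integer] = xs.map(i => Integer.valueOf(i))
    java.util.Arrays.sort(boxed, new java.util.Comparator[Integer] {
      def compare(a: Integer, b: Integer): Int = b.compareTo(a)  // descending
    })
    val sortedDesc: Array[Int] = boxed.map(_.intValue)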
object Sorting {
- /** Quickly sort an array of Doubles. */
- def quickSort(a: Array[Double]) { sort1(a, 0, a.length) }
-
- /** Quickly sort an array of items with an implicit Ordering. */
- def quickSort[K: Ordering](a: Array[K]) { sort1(a, 0, a.length) }
-
- /** Quickly sort an array of Ints. */
- def quickSort(a: Array[Int]) { sort1(a, 0, a.length) }
-
- /** Quickly sort an array of Floats. */
- def quickSort(a: Array[Float]) { sort1(a, 0, a.length) }
-
- /** Sort an array of K where K is Ordered, preserving the existing order
- * where the values are equal. */
- def stableSort[K: ClassTag: Ordering](a: Array[K]) {
- stableSort(a, 0, a.length-1, new Array[K](a.length), Ordering[K].lt _)
- }
+ /** Sort an array of Doubles using `java.util.Arrays.sort`. */
+ def quickSort(a: Array[Double]): Unit = java.util.Arrays.sort(a)
- /** Sorts an array of `K` given an ordering function `f`.
- * `f` should return `true` iff its first parameter is strictly less than its second parameter.
- */
- def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean) {
- stableSort(a, 0, a.length-1, new Array[K](a.length), f)
- }
+ /** Sort an array of Ints using `java.util.Arrays.sort`. */
+ def quickSort(a: Array[Int]): Unit = java.util.Arrays.sort(a)
- /** Sorts an arbitrary sequence into an array, given a comparison function
- * that should return `true` iff parameter one is strictly less than parameter two.
- *
- * @param a the sequence to be sorted.
- * @param f the comparison function.
- * @return the sorted sequence of items.
- */
- def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = {
- val ret = a.toArray
- stableSort(ret, f)
- ret
- }
+ /** Sort an array of Floats using `java.util.Arrays.sort`. */
+ def quickSort(a: Array[Float]): Unit = java.util.Arrays.sort(a)
+
+ private final val qsortThreshold = 16
- /** Sorts an arbitrary sequence of items that are viewable as ordered. */
- def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] =
- stableSort(a, Ordering[K].lt _)
-
- /** Stably sorts a sequence of items given an extraction function that will
- * return an ordered key from an item.
- *
- * @param a the sequence to be sorted.
- * @param f the comparison function.
- * @return the sorted sequence of items.
- */
- def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] =
- stableSort(a)(implicitly[ClassTag[K]], Ordering[M] on f)
-
- private def sort1[K: Ordering](x: Array[K], off: Int, len: Int) {
- val ord = Ordering[K]
- import ord._
-
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
- }
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
- i += 1
- a += 1
- b += 1
- }
- }
- def med3(a: Int, b: Int, c: Int) = {
- if (x(a) < x(b)) {
- if (x(b) < x(c)) b else if (x(a) < x(c)) c else a
- } else {
- if (x(b) > x(c)) b else if (x(a) > x(c)) c else a
- }
- }
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j > off && x(j-1) > x(j)) {
- swap(j, j-1)
- j -= 1
+ /** Sort array `a` with quicksort, using the Ordering on its elements.
+ * This algorithm sorts in place, so no additional memory is used aside from
+ * what might be required to box individual elements during comparison.
+ */
+ def quickSort[K: Ordering](a: Array[K]): Unit = {
+ // Must have iN >= i0 or math will fail. Also, i0 >= 0.
+ def inner(a: Array[K], i0: Int, iN: Int, ord: Ordering[K]): Unit = {
+ if (iN - i0 < qsortThreshold) insertionSort(a, i0, iN, ord)
+ else {
+ var iK = (i0 + iN) >>> 1 // Unsigned div by 2
+ // Find index of median of first, central, and last elements
+ var pL =
+ if (ord.compare(a(i0), a(iN - 1)) <= 0)
+ if (ord.compare(a(i0), a(iK)) < 0)
+ if (ord.compare(a(iN - 1), a(iK)) < 0) iN - 1 else iK
+ else i0
+ else
+ if (ord.compare(a(i0), a(iK)) < 0) i0
+ else
+ if (ord.compare(a(iN - 1), a(iK)) <= 0) iN - 1
+ else iK
+ val pivot = a(pL)
+ // pL is the start of the pivot block; move it into the middle if needed
+ if (pL != iK) { a(pL) = a(iK); a(iK) = pivot; pL = iK }
+ // Elements equal to the pivot will be in range pL until pR
+ var pR = pL + 1
+ // Items known to be less than pivot are below iA (range i0 until iA)
+ var iA = i0
+ // Items known to be greater than pivot are at or above iB (range iB until iN)
+ var iB = iN
+ // Scan through everything in the buffer before the pivot(s)
+ while (pL - iA > 0) {
+ val current = a(iA)
+ ord.compare(current, pivot) match {
+ case 0 =>
+ // Swap current out with pivot block
+ a(iA) = a(pL - 1)
+ a(pL - 1) = current
+ pL -= 1
+ case x if x < 0 =>
+ // Already in place. Just update indices.
+ iA += 1
+ case _ if iB > pR =>
+ // Wrong side. There's room on the other side, so swap
+ a(iA) = a(iB - 1)
+ a(iB - 1) = current
+ iB -= 1
+ case _ =>
+ // Wrong side and there is no room. Swap by rotating pivot block.
+ a(iA) = a(pL - 1)
+ a(pL - 1) = a(pR - 1)
+ a(pR - 1) = current
+ pL -= 1
+ pR -= 1
+ iB -= 1
}
- i += 1
}
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
+ // Get anything remaining in buffer after the pivot(s)
+ while (iB - pR > 0) {
+ val current = a(iB - 1)
+ ord.compare(current, pivot) match {
+ case 0 =>
+ // Swap current out with pivot block
+ a(iB - 1) = a(pR)
+ a(pR) = current
+ pR += 1
+ case x if x > 0 =>
+ // Already in place. Just update indices.
+ iB -= 1
+ case _ =>
+ // Wrong side and we already know there is no room. Swap by rotating pivot block.
+ a(iB - 1) = a(pR)
+ a(pR) = a(pL)
+ a(pL) = current
+ iA += 1
+ pL += 1
+ pR += 1
}
- m = med3(l, m, n) // Mid-size, med of 3
}
- val v = x(m)
-
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- while (b <= c && x(b) <= v) {
- if (x(b) equiv v) {
- swap(a, b)
- a += 1
- }
- b += 1
- }
- while (c >= b && x(c) >= v) {
- if (x(c) equiv v) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
+ // Use tail recursion on large half (Sedgewick's method) so we don't blow up the stack if pivots are poorly chosen
+ if (iA - i0 < iN - iB) {
+ inner(a, i0, iA, ord) // True recursion
+ inner(a, iB, iN, ord) // Should be tail recursion
+ }
+ else {
+ inner(a, iB, iN, ord) // True recursion
+ inner(a, i0, iA, ord) // Should be tail recursion
}
-
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
-
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
}
}
- sort2(off, len)
+ inner(a, 0, a.length, implicitly[Ordering[K]])
}
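From the caller's perspective the rewritten method is simply an in-place quicksort by an `Ordering`; internally it picks a median-of-three pivot and recurses into the smaller partition first so the stack stays shallow. A usage sketch (`Person` is illustrative):

    case class Person(name: String, age: Int)
    val people = Array(Person("eve", 40), Person("bob", 25), Person("ann", 31))
    scala.util.Sorting.quickSort(people)(Ordering.by((_: Person).age))  // in place, youngest first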
-
- private def sort1(x: Array[Int], off: Int, len: Int) {
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
+
+ private final val mergeThreshold = 32
+
+ // Ordering[T] might be slow especially for boxed primitives, so use binary search variant of insertion sort
+ // Caller must pass iN >= i0 or math will fail. Also, i0 >= 0.
+ private def insertionSort[@specialized T](a: Array[T], i0: Int, iN: Int, ord: Ordering[T]): Unit = {
+ val n = iN - i0
+ if (n < 2) return
+ if (ord.compare(a(i0), a(i0+1)) > 0) {
+ val temp = a(i0)
+ a(i0) = a(i0+1)
+ a(i0+1) = temp
}
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
- i += 1
- a += 1
- b += 1
- }
- }
- def med3(a: Int, b: Int, c: Int) = {
- if (x(a) < x(b)) {
- if (x(b) < x(c)) b else if (x(a) < x(c)) c else a
- } else {
- if (x(b) > x(c)) b else if (x(a) > x(c)) c else a
- }
- }
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j>off && x(j-1) > x(j)) {
- swap(j, j-1)
- j -= 1
- }
- i += 1
+ var m = 2
+ while (m < n) {
+ // Speed up already-sorted case by checking last element first
+ val next = a(i0 + m)
+ if (ord.compare(next, a(i0+m-1)) < 0) {
+ var iA = i0
+ var iB = i0 + m - 1
+ while (iB - iA > 1) {
+ val ix = (iA + iB) >>> 1 // Use bit shift to get unsigned div by 2
+ if (ord.compare(next, a(ix)) < 0) iB = ix
+ else iA = ix
}
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
- }
- m = med3(l, m, n) // Mid-size, med of 3
- }
- val v = x(m)
-
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- while (b <= c && x(b) <= v) {
- if (x(b) == v) {
- swap(a, b)
- a += 1
- }
- b += 1
- }
- while (c >= b && x(c) >= v) {
- if (x(c) == v) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
+ val ix = iA + (if (ord.compare(next, a(iA)) < 0) 0 else 1)
+ var i = i0 + m
+ while (i > ix) {
+ a(i) = a(i-1)
+ i -= 1
}
-
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
-
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
+ a(ix) = next
}
+ m += 1
}
- sort2(off, len)
}
-
- private def sort1(x: Array[Double], off: Int, len: Int) {
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
+
+ // Caller is required to pass iN >= i0, else math will fail. Also, i0 >= 0.
+ private def mergeSort[@specialized T: ClassTag](a: Array[T], i0: Int, iN: Int, ord: Ordering[T], scratch: Array[T] = null): Unit = {
+ if (iN - i0 < mergeThreshold) insertionSort(a, i0, iN, ord)
+ else {
+ val iK = (i0 + iN) >>> 1 // Bit shift equivalent to unsigned math, no overflow
+ val sc = if (scratch eq null) new Array[T](iK - i0) else scratch
+ mergeSort(a, i0, iK, ord, sc)
+ mergeSort(a, iK, iN, ord, sc)
+ mergeSorted(a, i0, iK, iN, ord, sc)
}
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
+ }
+
+ // Must have 0 <= i0 < iK < iN
+ private def mergeSorted[@specialized T](a: Array[T], i0: Int, iK: Int, iN: Int, ord: Ordering[T], scratch: Array[T]): Unit = {
+ // Check to make sure we're not already in order
+ if (ord.compare(a(iK-1), a(iK)) > 0) {
+ var i = i0
+ val jN = iK - i0
+ var j = 0
+ while (i < iK) {
scratch(j) = a(i)
i += 1
- a += 1
- b += 1
- }
- }
- def med3(a: Int, b: Int, c: Int) = {
- val ab = x(a) compare x(b)
- val bc = x(b) compare x(c)
- val ac = x(a) compare x(c)
- if (ab < 0) {
- if (bc < 0) b else if (ac < 0) c else a
- } else {
- if (bc > 0) b else if (ac > 0) c else a
+ j += 1
}
- }
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j > off && (x(j-1) compare x(j)) > 0) {
- swap(j, j-1)
- j -= 1
- }
- i += 1
- }
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
- }
- m = med3(l, m, n) // Mid-size, med of 3
- }
- val v = x(m)
-
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- var bv = x(b) compare v
- while (b <= c && bv <= 0) {
- if (bv == 0) {
- swap(a, b)
- a += 1
- }
- b += 1
- if (b <= c) bv = x(b) compare v
- }
- var cv = x(c) compare v
- while (c >= b && cv >= 0) {
- if (cv == 0) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- if (c >= b) cv = x(c) compare v
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
- }
-
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
-
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
+ var k = i0
+ j = 0
+ while (i < iN && j < jN) {
+ if (ord.compare(a(i), scratch(j)) < 0) { a(k) = a(i); i += 1 }
+ else { a(k) = scratch(j); j += 1 }
+ k += 1
}
+ while (j < jN) { a(k) = scratch(j); j += 1; k += 1 }
+ // Don't need to finish a(i) because it's already in place, k = i
}
- sort2(off, len)
}
-
- private def sort1(x: Array[Float], off: Int, len: Int) {
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
+
+ // Why would you even do this?
+ private def booleanSort(a: Array[Boolean]): Unit = {
+ var i = 0
+ var n = 0
+ while (i < a.length) {
+ if (!a(i)) n += 1
+ i += 1
}
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
- i += 1
- a += 1
- b += 1
- }
+ i = 0
+ while (i < n) {
+ a(i) = false
+ i += 1
}
- def med3(a: Int, b: Int, c: Int) = {
- val ab = x(a) compare x(b)
- val bc = x(b) compare x(c)
- val ac = x(a) compare x(c)
- if (ab < 0) {
- if (bc < 0) b else if (ac < 0) c else a
- } else {
- if (bc > 0) b else if (ac > 0) c else a
- }
+ while (i < a.length) {
+ a(i) = true
+ i += 1
}
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j > off && (x(j-1) compare x(j)) > 0) {
- swap(j, j-1)
- j -= 1
- }
- i += 1
- }
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
- }
- m = med3(l, m, n) // Mid-size, med of 3
- }
- val v = x(m)
+ }
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- var bv = x(b) compare v
- while (b <= c && bv <= 0) {
- if (bv == 0) {
- swap(a, b)
- a += 1
- }
- b += 1
- if (b <= c) bv = x(b) compare v
- }
- var cv = x(c) compare v
- while (c >= b && cv >= 0) {
- if (cv == 0) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- if (c >= b) cv = x(c) compare v
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
- }
+ // TODO: add upper bound: T <: AnyRef, propagate to callers below (not binary compatible)
+ // Maybe also rename all these methods to `sort`.
+ @inline private def sort[T](a: Array[T], ord: Ordering[T]): Unit = a match {
+ case _: Array[AnyRef] =>
+ // Note that runtime matches are covariant, so could actually be any Array[T] s.t. T is not primitive (even boxed value classes)
+ if (a.length > 1 && (ord eq null)) throw new NullPointerException("Ordering")
+ java.util.Arrays.sort(a, ord)
+ case a: Array[Int] => if (ord eq Ordering.Int) java.util.Arrays.sort(a) else mergeSort[Int](a, 0, a.length, ord)
+ case a: Array[Double] => mergeSort[Double](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful!
+ case a: Array[Long] => if (ord eq Ordering.Long) java.util.Arrays.sort(a) else mergeSort[Long](a, 0, a.length, ord)
+ case a: Array[Float] => mergeSort[Float](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful!
+ case a: Array[Char] => if (ord eq Ordering.Char) java.util.Arrays.sort(a) else mergeSort[Char](a, 0, a.length, ord)
+ case a: Array[Byte] => if (ord eq Ordering.Byte) java.util.Arrays.sort(a) else mergeSort[Byte](a, 0, a.length, ord)
+ case a: Array[Short] => if (ord eq Ordering.Short) java.util.Arrays.sort(a) else mergeSort[Short](a, 0, a.length, ord)
+ case a: Array[Boolean] => if (ord eq Ordering.Boolean) booleanSort(a) else mergeSort[Boolean](a, 0, a.length, ord)
+ // Array[Unit] is matched as an Array[AnyRef] due to covariance in runtime matching. Not worth catching it as a special case.
+ case null => throw new NullPointerException
+ }
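The dispatcher above sends reference arrays to `java.util.Arrays.sort` (a stable merge sort for objects) and primitive arrays either to the matching `Arrays.sort` overload, when the default `Ordering` is in play, or to the in-library merge sort otherwise. A sketch of the observable behaviour:

    val words = Array("bb", "a", "ccc")
    scala.util.Sorting.stableSort(words)                            // Array[AnyRef] path: Arrays.sort with the Ordering
    val nums = Array(3, 1, 2)
    scala.util.Sorting.stableSort(nums)                             // Ordering.Int: Arrays.sort(Array[Int])
    scala.util.Sorting.stableSort(nums, (x: Int, y: Int) => y < x)  // custom ordering: in-library merge sort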
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
+ // TODO: remove unnecessary ClassTag (not binary compatible)
+ /** Sort array `a` using the Ordering on its elements, preserving the original ordering where possible. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag: Ordering](a: Array[K]): Unit = sort(a, Ordering[K])
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
- }
- }
- sort2(off, len)
+ // TODO: Remove unnecessary ClassTag (not binary compatible)
+ // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering)
+ /** Sort array `a` using function `f` that computes the less-than relation for each element. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean): Unit = sort(a, Ordering fromLessThan f)
+
+ /** A sorted Array, using the Ordering for the elements in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] = {
+ val ret = a.toArray
+ sort(ret, Ordering[K])
+ ret
}
- private def stableSort[K : ClassTag](a: Array[K], lo: Int, hi: Int, scratch: Array[K], f: (K,K) => Boolean) {
- if (lo < hi) {
- val mid = (lo+hi) / 2
- stableSort(a, lo, mid, scratch, f)
- stableSort(a, mid+1, hi, scratch, f)
- var k, t_lo = lo
- var t_hi = mid + 1
- while (k <= hi) {
- if ((t_lo <= mid) && ((t_hi > hi) || (!f(a(t_hi), a(t_lo))))) {
- scratch(k) = a(t_lo)
- t_lo += 1
- } else {
- scratch(k) = a(t_hi)
- t_hi += 1
- }
- k += 1
- }
- k = lo
- while (k <= hi) {
- a(k) = scratch(k)
- k += 1
- }
- }
+ // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering)
+ /** A sorted Array, given a function `f` that computes the less-than relation for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = {
+ val ret = a.toArray
+ sort(ret, Ordering fromLessThan f)
+ ret
+ }
+
+ /** A sorted Array, given an extraction function `f` that returns an ordered key for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] = {
+ val ret = a.toArray
+ sort(ret, Ordering[M] on f)
+ ret
}
}
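A short usage sketch of the retained `stableSort` overload that takes a key-extraction function; stability means elements with equal keys keep their original relative order:

    case class Entry(name: String, priority: Int)
    val entries = Seq(Entry("b", 1), Entry("a", 1), Entry("c", 0))
    val byPriority: Array[Entry] = scala.util.Sorting.stableSort(entries, (_: Entry).priority)
    // byPriority starts with Entry("c", 0); "b" stays ahead of "a" among the ties.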
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index 0a6a7972c2..b0eae74043 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -48,7 +48,7 @@ import scala.language.implicitConversions
* catching exceptions along the way. The `flatMap` and `map` combinators in the above example each essentially
* pass off either their successfully completed value, wrapped in the `Success` type for it to be further operated
* upon by the next combinator in the chain, or the exception wrapped in the `Failure` type usually to be simply
- * passed on down the chain. Combinators such as `rescue` and `recover` are designed to provide some type of
+ * passed on down the chain. Combinators such as `recover` and `recoverWith` are designed to provide some type of
* default behavior in the case of failure.
*
* ''Note'': only non-fatal exceptions are caught by the combinators on `Try` (see [[scala.util.control.NonFatal]]).
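The corrected names above, `recover` and `recoverWith`, are the failure-side analogues of `map` and `flatMap`. A minimal sketch:

    import scala.util.Try

    val parsed: Try[Int]  = Try("oops".toInt)                                           // Failure(NumberFormatException)
    val orZero: Try[Int]  = parsed.recover { case _: NumberFormatException => 0 }       // Success(0)
    val retried: Try[Int] = parsed.recoverWith { case _: NumberFormatException => Try("42".toInt) }  // Success(42)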
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index ce60ade9f5..8c32a92ecd 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -1184,6 +1184,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe))
propagatePackageBoundary(jconstr.javaFlags, constr)
copyAnnotations(constr, jconstr)
+ if (jconstr.javaFlags.isVarargs) constr modifyInfo arrayToRepeated
markAllCompleted(constr)
constr
}
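The added line converts the trailing `Array[T]` parameter of a Java varargs constructor into a repeated parameter (`T*`) on the Scala side, as is already done for methods. A hedged inspection sketch using `ProcessBuilder`, whose constructors are `ProcessBuilder(List<String>)` and `ProcessBuilder(String...)`:

    import scala.reflect.runtime.{universe => ru}

    val ctorParamTypes = ru.typeOf[ProcessBuilder]
      .decl(ru.termNames.CONSTRUCTOR).asTerm.alternatives
      .map(_.asMethod.paramLists.head.map(_.typeSignature))
    // With the fix, the String... constructor should report a repeated parameter
    // (printed as String*) rather than Array[String].
    ctorParamTypes.foreach(println)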
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index c281126d5f..e355d9f864 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -69,6 +69,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
// Used in a test case.
def showDirectory() = replOutput.show(out)
+ lazy val isClassBased: Boolean = settings.Yreplclassbased.value
+
private[nsc] var printResults = true // whether to print result lines
private[nsc] var totalSilence = false // whether to print anything
private var _initializeComplete = false // compiler is initialized
@@ -310,8 +312,14 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
def originalPath(name: String): String = originalPath(TermName(name))
- def originalPath(name: Name): String = typerOp path name
- def originalPath(sym: Symbol): String = typerOp path sym
+ def originalPath(name: Name): String = translateOriginalPath(typerOp path name)
+ def originalPath(sym: Symbol): String = translateOriginalPath(typerOp path sym)
+ /** For class-based REPL mode we use an `.INSTANCE` accessor. */
+ val readInstanceName = if (isClassBased) ".INSTANCE" else ""
+ def translateOriginalPath(p: String): String = {
+ val readName = java.util.regex.Matcher.quoteReplacement(sessionNames.read)
+ p.replaceFirst(readName, readName + readInstanceName)
+ }
def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName
def translatePath(path: String) = {
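`translateOriginalPath` splices `.INSTANCE` in after the read object's name when running class-based. A hypothetical illustration of the string rewrite (the `$line3`/`$read`/`$iw` names are assumptions about the generated wrapper names):

    val readName = java.util.regex.Matcher.quoteReplacement("$read")  // stand-in for sessionNames.read
    "$line3.$read.$iw.x".replaceFirst(readName, readName + ".INSTANCE")
    // -> "$line3.$read.INSTANCE.$iw.x"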
@@ -758,11 +766,13 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
// object and we can do that much less wrapping.
def packageDecl = "package " + packageName
+ def pathToInstance(name: String) = packageName + "." + name + readInstanceName
def pathTo(name: String) = packageName + "." + name
def packaged(code: String) = packageDecl + "\n\n" + code
- def readPath = pathTo(readName)
- def evalPath = pathTo(evalName)
+ def readPathInstance = pathToInstance(readName)
+ def readPath = pathTo(readName)
+ def evalPath = pathTo(evalName)
def call(name: String, args: Any*): AnyRef = {
val m = evalMethod(name)
@@ -802,7 +812,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
/** The innermost object inside the wrapper, found by
* following accessPath into the outer one.
*/
- def resolvePathToSymbol(accessPath: String): Symbol = {
+ def resolvePathToSymbol(fullAccessPath: String): Symbol = {
+ val accessPath = fullAccessPath.stripPrefix(readPath)
val readRoot = readRootPath(readPath) // the outermost wrapper
(accessPath split '.').foldLeft(readRoot: Symbol) {
case (sym, "") => sym
@@ -849,7 +860,6 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def defines = defHandlers flatMap (_.definedSymbols)
def imports = importedSymbols
def value = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol
-
val lineRep = new ReadEvalPrint()
private var _originalLine: String = null
@@ -858,6 +868,11 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
/** handlers for each tree in this request */
val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
+ val definesClass = handlers.exists {
+ case _: ClassHandler => true
+ case _ => false
+ }
+
def defHandlers = handlers collect { case x: MemberDefHandler => x }
/** list of names used by this expression */
@@ -875,13 +890,13 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
* append to objectName to access anything bound by request.
*/
lazy val ComputedImports(importsPreamble, importsTrailer, accessPath) =
- exitingTyper(importsCode(referencedNames.toSet, ObjectSourceCode))
+ exitingTyper(importsCode(referencedNames.toSet, ObjectSourceCode, definesClass))
/** the line of code to compute */
def toCompute = line
/** The path of the value that contains the user code. */
- def fullAccessPath = s"${lineRep.readPath}$accessPath"
+ def fullAccessPath = s"${lineRep.readPathInstance}$accessPath"
/** The path of the given member of the wrapping instance. */
def fullPath(vname: String) = s"$fullAccessPath.`$vname`"
@@ -911,7 +926,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def postwrap: String
}
- private class ObjectBasedWrapper extends Wrapper {
+ class ObjectBasedWrapper extends Wrapper {
def preambleHeader = "object %s {"
def postamble = importsTrailer + "\n}"
@@ -919,13 +934,16 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def postwrap = "}\n"
}
- private class ClassBasedWrapper extends Wrapper {
- def preambleHeader = "class %s extends Serializable {"
+ class ClassBasedWrapper extends Wrapper {
+ def preambleHeader = "class %s extends Serializable { "
/** Adds an object that instantiates the outer wrapping class. */
- def postamble = s"""$importsTrailer
+ def postamble = s"""
+ |$importsTrailer
+ |}
+ |object ${lineRep.readName} {
+ | val INSTANCE = new ${lineRep.readName}();
|}
- |object ${lineRep.readName} extends ${lineRep.readName}
|""".stripMargin
import nme.{ INTERPRETER_IMPORT_WRAPPER => iw }
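With this postamble, each class-based REPL line compiles to a wrapper class plus an object holding a single instance of it, roughly as below (names illustrative, assuming `lineRep.readName` is `$read`):

    class $read extends Serializable {
      // imports preamble, user code, imports trailer ...
    }
    object $read {
      val INSTANCE = new $read()
    }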
@@ -935,7 +953,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
private lazy val ObjectSourceCode: Wrapper =
- if (settings.Yreplclassbased) new ClassBasedWrapper else new ObjectBasedWrapper
+ if (isClassBased) new ClassBasedWrapper else new ObjectBasedWrapper
private object ResultObjectSourceCode extends IMain.CodeAssembler[MemberHandler] {
/** We only want to generate this code when the result
@@ -994,7 +1012,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
}
- lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
+ lazy val resultSymbol = lineRep.resolvePathToSymbol(fullAccessPath)
def applyToResultMember[T](name: Name, f: Symbol => T) = exitingTyper(f(resultSymbol.info.nonPrivateDecl(name)))
/* typeOf lookup with encoding */
diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala
index 5244858a62..3ec77e46f1 100644
--- a/src/repl/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala
@@ -92,7 +92,7 @@ trait Imports {
* last one imported is actually usable.
*/
case class ComputedImports(prepend: String, append: String, access: String)
- protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper): ComputedImports = {
+ protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper, definesClass: Boolean): ComputedImports = {
/** Narrow down the list of requests from which imports
* should be taken. Removes requests which cannot contribute
* useful imports for the specified set of wanted names.
@@ -107,6 +107,8 @@ trait Imports {
// Single symbol imports might be implicits! See bug #1752. Rather than
// try to finesse this, we will mimic all imports for now.
def keepHandler(handler: MemberHandler) = handler match {
+ /* While defining classes in class-based mode, implicits are not needed. */
+ case h: ImportHandler if isClassBased && definesClass => h.importedNames.exists(x => wanted.contains(x))
case _: ImportHandler => true
case x => x.definesImplicit || (x.definedNames exists wanted)
}
@@ -146,7 +148,10 @@ trait Imports {
// loop through previous requests, adding imports for each one
wrapBeforeAndAfter {
+ // Reuse a single temporary value when importing from a line with multiple definitions.
+ val tempValLines = mutable.Set[Int]()
for (ReqAndHandler(req, handler) <- reqsToUse) {
+ val objName = req.lineRep.readPathInstance
handler match {
// If the user entered an import, then just use it; add an import wrapping
// level if the import might conflict with some other import
@@ -157,6 +162,23 @@ trait Imports {
code append (x.member + "\n")
currentImps ++= x.importedNames
+ case x if isClassBased =>
+ for (imv <- x.definedNames) {
+ if (!currentImps.contains(imv)) {
+ x match {
+ case _: ClassHandler =>
+ code.append("import " + objName + req.accessPath + ".`" + imv + "`\n")
+ case _ =>
+ val valName = req.lineRep.packageName + req.lineRep.readName
+ if (!tempValLines.contains(req.lineRep.lineId)) {
+ code.append(s"val $valName = $objName\n")
+ tempValLines += req.lineRep.lineId
+ }
+ code.append(s"import $valName${req.accessPath}.`$imv`;\n")
+ }
+ currentImps += imv
+ }
+ }
// For other requests, import each defined name.
// import them explicitly instead of with _, so that
// ambiguity errors will not be generated. Also, quote
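For a previous line that defined, say, a class `C` and a value `x`, the class-based branch above emits imports against the `.INSTANCE` path, introducing one temporary `val` per source line so that term members are imported through a stable prefix. A sketch of the generated preamble (the `$line3`/`$read`/`$iw` names are guesses at the synthetic wrapper names):

    import $line3.$read.INSTANCE.$iw.`C`
    val $line3$read = $line3.$read.INSTANCE
    import $line3$read.$iw.`x`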
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index c384ed7034..81036b4908 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -177,7 +177,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<li class="hideall out"><span>Hide All</span></li>
<li class="showall in"><span>Show all</span></li>
</ol>
- <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
</div>
}
{
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index f984b4579f..778839a1f5 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -90,8 +90,12 @@ trait ModelFactoryImplicitSupport {
else {
val context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
- val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams)
+ val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams) ++
+ global.analyzer.allViewsFrom(byNameType(sym.tpe_*), context, sym.typeParams)
var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
+ //debug(results.mkString("All views\n ", "\n ", "\n"))
+ //debug(conversions.mkString("Conversions\n ", "\n ", "\n"))
+
// also keep empty conversions, so they appear in diagrams
// conversions = conversions.filter(!_.members.isEmpty)
@@ -193,7 +197,7 @@ trait ModelFactoryImplicitSupport {
List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl))
} catch {
case i: ImplicitNotFound =>
- //println(" Eliminating: " + toType)
+ //debug(s" Eliminating: $toType")
Nil
}
}