-rw-r--r--  bincompat-forward.whitelist.conf | 53
-rw-r--r--  build.sbt | 7
-rw-r--r--  spec/04-basic-declarations-and-definitions.md | 30
-rw-r--r--  spec/05-classes-and-objects.md | 1
-rw-r--r--  src/build/genprod.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala | 251
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala | 265
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala | 282
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala | 37
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala | 38
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala | 240
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 8
-rw-r--r--  src/library/scala/Function0.scala | 8
-rw-r--r--  src/library/scala/Function1.scala | 7
-rw-r--r--  src/library/scala/Function2.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/NumericRange.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/Queue.scala | 9
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 2
-rw-r--r--  src/library/scala/math/BigDecimal.scala | 4
-rw-r--r--  src/library/scala/math/Numeric.scala | 6
-rw-r--r--  src/library/scala/util/Sorting.scala | 712
-rw-r--r--  src/library/scala/util/Try.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 1
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala | 48
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Imports.scala | 24
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala | 1
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala | 8
-rw-r--r--  test/files/neg/inlineMaxSize.check | 4
-rw-r--r--  test/files/neg/inlineMaxSize.scala | 2
-rw-r--r--  test/files/pos/t9356/Foo_2.scala | 6
-rw-r--r--  test/files/pos/t9356/MyAnnotation.java | 12
-rw-r--r--  test/files/pos/t9356/Test_3.scala | 3
-rw-r--r--  test/files/run/range.scala | 18
-rw-r--r--  test/files/run/repl-serialization.check | 25
-rw-r--r--  test/files/run/repl-serialization.scala | 68
-rw-r--r--  test/files/run/t7747-repl.check | 41
-rw-r--r--  test/files/run/t7747-repl.scala | 6
-rw-r--r--  test/files/run/toolbox-varargs/Test.scala | 13
-rw-r--r--  test/files/run/toolbox-varargs/Varargs.java | 8
-rw-r--r--  test/junit/scala/collection/immutable/RangeConsistencyTest.scala | 11
-rw-r--r--  test/junit/scala/math/BigDecimalTest.scala | 32
-rw-r--r--  test/junit/scala/math/NumericTest.scala | 28
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala | 231
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala | 16
-rw-r--r--  test/junit/scala/util/SortingTest.scala | 69
-rw-r--r--  versions.properties | 2
50 files changed, 2107 insertions, 601 deletions
diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf
index 3808083dd3..8fadb65f39 100644
--- a/bincompat-forward.whitelist.conf
+++ b/bincompat-forward.whitelist.conf
@@ -319,6 +319,59 @@ filter {
{
matchName="scala.util.Random.scala$util$Random$$nextAlphaNum$1"
problemName=MissingMethodProblem
+ },
+ // Nominally private but in practice JVM-visible methods for reworked scala.util.Sorting
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$default$5"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mBc$sp"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mFc$sp"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mJc$sp"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mCc$sp"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mSc$sp"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$insertionSort"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mZc$sp"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mDc$sp"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mIc$sp"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$mergeSorted"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.util.Sorting.scala$util$Sorting$$booleanSort"
+ problemName=MissingMethodProblem
}
]
}
diff --git a/build.sbt b/build.sbt
index 553c217d4a..e960a4c3d2 100644
--- a/build.sbt
+++ b/build.sbt
@@ -192,7 +192,12 @@ lazy val interactive = configureAsSubproject(project)
.dependsOn(compiler)
lazy val repl = configureAsSubproject(project)
- .settings(libraryDependencies += jlineDep)
+ .settings(
+ libraryDependencies += jlineDep,
+ connectInput in run := true,
+ outputStrategy in run := Some(StdoutOutput),
+ run <<= (run in Compile).partialInput(" -usejavacp") // Automatically add this so that `repl/run` works without additional arguments.
+ )
.settings(disableDocsAndPublishingTasks: _*)
.dependsOn(compiler)
diff --git a/spec/04-basic-declarations-and-definitions.md b/spec/04-basic-declarations-and-definitions.md
index 7fb5427d36..8437d2ea71 100644
--- a/spec/04-basic-declarations-and-definitions.md
+++ b/spec/04-basic-declarations-and-definitions.md
@@ -298,7 +298,7 @@ the sequence of variable definitions
```ebnf
Dcl ::= ‘type’ {nl} TypeDcl
TypeDcl ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
-Def ::= type {nl} TypeDef
+Def ::= ‘type’ {nl} TypeDef
TypeDef ::= id [TypeParamClause] ‘=’ Type
```
@@ -620,7 +620,11 @@ well as the function body, if it is present.
A value parameter clause $\mathit{ps}$ consists of zero or more formal
parameter bindings such as `$x$: $T$` or `$x: T = e$`, which bind value
-parameters and associate them with their types. Each value parameter
+parameters and associate them with their types.
+
+### Default Arguments
+
+Each value parameter
declaration may optionally define a default argument. The default argument
expression $e$ is type-checked with an expected type $T'$ obtained
by replacing all occurences of the function's type parameters in $T$ by
@@ -632,13 +636,7 @@ expression. Here, $n$ denotes the parameter's position in the method
declaration. These methods are parametrized by the type parameter clause
`[$\mathit{tps}\,$]` and all value parameter clauses
`($\mathit{ps}_1$)$\ldots$($\mathit{ps}_{i-1}$)` preceding $p_{i,j}$.
-The `$f\$$default$\$$n` methods are inaccessible for
-user programs.
-
-The scope of a formal value parameter name $x$ comprises all subsequent
-parameter clauses, as well as the method return type and the function body, if
-they are given. Both type parameter names and value parameter names must
-be pairwise distinct.
+The `$f\$$default$\$$n` methods are inaccessible for user programs.
###### Example
In the method
@@ -657,6 +655,20 @@ def compare$\$$default$\$$1[T]: Int = 0
def compare$\$$default$\$$2[T](a: T): T = a
```
+The scope of a formal value parameter name $x$ comprises all subsequent
+parameter clauses, as well as the method return type and the function body, if
+they are given. Both type parameter names and value parameter names must
+be pairwise distinct.
+
+A default value which depends on earlier parameters uses the actual arguments
+if they are provided, not the default arguments.
+
+```scala
+def f(a: Int = 0)(b: Int = a + 1) = b // OK
+// def f(a: Int = 0, b: Int = a + 1) // "error: not found: value a"
+f(10)() // returns 11 (not 1)
+```
+
### By-Name Parameters
```ebnf
diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md
index 28abe6c3bc..8be792d3cb 100644
--- a/spec/05-classes-and-objects.md
+++ b/spec/05-classes-and-objects.md
@@ -395,6 +395,7 @@ class C extends A with B { type T <: C }
Let $C$ be a class type. The _inheritance closure_ of $C$ is the
smallest set $\mathscr{S}$ of types such that
+- $C$ is in $\mathscr{S}$.
- If $T$ is in $\mathscr{S}$, then every type $T'$ which forms syntactically
a part of $T$ is also in $\mathscr{S}$.
- If $T$ is a class type in $\mathscr{S}$, then all [parents](#templates)
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index ed436fe2e4..b470348e8c 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -123,7 +123,10 @@ object FunctionOne extends Function(1) {
* def apply(x: Int): Int = x + 1
* }
* assert(succ(0) == anonfun1(0))
- * """)
+ * """) + """
+ *
+ * Note that the difference between `Function1` and [[scala.PartialFunction]]
+ * is that the latter can specify inputs which it will not handle."""
override def moreMethods = """
/** Composes two instances of Function1 in a new Function1, with this function applied last.
@@ -178,13 +181,7 @@ class Function(val i: Int) extends Group("Function") with Arity {
*
* {{{
* object Main extends App {%s}
- * }}}
- *
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-"""
+ * }}}"""
def toStr() = "\"" + ("<function%d>" format i) + "\""
def apply() = {
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index f866c0d038..76af40b330 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -617,18 +617,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self =>
val internalName = cachedJN.toString()
val trackedSym = jsymbol(sym)
reverseJavaName.get(internalName) match {
- case Some(oldsym) if oldsym.exists && trackedSym.exists =>
- assert(
- // In contrast, neither NothingClass nor NullClass show up bytecode-level.
- (oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)),
- s"""|Different class symbols have the same bytecode-level internal name:
- | name: $internalName
- | oldsym: ${oldsym.fullNameString}
- | tracked: ${trackedSym.fullNameString}
- """.stripMargin
- )
- case _ =>
+ case None =>
reverseJavaName.put(internalName, trackedSym)
+ case Some(oldsym) =>
+ // TODO: `duplicateOk` seems pretty ad-hoc (a more aggressive version caused SI-9356 because it called oldSym.exists, which failed in the unpickler; see also SI-5031)
+ def duplicateOk = oldsym == NoSymbol || trackedSym == NoSymbol || (syntheticCoreClasses contains oldsym) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule))
+ if (oldsym != trackedSym && !duplicateOk)
+ devWarning(s"""|Different class symbols have the same bytecode-level internal name:
+ | name: $internalName
+ | oldsym: ${oldsym.fullNameString}
+ | tracked: ${trackedSym.fullNameString}""".stripMargin)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
new file mode 100644
index 0000000000..7bbe1e2a49
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
@@ -0,0 +1,251 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import scala.annotation.switch
+import scala.collection.{mutable, immutable}
+import scala.tools.asm.Opcodes
+import scala.tools.asm.tree._
+import scala.tools.asm.tree.analysis.{Analyzer, Value, Frame, Interpreter}
+import opt.BytecodeUtils._
+
+object AliasingFrame {
+ private var _idCounter: Long = 0l
+ private def nextId = { _idCounter += 1; _idCounter }
+}
+
+class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLocals, nStack) {
+ import Opcodes._
+
+ // Auxiliary constructor required for implementing `AliasingAnalyzer.newFrame`
+ def this(src: Frame[_ <: V]) {
+ this(src.getLocals, src.getMaxStackSize)
+ init(src)
+ }
+
+ /**
+ * For each slot (entry in the `values` array of the frame), an id that uniquely represents
+ * the object stored in it. If two values have the same id, they are aliases of the same
+ * object.
+ */
+ private val aliasIds: Array[Long] = Array.fill(nLocals + nStack)(AliasingFrame.nextId)
+
+ /**
+ * The object alias id for a value index.
+ */
+ def aliasId(entry: Int) = aliasIds(entry)
+
+ /**
+ * Returns the indices of the values array which are aliases of the object `id`.
+ */
+ def valuesWithAliasId(id: Long): Set[Int] = immutable.BitSet.empty ++ aliasIds.indices.iterator.filter(i => aliasId(i) == id)
+
+ /**
+ * The set of aliased values for a given entry in the `values` array.
+ */
+ def aliasesOf(entry: Int): Set[Int] = valuesWithAliasId(aliasIds(entry))
+
+ /**
+ * Define a new alias. For example, given
+ * var a = this // this, a have the same aliasId
+ * then an assignment
+ * b = a
+ * will set the same aliasId for `b`.
+ */
+ private def newAlias(assignee: Int, source: Int): Unit = {
+ aliasIds(assignee) = aliasIds(source)
+ }
+
+ /**
+ * An assignment
+ * a = someUnknownValue()
+ * sets a fresh alias id for `a`.
+ * A stack value is also removed from its alias set when being consumed.
+ */
+ private def removeAlias(assignee: Int): Unit = {
+ aliasIds(assignee) = AliasingFrame.nextId
+ }
+
+ override def execute(insn: AbstractInsnNode, interpreter: Interpreter[V]): Unit = {
+ // Make the extension methods easier to use (otherwise we have to repeat `this`.stackTop)
+ def stackTop: Int = this.stackTop
+ def peekStack(n: Int): V = this.peekStack(n)
+
+ // the val pattern `val (p, c) = f` still allocates a tuple (https://github.com/scala-opt/scala/issues/28)
+ val prodCons = InstructionStackEffect(insn, this) // needs to be called before super.execute, see its doc
+ val consumed = prodCons._1
+ val produced = prodCons._2
+
+ super.execute(insn, interpreter)
+
+ (insn.getOpcode: @switch) match {
+ case ALOAD =>
+ newAlias(assignee = stackTop, source = insn.asInstanceOf[VarInsnNode].`var`)
+
+ case DUP =>
+ val top = stackTop
+ newAlias(assignee = top, source = top - 1)
+
+ case DUP_X1 =>
+ val top = stackTop
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ newAlias(assignee = top - 2, source = top)
+
+ case DUP_X2 =>
+ // Check if the second element on the stack is size 2
+ // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup_x2
+ val isSize2 = peekStack(1).getSize == 2
+ val top = stackTop
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ if (isSize2) {
+ // Size 2 values on the stack only take one slot in the `values` array
+ newAlias(assignee = top - 2, source = top)
+ } else {
+ newAlias(assignee = top - 2, source = top - 3)
+ newAlias(assignee = top - 3, source = top)
+ }
+
+ case DUP2 =>
+ val isSize2 = peekStack(0).getSize == 2
+ val top = stackTop
+ if (isSize2) {
+ newAlias(assignee = top, source = top - 1)
+ } else {
+ newAlias(assignee = top - 1, source = top - 3)
+ newAlias(assignee = top, source = top - 2)
+ }
+
+ case DUP2_X1 =>
+ val isSize2 = peekStack(0).getSize == 2
+ val top = stackTop
+ if (isSize2) {
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ newAlias(assignee = top - 2, source = top)
+ } else {
+ newAlias(assignee = top, source = top - 2)
+ newAlias(assignee = top - 1, source = top - 3)
+ newAlias(assignee = top - 2, source = top - 4)
+ newAlias(assignee = top - 4, source = top)
+ newAlias(assignee = top - 5, source = top - 1)
+ }
+
+ case DUP2_X2 =>
+ val top = stackTop
+ // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup2_x2
+ val v1isSize2 = peekStack(0).getSize == 2
+ if (v1isSize2) {
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ val v2isSize2 = peekStack(1).getSize == 2
+ if (v2isSize2) {
+ // Form 4
+ newAlias(assignee = top - 2, source = top)
+ } else {
+ // Form 2
+ newAlias(assignee = top - 2, source = top - 3)
+ newAlias(assignee = top - 3, source = top)
+ }
+ } else {
+ newAlias(assignee = top, source = top - 2)
+ newAlias(assignee = top - 1, source = top - 3)
+ newAlias(assignee = top - 2, source = top - 4)
+ val v3isSize2 = peekStack(2).getSize == 2
+ if (v3isSize2) {
+ // Form 3
+ newAlias(assignee = top - 3, source = top)
+ newAlias(assignee = top - 4, source = top - 1)
+ } else {
+ // Form 1
+ newAlias(assignee = top - 3, source = top - 5)
+ newAlias(assignee = top - 4, source = top)
+ newAlias(assignee = top - 5, source = top - 1)
+ }
+ }
+
+ case SWAP =>
+ val top = stackTop
+ val idTop = aliasIds(top)
+ aliasIds(top) = aliasIds(top - 1)
+ aliasIds(top - 1) = idTop
+
+ case opcode =>
+ if (opcode == ASTORE) {
+ // Not a separate case because we need to remove the consumed stack value from alias sets after.
+ val stackTopBefore = stackTop - produced + consumed
+ val local = insn.asInstanceOf[VarInsnNode].`var`
+ newAlias(assignee = local, source = stackTopBefore)
+ // if the value written is size 2, it overwrites the subsequent slot, which is then no
+ // longer an alias of anything. see the corresponding case in `Frame.execute`.
+ if (getLocal(local).getSize == 2)
+ removeAlias(local + 1)
+
+ // if the value at the preceding index is size 2, it is no longer valid, so we remove its
+ // aliasing. see corresponding case in `Frame.execute`
+ if (local > 0) {
+ val precedingValue = getLocal(local - 1)
+ if (precedingValue != null && precedingValue.getSize == 2)
+ removeAlias(local - 1)
+ }
+ }
+
+ // Remove consumed stack values from aliasing sets.
+ // Example: iadd
+ // - before: local1, local2, stack1, consumed1, consumed2
+ // - after: local1, local2, stack1, produced1 // stackTop = 3
+ val firstConsumed = stackTop - produced + 1 // firstConsumed = 3
+ for (i <- 0 until consumed)
+ removeAlias(firstConsumed + i) // remove aliases for 3 and 4
+
+ // We don't need to set the alias ids for the produced values: the aliasIds array already
+ // contains fresh ids for non-used stack values (ensured by removeAlias).
+ }
+ }
+
+ /**
+ * Merge the AliasingFrame `other` into this AliasingFrame.
+ *
+ * Aliases that are common in both frames are kept. Example:
+ *
+ * var x, y = null
+ * if (...) {
+ * x = a
+ * y = a // (x, y, a) are aliases
+ * } else {
+ * x = a
+ * y = b // (x, a) and (y, b)
+ * }
+ * [...] // (x, a)
+ */
+ override def merge(other: Frame[_ <: V], interpreter: Interpreter[V]): Boolean = {
+ val valuesChanged = super.merge(other, interpreter)
+ var aliasesChanged = false
+ val aliasingOther = other.asInstanceOf[AliasingFrame[_]]
+ for (i <- aliasIds.indices) {
+ val thisAliases = aliasesOf(i)
+ val thisNotOther = thisAliases diff (thisAliases intersect aliasingOther.aliasesOf(i))
+ if (thisNotOther.nonEmpty) {
+ aliasesChanged = true
+ thisNotOther foreach removeAlias
+ }
+ }
+ valuesChanged || aliasesChanged
+ }
+
+ override def init(src: Frame[_ <: V]): Frame[V] = {
+ super.init(src)
+ compat.Platform.arraycopy(src.asInstanceOf[AliasingFrame[_]].aliasIds, 0, aliasIds, 0, aliasIds.length)
+ this
+ }
+}
+
+/**
+ * An analyzer that uses AliasingFrames instead of bare Frames. This can be used when an analysis
+ * needs to track aliases, but doesn't require a more specific Frame subclass.
+ */
+class AliasingAnalyzer[V <: Value](interpreter: Interpreter[V]) extends Analyzer[V](interpreter) {
+ override def newFrame(nLocals: Int, nStack: Int): AliasingFrame[V] = new AliasingFrame(nLocals, nStack)
+ override def newFrame(src: Frame[_ <: V]): AliasingFrame[V] = new AliasingFrame(src)
+}
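
A minimal usage sketch of the aliasing analysis above (the driver method, names and index handling are assumptions for illustration, not part of the patch): run the `AliasingAnalyzer` over a method and ask which slots currently alias a given local.

```scala
// Hypothetical driver, assumed to live in the same package as AliasingFrame/AliasingAnalyzer.
import scala.tools.asm.tree.MethodNode
import scala.tools.asm.tree.analysis.{BasicInterpreter, BasicValue}

def aliasesOfThisAt(classInternalName: String, method: MethodNode, insnIndex: Int): Set[Int] = {
  val analyzer = new AliasingAnalyzer[BasicValue](new BasicInterpreter)
  analyzer.analyze(classInternalName, method)
  // getFrames is indexed by instruction position; newFrame guarantees AliasingFrames.
  val frame = analyzer.getFrames()(insnIndex).asInstanceOf[AliasingFrame[BasicValue]]
  frame.aliasesOf(0) // slot 0 holds `this` in an instance method
}
```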
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala
new file mode 100644
index 0000000000..98e93c125b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala
@@ -0,0 +1,265 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import scala.annotation.switch
+import scala.tools.asm.Opcodes._
+import scala.tools.asm.Type
+import scala.tools.asm.tree.{MultiANewArrayInsnNode, InvokeDynamicInsnNode, MethodInsnNode, AbstractInsnNode}
+import scala.tools.asm.tree.analysis.{Frame, Value}
+import opt.BytecodeUtils._
+import collection.immutable
+
+object InstructionStackEffect {
+ private var cache: immutable.IntMap[(Int, Int)] = immutable.IntMap.empty
+ private def t(x: Int, y: Int): (Int, Int) = {
+ // x can go up to 255 (number of parameters of a method, dimensions in multianewarray); we cache
+ // x up to 10, which covers most cases and limits the cache. y doesn't go above 6 (see cases).
+ if (x > 10 || y > 6) (x, y)
+ else {
+ val key = (x << 8) + y // this would work for any x < 256
+ if (cache contains key) {
+ cache(key)
+ } else {
+ val r = (x, y)
+ cache += key -> r
+ r
+ }
+ }
+ }
+
+ /**
+ * Returns a pair with the number of stack values consumed and produced by `insn`.
+ * This method requires the `frame` to be in the state **before** executing / interpreting
+ * the `insn`.
+ */
+ def apply[V <: Value](insn: AbstractInsnNode, frame: Frame[V]): (Int, Int) = {
+ def peekStack(n: Int): V = frame.peekStack(n)
+
+ (insn.getOpcode: @switch) match {
+ // The order of opcodes is the same as in Frame.execute.
+ case NOP => t(0, 0)
+
+ case ACONST_NULL |
+ ICONST_M1 |
+ ICONST_0 |
+ ICONST_1 |
+ ICONST_2 |
+ ICONST_3 |
+ ICONST_4 |
+ ICONST_5 |
+ LCONST_0 |
+ LCONST_1 |
+ FCONST_0 |
+ FCONST_1 |
+ FCONST_2 |
+ DCONST_0 |
+ DCONST_1 |
+ BIPUSH |
+ SIPUSH |
+ LDC |
+ ILOAD |
+ LLOAD |
+ FLOAD |
+ DLOAD |
+ ALOAD => t(0, 1)
+
+ case IALOAD |
+ LALOAD |
+ FALOAD |
+ DALOAD |
+ AALOAD |
+ BALOAD |
+ CALOAD |
+ SALOAD => t(2, 1)
+
+ case ISTORE |
+ LSTORE |
+ FSTORE |
+ DSTORE |
+ ASTORE => t(1, 0)
+
+ case IASTORE |
+ LASTORE |
+ FASTORE |
+ DASTORE |
+ AASTORE |
+ BASTORE |
+ CASTORE |
+ SASTORE => t(3, 0)
+
+ case POP => t(1, 0)
+
+ case POP2 =>
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(1, 0) else t(2, 0)
+
+ case DUP => t(0, 1)
+
+ case DUP_X1 => t(2, 3)
+
+ case DUP_X2 =>
+ val isSize2 = peekStack(1).getSize == 2
+ if (isSize2) t(2, 3) else t(3, 4)
+
+ case DUP2 =>
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(0, 1) else t(0, 2)
+
+ case DUP2_X1 =>
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(2, 3) else t(3, 4)
+
+ case DUP2_X2 =>
+ val v1isSize2 = peekStack(0).getSize == 2
+ if (v1isSize2) {
+ val v2isSize2 = peekStack(1).getSize == 2
+ if (v2isSize2) t(2, 3) else t(3, 4)
+ } else {
+ val v3isSize2 = peekStack(2).getSize == 2
+ if (v3isSize2) t(3, 5) else t(4, 6)
+ }
+
+ case SWAP => t(2, 2)
+
+ case IADD |
+ LADD |
+ FADD |
+ DADD |
+ ISUB |
+ LSUB |
+ FSUB |
+ DSUB |
+ IMUL |
+ LMUL |
+ FMUL |
+ DMUL |
+ IDIV |
+ LDIV |
+ FDIV |
+ DDIV |
+ IREM |
+ LREM |
+ FREM |
+ DREM => t(2, 1)
+
+ case INEG |
+ LNEG |
+ FNEG |
+ DNEG => t(1, 1)
+
+ case ISHL |
+ LSHL |
+ ISHR |
+ LSHR |
+ IUSHR |
+ LUSHR |
+ IAND |
+ LAND |
+ IOR |
+ LOR |
+ IXOR |
+ LXOR => t(2, 1)
+
+ case IINC => t(0, 0)
+
+ case I2L |
+ I2F |
+ I2D |
+ L2I |
+ L2F |
+ L2D |
+ F2I |
+ F2L |
+ F2D |
+ D2I |
+ D2L |
+ D2F |
+ I2B |
+ I2C |
+ I2S => t(1, 1)
+
+ case LCMP |
+ FCMPL |
+ FCMPG |
+ DCMPL |
+ DCMPG => t(2, 1)
+
+ case IFEQ |
+ IFNE |
+ IFLT |
+ IFGE |
+ IFGT |
+ IFLE => t(1, 0)
+
+ case IF_ICMPEQ |
+ IF_ICMPNE |
+ IF_ICMPLT |
+ IF_ICMPGE |
+ IF_ICMPGT |
+ IF_ICMPLE |
+ IF_ACMPEQ |
+ IF_ACMPNE => t(2, 0)
+
+ case GOTO => t(0, 0)
+
+ case JSR => t(0, 1)
+
+ case RET => t(0, 0)
+
+ case TABLESWITCH |
+ LOOKUPSWITCH => t(1, 0)
+
+ case IRETURN |
+ LRETURN |
+ FRETURN |
+ DRETURN |
+ ARETURN => t(1, 0) // Frame.execute consumes one stack value
+
+ case RETURN => t(0, 0) // Frame.execute does not change the stack
+
+ case GETSTATIC => t(0, 1)
+
+ case PUTSTATIC => t(1, 0)
+
+ case GETFIELD => t(1, 1)
+
+ case PUTFIELD => t(2, 0)
+
+ case INVOKEVIRTUAL |
+ INVOKESPECIAL |
+ INVOKESTATIC |
+ INVOKEINTERFACE =>
+ val desc = insn.asInstanceOf[MethodInsnNode].desc
+ val cons = Type.getArgumentTypes(desc).length + (if (insn.getOpcode == INVOKESTATIC) 0 else 1)
+ val prod = if (Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1
+ t(cons, prod)
+
+ case INVOKEDYNAMIC =>
+ val desc = insn.asInstanceOf[InvokeDynamicInsnNode].desc
+ val cons = Type.getArgumentTypes(desc).length
+ val prod = if (Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1
+ t(cons, prod)
+
+ case NEW => t(0, 1)
+
+ case NEWARRAY |
+ ANEWARRAY |
+ ARRAYLENGTH => t(1, 1)
+
+ case ATHROW => t(1, 0) // Frame.execute consumes one stack value
+
+ case CHECKCAST |
+ INSTANCEOF => t(1, 1) // Frame.execute does push(pop()) for both of them
+
+ case MONITORENTER |
+ MONITOREXIT => t(1, 0)
+
+ case MULTIANEWARRAY => t(insn.asInstanceOf[MultiANewArrayInsnNode].dims, 1)
+
+ case IFNULL |
+ IFNONNULL => t(1, 0)
+ }
+ }
+
+}
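
A short sketch of the calling convention (the helper below is an assumption for illustration): the frame passed to `InstructionStackEffect` must be the state before the instruction, as the doc comment above requires.

```scala
// Hypothetical helper around InstructionStackEffect.
import scala.tools.asm.tree.MethodNode
import scala.tools.asm.tree.analysis.{Analyzer, BasicInterpreter, BasicValue}

def stackEffectAt(classInternalName: String, method: MethodNode, insnIndex: Int): (Int, Int) = {
  val analyzer = new Analyzer[BasicValue](new BasicInterpreter)
  analyzer.analyze(classInternalName, method)
  val frameBefore = analyzer.getFrames()(insnIndex) // state *before* executing the instruction
  val insn = method.instructions.get(insnIndex)
  InstructionStackEffect(insn, frameBefore)         // e.g. (2, 1) for IADD, (1, 0) for ASTORE
}
```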
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala
new file mode 100644
index 0000000000..31710dcbee
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala
@@ -0,0 +1,282 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import java.util
+
+import scala.annotation.switch
+import scala.tools.asm.{Type, Opcodes}
+import scala.tools.asm.tree.{MethodInsnNode, LdcInsnNode, AbstractInsnNode}
+import scala.tools.asm.tree.analysis.{Frame, Analyzer, Interpreter, Value}
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils
+import BytecodeUtils._
+
+/**
+ * Some notes on the ASM analyzer framework.
+ *
+ * Value
+ * - Abstract, needs to be implemented for each analysis.
+ * - Represents the desired information about local variables and stack values, for example:
+ * - Is this value known to be null / not null?
+ * - What are the instructions that could potentially have produced this value?
+ *
+ * Interpreter
+ * - Abstract, needs to be implemented for each analysis. Sometimes one can subclass an existing
+ * interpreter, e.g., SourceInterpreter or BasicInterpreter.
+ * - Multiple abstract methods that receive an instruction and the instruction's input values, and
+ * return a value representing the result of that instruction.
+ * - Note: due to control flow, the interpreter can be invoked multiple times for the same
+ * instruction, until reaching a fixed point.
+ * - Abstract `merge` function that computes the least upper bound of two values. Used by
+ * Frame.merge (see below).
+ *
+ * Frame
+ * - Can be used directly for many analyses, no subclass required.
+ * - Every frame has an array of values: one for each local variable and for each stack slot.
+ * - A `top` index stores the index of the current stack top
+ * - NOTE: for a size-2 local variable at index i, the local variable at i+1 is set to an empty
+ * value. However, for a size-2 value at index i on the stack, the value at i+1 holds the next
+ * stack value.
+ * - Defines the `execute(instruction)` method.
+ * - executing mutates the state of the frame according to the effect of the instruction
+ * - pop consumed values from the stack
+ * - pass them to the interpreter together with the instruction
+ * - if applicable, push the resulting value on the stack
+ * - Defines the `merge(otherFrame)` method
+ * - called by the analyzer when multiple control flow paths lead to an instruction
+ * - the frame at the branching instruction is merged into the current frame of the
+ * instruction (held by the analyzer)
+ * - mutates the values of the current frame, merges all values using interpreter.merge.
+ *
+ * Analyzer
+ * - Stores a frame for each instruction
+ * - `merge` function takes an instruction and a frame, merges the existing frame for that instr
+ * (from the frames array) with the new frame passed as argument.
+ * if the frame changed, puts the instruction on the work queue (fixpoint).
+ * - initial frame: initialized for first instr by calling interpreter.new[...]Value
+ * for each slot (locals and params), stored in frames[firstInstr] by calling `merge`
+ * - work queue of instructions (`queue` array, `top` index for next instruction to analyze)
+ * - analyze(method): simulate control flow. while work queue non-empty:
+ * - copy the state of `frames[instr]` into a local frame `current`
+ * - call `current.execute(instr, interpreter)`, mutating the `current` frame
+ * - if it's a branching instruction
+ * - for all potential destination instructions
+ * - merge the destination instruction frame with the `current` frame
+ * (this enqueues the destination instr if its frame changed)
+ * - invoke `newControlFlowEdge` (see below)
+ * - the analyzer also tracks active exception handlers at each instruction
+ * - the empty method `newControlFlowEdge` can be overridden to track control flow if required
+ *
+ *
+ * Some notes on nullness analysis.
+ *
+ * For an instance method, `this` is non-null at entry. So we have to return a NotNull value when
+ * the analyzer is initializing the first frame of a method (see above). This required a change of
+ * the analyzer: before it would simply call `interpreter.newValue`, where we don't have the
+ * required context. See https://github.com/scala/scala-asm/commit/8133d75032.
+ *
+ * After some operations we know that a certain value is not null (e.g. the receiver of an instance
+ * call). However, the receiver is a value on the stack and consumed while interpreting the
+ * instruction - so we can only gain some knowledge if we know that the receiver was an alias of
+ * some other local variable or stack slot. Therefore we use the AliasingFrame class.
+ *
+ * TODO:
+ * Finally, we'd also like to exploit the knowledge gained from `if (x == null)` tests: x is known
+ * to be null in one branch, not null in the other. This will make use of alias tracking as well.
+ * We still have to figure out how to do this exactly in the analyzer framework.
+ */
+
+/**
+ * Type to represent nullness of values.
+ */
+sealed trait Nullness {
+ final def merge(other: Nullness) = if (this == other) this else Unknown
+}
+case object NotNull extends Nullness
+case object Unknown extends Nullness
+case object Null extends Nullness
+
+/**
+ * Represents the nullness state for a local variable or stack value.
+ *
+ * Note that nullness of primitive values is not tracked, it will always be [[Unknown]].
+ */
+sealed trait NullnessValue extends Value {
+ /**
+ * The nullness of this value.
+ */
+ def nullness: Nullness
+
+ /**
+ * True if this value is a long or double. The Analyzer framework needs to know
+ * the size of each value when interpreting instructions, see `Frame.execute`.
+ */
+ def isSize2: Boolean
+ /**
+ * The size of the slot described by this value. Cannot be 0 because no values are allocated
+ * for void-typed slots, see NullnessInterpreter.newValue.
+ **/
+ def getSize: Int = if (isSize2) 2 else 1
+
+ def merge(other: NullnessValue) = NullnessValue(nullness merge other.nullness, isSize2)
+}
+
+object NullValue extends NullnessValue { def nullness = Null; def isSize2 = false; override def toString = "Null" }
+object UnknownValue1 extends NullnessValue { def nullness = Unknown; def isSize2 = false; override def toString = "Unknown1" }
+object UnknownValue2 extends NullnessValue { def nullness = Unknown; def isSize2 = true; override def toString = "Unknown2" }
+object NotNullValue extends NullnessValue { def nullness = NotNull; def isSize2 = false; override def toString = "NotNull" }
+
+object NullnessValue {
+ def apply(nullness: Nullness, isSize2: Boolean): NullnessValue = {
+ if (nullness == Null) NullValue
+ else if (nullness == NotNull) NotNullValue
+ else if (isSize2) UnknownValue2
+ else UnknownValue1
+ }
+
+ def apply(nullness: Nullness, insn: AbstractInsnNode): NullnessValue = {
+ apply(nullness, isSize2 = BytecodeUtils.instructionResultSize(insn) == 2)
+ }
+}
+
+final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5) {
+ def newValue(tp: Type): NullnessValue = {
+ // ASM loves giving semantics to null. The behavior here is the same as in SourceInterpreter,
+ // which is provided by the framework.
+ //
+ // (1) For the void type, the ASM framework expects newValue to return `null`.
+ // Also, the Frame.returnValue field is `null` for methods with return type void.
+ // Example callsite passing VOID_TYPE: in Analyzer, `newValue(Type.getReturnType(m.desc))`.
+ //
+ // (2) `tp` may also be `null`. When creating the initial frame, the analyzer invokes
+ // `newValue(null)` for each local variable. We have to return a value of size 1.
+ if (tp == Type.VOID_TYPE) null // (1)
+ else NullnessValue(Unknown, isSize2 = tp != null /*(2)*/ && tp.getSize == 2 )
+ }
+
+ override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): NullnessValue = {
+ // For instance methods, the `this` parameter is known to be not null.
+ if (isInstanceMethod && local == 0) NullnessValue(NotNull, isSize2 = false)
+ else super.newParameterValue(isInstanceMethod, local, tp)
+ }
+
+ def newOperation(insn: AbstractInsnNode): NullnessValue = {
+ val nullness = (insn.getOpcode: @switch) match {
+ case Opcodes.ACONST_NULL => Null
+
+ case Opcodes.LDC => insn.asInstanceOf[LdcInsnNode].cst match {
+ case _: String | _: Type => NotNull
+ case _ => Unknown
+ }
+
+ case _ => Unknown
+ }
+
+ // for Opcodes.NEW, we use Unknown. The value will become NotNull after the constructor call.
+ NullnessValue(nullness, insn)
+ }
+
+ def copyOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = value
+
+ def unaryOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = (insn.getOpcode: @switch) match {
+ case Opcodes.CHECKCAST => value
+
+ case Opcodes.NEWARRAY |
+ Opcodes.ANEWARRAY => NullnessValue(NotNull, isSize2 = false)
+
+ case _ => NullnessValue(Unknown, insn)
+ }
+
+ def binaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue): NullnessValue = {
+ NullnessValue(Unknown, insn)
+ }
+
+ def ternaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue, value3: NullnessValue): NullnessValue = {
+ NullnessValue(Unknown, isSize2 = false)
+ }
+
+ def naryOperation(insn: AbstractInsnNode, values: util.List[_ <: NullnessValue]): NullnessValue = (insn.getOpcode: @switch) match {
+ case Opcodes.MULTIANEWARRAY =>
+ NullnessValue(NotNull, isSize2 = false)
+
+ case _ =>
+ // TODO: use a list of methods that are known to return non-null values
+ NullnessValue(Unknown, insn)
+ }
+
+ def returnOperation(insn: AbstractInsnNode, value: NullnessValue, expected: NullnessValue): Unit = ()
+
+ def merge(a: NullnessValue, b: NullnessValue): NullnessValue = a merge b
+}
+
+class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessValue](nLocals, nStack) {
+ // Auxiliary constructor required for implementing `NullnessAnalyzer.newFrame`
+ def this(src: Frame[_ <: NullnessValue]) {
+ this(src.getLocals, src.getMaxStackSize)
+ init(src)
+ }
+
+ override def execute(insn: AbstractInsnNode, interpreter: Interpreter[NullnessValue]): Unit = {
+ import Opcodes._
+
+ // get the object id of the object that is known to be not-null after this operation
+ val nullCheckedAliasId: Long = (insn.getOpcode: @switch) match {
+ case IALOAD |
+ LALOAD |
+ FALOAD |
+ DALOAD |
+ AALOAD |
+ BALOAD |
+ CALOAD |
+ SALOAD =>
+ aliasId(this.stackTop - 1)
+
+ case IASTORE |
+ FASTORE |
+ AASTORE |
+ BASTORE |
+ CASTORE |
+ SASTORE |
+ LASTORE |
+ DASTORE =>
+ aliasId(this.stackTop - 2)
+
+ case GETFIELD =>
+ aliasId(this.stackTop)
+
+ case PUTFIELD =>
+ aliasId(this.stackTop - 1)
+
+ case INVOKEVIRTUAL |
+ INVOKESPECIAL |
+ INVOKEINTERFACE =>
+ val desc = insn.asInstanceOf[MethodInsnNode].desc
+ val numArgs = Type.getArgumentTypes(desc).length
+ aliasId(this.stackTop - numArgs)
+
+ case ARRAYLENGTH |
+ MONITORENTER |
+ MONITOREXIT =>
+ aliasId(this.stackTop)
+
+ case _ =>
+ -1
+ }
+
+ super.execute(insn, interpreter)
+
+ if (nullCheckedAliasId != -1) {
+ for (i <- valuesWithAliasId(nullCheckedAliasId))
+ this.setValue(i, NotNullValue)
+ }
+ }
+}
+
+/**
+ * This class is required to override the `newFrame` methods, which makes sure the analyzer
+ * uses NullnessFrames.
+ */
+class NullnessAnalyzer extends Analyzer[NullnessValue](new NullnessInterpreter) {
+ override def newFrame(nLocals: Int, nStack: Int): NullnessFrame = new NullnessFrame(nLocals, nStack)
+ override def newFrame(src: Frame[_ <: NullnessValue]): NullnessFrame = new NullnessFrame(src)
+}
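
To make the intended use concrete, here is a minimal test-style sketch (names assumed) that analyzes a method and checks that `this` starts out as NotNull, as described in the notes above.

```scala
// Hypothetical check built on the classes above.
import scala.tools.asm.tree.MethodNode

def thisNotNullAtEntry(classInternalName: String, method: MethodNode): Boolean = {
  val analyzer = new NullnessAnalyzer
  analyzer.analyze(classInternalName, method)
  // local 0 of an instance method is `this`; newParameterValue marked it NotNull
  analyzer.getFrames()(0).getLocal(0).nullness == NotNull
}
```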
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
index 201ab15177..9bd016f964 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
@@ -170,6 +170,8 @@ object BytecodeUtils {
new InsnNode(op)
}
+ def instructionResultSize(instruction: AbstractInsnNode) = InstructionResultSize(instruction)
+
def labelReferences(method: MethodNode): Map[LabelNode, Set[AnyRef]] = {
val res = mutable.Map.empty[LabelNode, Set[AnyRef]]
def add(l: LabelNode, ref: AnyRef) = if (res contains l) res(l) = res(l) + ref else res(l) = Set(ref)
@@ -328,13 +330,38 @@ object BytecodeUtils {
class AsmAnalyzer[V <: Value](methodNode: MethodNode, classInternalName: InternalName, interpreter: Interpreter[V] = new BasicInterpreter) {
val analyzer = new Analyzer(interpreter)
analyzer.analyze(classInternalName, methodNode)
- def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.getFrames()(methodNode.instructions.indexOf(instruction))
+ def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode)
+ }
+
+ implicit class AnalyzerExtensions[V <: Value](val analyzer: Analyzer[V]) extends AnyVal {
+ def frameAt(instruction: AbstractInsnNode, methodNode: MethodNode): Frame[V] = analyzer.getFrames()(methodNode.instructions.indexOf(instruction))
}
- implicit class `frame extensions`[V <: Value](val frame: Frame[V]) extends AnyVal {
- def peekDown(n: Int): V = {
- val topIndex = frame.getStackSize - 1
- frame.getStack(topIndex - n)
+ implicit class FrameExtensions[V <: Value](val frame: Frame[V]) extends AnyVal {
+ /**
+ * The value `n` positions down the stack.
+ */
+ def peekStack(n: Int): V = frame.getStack(frame.getStackSize - 1 - n)
+
+ /**
+ * The index of the current stack top.
+ */
+ def stackTop = frame.getLocals + frame.getStackSize - 1
+
+ /**
+ * Gets the value at slot i, where i may be a local or a stack index.
+ */
+ def getValue(i: Int): V = {
+ if (i < frame.getLocals) frame.getLocal(i)
+ else frame.getStack(i - frame.getLocals)
+ }
+
+ /**
+ * Sets the value at slot i, where i may be a local or a stack index.
+ */
+ def setValue(i: Int, value: V): Unit = {
+ if (i < frame.getLocals) frame.setLocal(i, value)
+ else frame.setStack(i - frame.getLocals, value)
}
}
}
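
The extensions above treat a frame's slots as locals followed by stack entries; a small sketch of that convention (assuming an already-analyzed frame):

```scala
import scala.tools.asm.tree.analysis.{BasicValue, Frame}
import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._

// Indices [0, getLocals) are locals, [getLocals, getLocals + getStackSize) are stack slots.
def describeStackTop(frame: Frame[BasicValue]): String = {
  val top = frame.stackTop          // == frame.getLocals + frame.getStackSize - 1
  val topValue = frame.peekStack(0) // same slot as frame.getValue(top)
  s"stack top is slot $top holding $topValue"
}
```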
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
index 028f0f8fa6..0932564b1f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
@@ -8,12 +8,15 @@ package backend.jvm
package opt
import scala.reflect.internal.util.{NoPosition, Position}
+import scala.tools.asm.tree.analysis.{Value, Analyzer, BasicInterpreter}
+import scala.tools.asm.{Opcodes, Type}
import scala.tools.asm.tree._
import scala.collection.convert.decorateAsScala._
-import scala.tools.nsc.backend.jvm.BTypes.{MethodInlineInfo, InternalName}
+import scala.tools.nsc.backend.jvm.BTypes.InternalName
import scala.tools.nsc.backend.jvm.BackendReporting._
-import scala.tools.nsc.backend.jvm.opt.BytecodeUtils.AsmAnalyzer
+import scala.tools.nsc.backend.jvm.analysis.{NotNull, NullnessAnalyzer}
import ByteCodeRepository.{Source, CompilationUnit}
+import BytecodeUtils._
class CallGraph[BT <: BTypes](val btypes: BT) {
import btypes._
@@ -93,12 +96,25 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
// TODO: run dataflow analyses to make the call graph more precise
// - producers to get forwarded parameters (ForwardedParam)
// - typeAnalysis for more precise argument types, more precise callee
- // - nullAnalysis to skip emitting the receiver-null-check when inlining
- // TODO: for now we run a basic analyzer to get the stack height at the call site.
- // once we run a more elaborate analyzer (types, nullness), we can get the stack height out of there.
+ // For now we run a NullnessAnalyzer. It is used to determine if the receiver of an instance
+ // call is known to be not-null, in which case we don't have to emit a null check when inlining.
+ // It is also used to get the stack height at the call site.
localOpt.minimalRemoveUnreachableCode(methodNode, definingClass.internalName)
- val analyzer = new AsmAnalyzer(methodNode, definingClass.internalName)
+
+ val analyzer: Analyzer[_ <: Value] = {
+ if (compilerSettings.YoptNullnessTracking) new NullnessAnalyzer
+ else new Analyzer(new BasicInterpreter)
+ }
+ analyzer.analyze(definingClass.internalName, methodNode)
+
+ def receiverNotNullByAnalysis(call: MethodInsnNode, numArgs: Int) = analyzer match {
+ case nullnessAnalyzer: NullnessAnalyzer =>
+ val frame = nullnessAnalyzer.frameAt(call, methodNode)
+ frame.getStack(frame.getStackSize - 1 - numArgs).nullness == NotNull
+
+ case _ => false
+ }
methodNode.instructions.iterator.asScala.collect({
case call: MethodInsnNode =>
@@ -126,13 +142,19 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
Nil
}
+ val receiverNotNull = call.getOpcode == Opcodes.INVOKESTATIC || {
+ val numArgs = Type.getArgumentTypes(call.desc).length
+ receiverNotNullByAnalysis(call, numArgs)
+ }
+
Callsite(
callsiteInstruction = call,
callsiteMethod = methodNode,
callsiteClass = definingClass,
callee = callee,
argInfos = argInfos,
- callsiteStackHeight = analyzer.frameAt(call).getStackSize,
+ callsiteStackHeight = analyzer.frameAt(call, methodNode).getStackSize,
+ receiverKnownNotNull = receiverNotNull,
callsitePosition = callsitePositions.getOrElse(call, NoPosition)
)
}).toList
@@ -154,7 +176,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
*/
final case class Callsite(callsiteInstruction: MethodInsnNode, callsiteMethod: MethodNode, callsiteClass: ClassBType,
callee: Either[OptimizerWarning, Callee], argInfos: List[ArgInfo],
- callsiteStackHeight: Int, callsitePosition: Position) {
+ callsiteStackHeight: Int, receiverKnownNotNull: Boolean, callsitePosition: Position) {
override def toString =
"Invocation of" +
s" ${callee.map(_.calleeDeclarationClass.internalName).getOrElse("?")}.${callsiteInstruction.name + callsiteInstruction.desc}" +
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
index ac5c9ce2e6..814c78b69c 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
@@ -49,7 +49,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
if (callGraph.callsites contains request.callsiteInstruction) {
val r = inline(request.callsiteInstruction, request.callsiteStackHeight, request.callsiteMethod, request.callsiteClass,
callee.callee, callee.calleeDeclarationClass,
- receiverKnownNotNull = false, keepLineNumbers = false)
+ request.receiverKnownNotNull, keepLineNumbers = false)
for (warning <- r) {
if ((callee.annotatedInline && btypes.compilerSettings.YoptWarningEmitAtInlineFailed) || warning.emitWarning(compilerSettings)) {
@@ -89,7 +89,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
*/
def selectCallsitesForInlining: List[Callsite] = {
callsites.valuesIterator.filter({
- case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, pos) =>
+ case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, _, pos) =>
val res = doInlineCallsite(callsite)
if (!res) {
@@ -112,7 +112,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
res
- case Callsite(ins, _, _, Left(warning), _, _, pos) =>
+ case Callsite(ins, _, _, Left(warning), _, _, _, pos) =>
if (warning.emitWarning(compilerSettings))
backendReporting.inlinerWarning(pos, s"failed to determine if ${ins.name} should be inlined:\n$warning")
false
@@ -123,7 +123,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
* The current inlining heuristics are simple: inline calls to methods annotated @inline.
*/
def doInlineCallsite(callsite: Callsite): Boolean = callsite match {
- case Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, pos) =>
+ case Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, _, pos) =>
if (compilerSettings.YoptInlineHeuristics.value == "everything") safeToInline
else annotatedInline && safeToInline
@@ -189,7 +189,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
// there's no need to run eliminateUnreachableCode here. building the call graph does that
// already, no code can become unreachable in the meantime.
val analyzer = new AsmAnalyzer(callsite.callsiteMethod, callsite.callsiteClass.internalName, new SourceInterpreter)
- val receiverValue = analyzer.frameAt(callsite.callsiteInstruction).peekDown(traitMethodArgumentTypes.length)
+ val receiverValue = analyzer.frameAt(callsite.callsiteInstruction).peekStack(traitMethodArgumentTypes.length)
for (i <- receiverValue.insns.asScala) {
val cast = new TypeInsnNode(CHECKCAST, selfParamType.internalName)
callsite.callsiteMethod.instructions.insert(i, cast)
@@ -215,6 +215,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
calleeInfoWarning = infoWarning)),
argInfos = Nil,
callsiteStackHeight = callsite.callsiteStackHeight,
+ receiverKnownNotNull = callsite.receiverKnownNotNull,
callsitePosition = callsite.callsitePosition
)
callGraph.callsites(newCallsiteInstruction) = staticCallsite
@@ -400,7 +401,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
val inlinedReturn = instructionMap(originalReturn)
val returnReplacement = new InsnList
- def drop(slot: Int) = returnReplacement add getPop(frame.peekDown(slot).getSize)
+ def drop(slot: Int) = returnReplacement add getPop(frame.peekStack(slot).getSize)
// for non-void methods, store the stack top into the return local variable
if (hasReturnValue) {
@@ -444,6 +445,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
callee = originalCallsite.callee,
argInfos = Nil, // TODO: re-compute argInfos for new destination (once we actually compute them)
callsiteStackHeight = callsiteStackHeight + originalCallsite.callsiteStackHeight,
+ receiverKnownNotNull = originalCallsite.receiverKnownNotNull,
callsitePosition = originalCallsite.callsitePosition
)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala
new file mode 100644
index 0000000000..8d744f6d13
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala
@@ -0,0 +1,240 @@
+package scala.tools.nsc.backend.jvm.opt
+
+import scala.annotation.switch
+import scala.tools.asm.{Handle, Type, Opcodes}
+import scala.tools.asm.tree._
+
+object InstructionResultSize {
+ import Opcodes._
+ def apply(instruction: AbstractInsnNode): Int = (instruction.getOpcode: @switch) match {
+ // The order of opcodes is (almost) the same as in Opcodes.java
+ case ACONST_NULL => 1
+
+ case ICONST_M1 |
+ ICONST_0 |
+ ICONST_1 |
+ ICONST_2 |
+ ICONST_3 |
+ ICONST_4 |
+ ICONST_5 => 1
+
+ case LCONST_0 |
+ LCONST_1 => 2
+
+ case FCONST_0 |
+ FCONST_1 |
+ FCONST_2 => 1
+
+ case DCONST_0 |
+ DCONST_1 => 2
+
+ case BIPUSH |
+ SIPUSH => 1
+
+ case LDC =>
+ instruction.asInstanceOf[LdcInsnNode].cst match {
+ case _: java.lang.Integer |
+ _: java.lang.Float |
+ _: String |
+ _: Type |
+ _: Handle => 1
+
+ case _: java.lang.Long |
+ _: java.lang.Double => 2
+ }
+
+ case ILOAD |
+ FLOAD |
+ ALOAD => 1
+
+ case LLOAD |
+ DLOAD => 2
+
+ case IALOAD |
+ FALOAD |
+ AALOAD |
+ BALOAD |
+ CALOAD |
+ SALOAD => 1
+
+ case LALOAD |
+ DALOAD => 2
+
+ case ISTORE |
+ LSTORE |
+ FSTORE |
+ DSTORE |
+ ASTORE => 0
+
+ case IASTORE |
+ LASTORE |
+ FASTORE |
+ DASTORE |
+ AASTORE |
+ BASTORE |
+ CASTORE |
+ SASTORE => 0
+
+ case POP |
+ POP2 => 0
+
+ case DUP |
+ DUP_X1 |
+ DUP_X2 |
+ DUP2 |
+ DUP2_X1 |
+ DUP2_X2 |
+ SWAP => throw new IllegalArgumentException("Can't compute the size of DUP/SWAP without knowing what's on stack top")
+
+ case IADD |
+ FADD => 1
+
+ case LADD |
+ DADD => 2
+
+ case ISUB |
+ FSUB => 1
+
+ case LSUB |
+ DSUB => 2
+
+ case IMUL |
+ FMUL => 1
+
+ case LMUL |
+ DMUL => 2
+
+ case IDIV |
+ FDIV => 1
+
+ case LDIV |
+ DDIV => 2
+
+ case IREM |
+ FREM => 1
+
+ case LREM |
+ DREM => 2
+
+ case INEG |
+ FNEG => 1
+
+ case LNEG |
+ DNEG => 2
+
+ case ISHL |
+ ISHR => 1
+
+ case LSHL |
+ LSHR => 2
+
+ case IUSHR => 1
+
+ case LUSHR => 2
+
+ case IAND |
+ IOR |
+ IXOR => 1
+
+ case LAND |
+ LOR |
+ LXOR => 2
+
+ case IINC => 1
+
+ case I2F |
+ L2I |
+ L2F |
+ F2I |
+ D2I |
+ D2F |
+ I2B |
+ I2C |
+ I2S => 1
+
+ case I2L |
+ I2D |
+ L2D |
+ F2L |
+ F2D |
+ D2L => 2
+
+ case LCMP |
+ FCMPL |
+ FCMPG |
+ DCMPL |
+ DCMPG => 1
+
+ case IFEQ |
+ IFNE |
+ IFLT |
+ IFGE |
+ IFGT |
+ IFLE => 0
+
+ case IF_ICMPEQ |
+ IF_ICMPNE |
+ IF_ICMPLT |
+ IF_ICMPGE |
+ IF_ICMPGT |
+ IF_ICMPLE |
+ IF_ACMPEQ |
+ IF_ACMPNE => 0
+
+ case GOTO => 0
+
+ case JSR => throw new IllegalArgumentException("Subroutines are not supported.")
+
+ case RET => 0
+
+ case TABLESWITCH |
+ LOOKUPSWITCH => 0
+
+ case IRETURN |
+ FRETURN |
+ ARETURN => 1
+
+ case LRETURN |
+ DRETURN => 2
+
+ case RETURN => 0
+
+ case GETSTATIC => Type.getType(instruction.asInstanceOf[FieldInsnNode].desc).getSize
+
+ case PUTSTATIC => 0
+
+ case GETFIELD => Type.getType(instruction.asInstanceOf[FieldInsnNode].desc).getSize
+
+ case PUTFIELD => 0
+
+ case INVOKEVIRTUAL |
+ INVOKESPECIAL |
+ INVOKESTATIC |
+ INVOKEINTERFACE =>
+ val desc = instruction.asInstanceOf[MethodInsnNode].desc
+ Type.getReturnType(desc).getSize
+
+ case INVOKEDYNAMIC =>
+ val desc = instruction.asInstanceOf[InvokeDynamicInsnNode].desc
+ Type.getReturnType(desc).getSize
+
+ case NEW => 1
+
+ case NEWARRAY |
+ ANEWARRAY |
+ ARRAYLENGTH => 1
+
+ case ATHROW => 0
+
+ case CHECKCAST |
+ INSTANCEOF => 1
+
+ case MONITORENTER |
+ MONITOREXIT => 0
+
+ case MULTIANEWARRAY => 1
+
+ case IFNULL |
+ IFNONNULL => 0
+ }
+}
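
A two-line sanity check of the table above (plain ASM instruction nodes, illustrative usage):

```scala
import scala.tools.asm.Opcodes
import scala.tools.asm.tree.InsnNode

// LADD pushes a long (2 slots), IADD pushes an int (1 slot).
assert(InstructionResultSize(new InsnNode(Opcodes.LADD)) == 2)
assert(InstructionResultSize(new InsnNode(Opcodes.IADD)) == 1)
```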
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 35ee889c58..953e43eaca 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -234,15 +234,16 @@ trait ScalaSettings extends AbsScalaSettings
val emptyLineNumbers = Choice("empty-line-numbers", "Eliminate unnecessary line number information.")
val emptyLabels = Choice("empty-labels", "Eliminate and collapse redundant labels in the bytecode.")
val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.")
- val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled")
- val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath")
+ val nullnessTracking = Choice("nullness-tracking", "Track nullness / non-nullness of local variables and apply optimizations.")
+ val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled.")
+ val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath.")
val lNone = Choice("l:none", "Don't enable any optimizations.")
private val defaultChoices = List(unreachableCode)
val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString(","), expandsTo = defaultChoices)
- private val methodChoices = List(unreachableCode, simplifyJumps, emptyLineNumbers, emptyLabels, compactLocals)
+ private val methodChoices = List(unreachableCode, simplifyJumps, emptyLineNumbers, emptyLabels, compactLocals, nullnessTracking)
val lMethod = Choice("l:method", "Enable intra-method optimizations: "+ methodChoices.mkString(","), expandsTo = methodChoices)
private val projectChoices = List(lMethod, inlineProject)
@@ -264,6 +265,7 @@ trait ScalaSettings extends AbsScalaSettings
def YoptEmptyLineNumbers = Yopt.contains(YoptChoices.emptyLineNumbers)
def YoptEmptyLabels = Yopt.contains(YoptChoices.emptyLabels)
def YoptCompactLocals = Yopt.contains(YoptChoices.compactLocals)
+ def YoptNullnessTracking = Yopt.contains(YoptChoices.nullnessTracking)
def YoptInlineProject = Yopt.contains(YoptChoices.inlineProject)
def YoptInlineGlobal = Yopt.contains(YoptChoices.inlineGlobal)
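
A sketch of how the new choice is enabled and queried (the argument-parsing snippet is illustrative; on the command line this corresponds to `-Yopt:nullness-tracking`, and `-Yopt:l:method` now expands to it as well):

```scala
import scala.tools.nsc.Settings

val settings = new Settings
settings.processArguments(List("-Yopt:nullness-tracking"), processAll = true)
assert(settings.YoptNullnessTracking) // consulted by CallGraph to pick the NullnessAnalyzer
```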
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index e13aaad7bc..15d0f14938 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
// GENERATED CODE: DO NOT EDIT.
-// genprod generated these sources at: Sun Sep 15 20:42:00 CEST 2013
+// genprod generated these sources at: Mon Jun 08 18:05:40 CEST 2015
package scala
@@ -26,12 +26,6 @@ package scala
* assert(javaVersion() == anonfun0())
* }
* }}}
- *
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-
*/
trait Function0[@specialized(Specializable.Primitives) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 620dcc19aa..572901c6f3 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -25,11 +25,8 @@ package scala
* }
* }}}
*
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-
+ * Note that the difference between `Function1` and [[scala.PartialFunction]]
+ * is that the latter can specify inputs which it will not handle.
*/
@annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.")
trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index 5690adb56a..e2c094ea40 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -25,12 +25,6 @@ package scala
* assert(max(0, 1) == anonfun2(0, 1))
* }
* }}}
- *
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-
*/
trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 254f14f13c..82e38d3549 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -462,6 +462,7 @@ object List extends SeqFactory[List] {
private class SerializationProxy[A](@transient private var orig: List[A]) extends Serializable {
private def writeObject(out: ObjectOutputStream) {
+ out.defaultWriteObject()
var xs: List[A] = orig
while (!xs.isEmpty) {
out.writeObject(xs.head)
@@ -473,6 +474,7 @@ object List extends SeqFactory[List] {
// Java serialization calls this before readResolve during de-serialization.
// Read the whole list and store it in `orig`.
private def readObject(in: ObjectInputStream) {
+ in.defaultReadObject()
val builder = List.newBuilder[A]
while (true) in.readObject match {
case ListSerializeEnd =>
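The two added calls make the proxy follow the conventional custom-serialization pattern: the default (here empty) field data is written and read before the list payload. A minimal sketch of that pattern with hypothetical names (the real proxy streams its elements followed by the `ListSerializeEnd` sentinel):

    import java.io.{ObjectInputStream, ObjectOutputStream}

    case object End                                     // hypothetical end-of-payload marker

    class Proxy[A](@transient private var elems: List[A]) extends Serializable {
      private def writeObject(out: ObjectOutputStream): Unit = {
        out.defaultWriteObject()                        // default serial form first
        elems.foreach(out.writeObject)
        out.writeObject(End)
      }
      private def readObject(in: ObjectInputStream): Unit = {
        in.defaultReadObject()                          // symmetric call when reading
        val b = List.newBuilder[A]
        var done = false
        while (!done) in.readObject() match {
          case End => done = true
          case x   => b += x.asInstanceOf[A]
        }
        elems = b.result()
      }
    }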
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index f1ac161e9a..28e56a6d87 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -12,6 +12,9 @@ package immutable
import mutable.{ Builder, ListBuffer }
+// TODO: Now the specialization exists there is no clear reason to have
+// separate classes for Range/NumericRange. Investigate and consolidate.
+
/** `NumericRange` is a more generic version of the
* `Range` class which works with arbitrary types.
* It must be supplied with an `Integral` implementation of the
@@ -28,9 +31,6 @@ import mutable.{ Builder, ListBuffer }
* assert(r1 sameElements r2.map(_ - veryBig))
* }}}
*
- * TODO: Now the specialization exists there is no clear reason to have
- * separate classes for Range/NumericRange. Investigate and consolidate.
- *
* @author Paul Phillips
* @version 2.8
* @define Coll `NumericRange`
@@ -266,7 +266,7 @@ object NumericRange {
// Numbers may be big.
val one = num.one
val limit = num.fromInt(Int.MaxValue)
- def check(t: T): T =
+ def check(t: T): T =
if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.")
else t
// If the range crosses zero, it might overflow when subtracted
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 98266716cc..53af3ce158 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -56,11 +56,12 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
* @throws java.util.NoSuchElementException if the queue is too short.
*/
override def apply(n: Int): A = {
- val len = out.length
- if (n < len) out.apply(n)
+ val olen = out.length
+ if (n < olen) out.apply(n)
else {
- val m = n - len
- if (m < in.length) in.reverse.apply(m)
+ val m = n - olen
+ val ilen = in.length
+ if (m < ilen) in.apply(ilen - m - 1)
else throw new NoSuchElementException("index out of range")
}
}
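The rewritten branch avoids materialising `in.reverse` just to read one element: `in` holds the rear of the queue in reverse order, so the `m`-th element of `in.reverse` is the `(ilen - m - 1)`-th element of `in`. A small check with hypothetical values:

    val in   = List(5, 4, 3)      // rear of the queue, stored in reverse order
    val m    = 1
    val ilen = in.length
    assert(in.reverse.apply(m) == in.apply(ilen - m - 1))   // both are 4, no reversal needed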
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index cf7b7e272a..7edd36dc22 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -1180,7 +1180,13 @@ object Stream extends SeqFactory[Stream] {
* to streams.
*/
class ConsWrapper[A](tl: => Stream[A]) {
+ /** Construct a stream consisting of a given first element followed by elements
+ * from a lazily evaluated Stream.
+ */
def #::(hd: A): Stream[A] = cons(hd, tl)
+ /** Construct a stream consisting of the concatenation of the given stream and
+ * a lazily evaluated Stream.
+ */
def #:::(prefix: Stream[A]): Stream[A] = prefix append tl
}
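For reference, the newly documented operators in use (standard `Stream` API, shown only as an example):

    val s: Stream[Int] = 1 #:: 2 #:: Stream.empty    // strict head, lazily evaluated tail
    val t: Stream[Int] = Stream(0) #::: s            // lazily prepends the given stream
    assert(t.take(3).toList == List(0, 1, 2))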
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 8faaf97741..f9bab40a1e 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -15,7 +15,7 @@ import immutable.{List, Nil, ::}
import java.io._
import scala.annotation.migration
-/** A `Buffer` implementation back up by a list. It provides constant time
+/** A `Buffer` implementation backed by a list. It provides constant time
* prepend and append. Most other operations are linear.
*
* @author Matthias Zenger
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index d6e2963ad8..6bb35606a6 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -49,7 +49,7 @@ object BigDecimal {
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`, rounding if necessary. */
def decimal(d: Double, mc: MathContext): BigDecimal =
- new BigDecimal(new BigDec(java.lang.Double.toString(d), mc))
+ new BigDecimal(new BigDec(java.lang.Double.toString(d), mc), mc)
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`. */
def decimal(d: Double): BigDecimal = decimal(d, defaultMathContext)
@@ -59,7 +59,7 @@ object BigDecimal {
* `0.1 != 0.1f`.
*/
def decimal(f: Float, mc: MathContext): BigDecimal =
- new BigDecimal(new BigDec(java.lang.Float.toString(f), mc))
+ new BigDecimal(new BigDec(java.lang.Float.toString(f), mc), mc)
/** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`.
* Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and
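The fix threads the `MathContext` into the wrapping `BigDecimal` as well, so later arithmetic keeps rounding with the requested context rather than silently reverting to the default. The restored behaviour is the one exercised by the new `BigDecimalTest` further down, for example:

    import java.math.MathContext

    val x = BigDecimal.decimal(1.23, new MathContext(3))
    // with the context preserved, the sum rounds to 3 significant digits (HALF_UP)
    assert((x + BigDecimal("0.005")).rounded == BigDecimal("1.24"))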
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index eafbf96993..9245798c17 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -134,7 +134,7 @@ object Numeric {
def div(x: Float, y: Float): Float = x / y
}
trait FloatAsIfIntegral extends FloatIsConflicted with Integral[Float] {
- def quot(x: Float, y: Float): Float = (BigDecimal(x) / BigDecimal(y)).floatValue
+ def quot(x: Float, y: Float): Float = (BigDecimal(x) quot BigDecimal(y)).floatValue
def rem(x: Float, y: Float): Float = (BigDecimal(x) remainder BigDecimal(y)).floatValue
}
implicit object FloatIsFractional extends FloatIsFractional with Ordering.FloatOrdering
@@ -158,7 +158,7 @@ object Numeric {
def div(x: Double, y: Double): Double = x / y
}
trait DoubleAsIfIntegral extends DoubleIsConflicted with Integral[Double] {
- def quot(x: Double, y: Double): Double = (BigDecimal(x) / BigDecimal(y)).doubleValue
+ def quot(x: Double, y: Double): Double = (BigDecimal(x) quot BigDecimal(y)).doubleValue
def rem(x: Double, y: Double): Double = (BigDecimal(x) remainder BigDecimal(y)).doubleValue
}
@@ -178,7 +178,7 @@ object Numeric {
def div(x: BigDecimal, y: BigDecimal): BigDecimal = x / y
}
trait BigDecimalAsIfIntegral extends BigDecimalIsConflicted with Integral[BigDecimal] {
- def quot(x: BigDecimal, y: BigDecimal): BigDecimal = x / y
+ def quot(x: BigDecimal, y: BigDecimal): BigDecimal = x quot y
def rem(x: BigDecimal, y: BigDecimal): BigDecimal = x remainder y
}
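`quot` on the `...AsIfIntegral` instances is meant to be truncated (integral-style) division, which `BigDecimal.quot` provides; the previous `/` returned the full fractional quotient. The new `NumericTest` cases below pin this down, e.g.:

    import scala.math.Numeric.{DoubleAsIfIntegral, FloatAsIfIntegral}

    assert(FloatAsIfIntegral.quot(1.0f, 0.3f) == 3.0f)   // truncated quotient, not 3.3333...
    assert(DoubleAsIfIntegral.quot(0.5, 0.15) == 3.0)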
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index 2e021ad9d9..ee2bdbc4a7 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, Ross Judson **
+** / __/ __// _ | / / / _ | (c) 2006-2015, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,518 +9,276 @@
package scala
package util
-import scala.reflect.{ ClassTag, classTag }
-import scala.math.{ Ordering, max, min }
+import scala.reflect.ClassTag
+import scala.math.Ordering
-/** The Sorting object provides functions that can sort various kinds of
- * objects. You can provide a comparison function, or you can request a sort
- * of items that are viewable as [[scala.math.Ordered]]. Some sorts that
- * operate directly on a subset of value types are also provided. These
- * implementations are derived from those in the Sun JDK.
+/** The `Sorting` object provides convenience wrappers for `java.util.Arrays.sort`.
+ * Methods that defer to `java.util.Arrays.sort` say so explicitly, and indicate
+ * under what conditions they do.
*
- * Note that stability doesn't matter for value types, so use the `quickSort`
- * variants for those. `stableSort` is intended to be used with
- * objects when the prior ordering should be preserved, where possible.
+ * `Sorting` also implements a general-purpose quicksort and stable (merge) sort
+ * for those cases where `java.util.Arrays.sort` could only be used at the cost
+ * of a large memory penalty. If performance rather than memory usage is the
+ * primary concern, one may wish to find alternate strategies to use
+ * `java.util.Arrays.sort` directly e.g. by boxing primitives to use
+ * a custom ordering on them.
+ *
+ * `Sorting` provides methods where you can provide a comparison function, or
+ * can request a sort of items that are [[scala.math.Ordered]] or that
+ * otherwise have an implicit or explicit [[scala.math.Ordering]].
+ *
+ * Note also that high-performance non-default sorts for numeric types
+ * are not provided. If this is required, it is advisable to investigate
+ * other libraries that cover this use case.
*
* @author Ross Judson
- * @version 1.0
+ * @author Adriaan Moors
+ * @author Rex Kerr
+ * @version 1.1
*/
object Sorting {
- /** Quickly sort an array of Doubles. */
- def quickSort(a: Array[Double]) { sort1(a, 0, a.length) }
-
- /** Quickly sort an array of items with an implicit Ordering. */
- def quickSort[K: Ordering](a: Array[K]) { sort1(a, 0, a.length) }
-
- /** Quickly sort an array of Ints. */
- def quickSort(a: Array[Int]) { sort1(a, 0, a.length) }
-
- /** Quickly sort an array of Floats. */
- def quickSort(a: Array[Float]) { sort1(a, 0, a.length) }
-
- /** Sort an array of K where K is Ordered, preserving the existing order
- * where the values are equal. */
- def stableSort[K: ClassTag: Ordering](a: Array[K]) {
- stableSort(a, 0, a.length-1, new Array[K](a.length), Ordering[K].lt _)
- }
+ /** Sort an array of Doubles using `java.util.Arrays.sort`. */
+ def quickSort(a: Array[Double]): Unit = java.util.Arrays.sort(a)
- /** Sorts an array of `K` given an ordering function `f`.
- * `f` should return `true` iff its first parameter is strictly less than its second parameter.
- */
- def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean) {
- stableSort(a, 0, a.length-1, new Array[K](a.length), f)
- }
+ /** Sort an array of Ints using `java.util.Arrays.sort`. */
+ def quickSort(a: Array[Int]): Unit = java.util.Arrays.sort(a)
- /** Sorts an arbitrary sequence into an array, given a comparison function
- * that should return `true` iff parameter one is strictly less than parameter two.
- *
- * @param a the sequence to be sorted.
- * @param f the comparison function.
- * @return the sorted sequence of items.
- */
- def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = {
- val ret = a.toArray
- stableSort(ret, f)
- ret
- }
+ /** Sort an array of Floats using `java.util.Arrays.sort`. */
+ def quickSort(a: Array[Float]): Unit = java.util.Arrays.sort(a)
+
+ private final val qsortThreshold = 16
- /** Sorts an arbitrary sequence of items that are viewable as ordered. */
- def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] =
- stableSort(a, Ordering[K].lt _)
-
- /** Stably sorts a sequence of items given an extraction function that will
- * return an ordered key from an item.
- *
- * @param a the sequence to be sorted.
- * @param f the comparison function.
- * @return the sorted sequence of items.
- */
- def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] =
- stableSort(a)(implicitly[ClassTag[K]], Ordering[M] on f)
-
- private def sort1[K: Ordering](x: Array[K], off: Int, len: Int) {
- val ord = Ordering[K]
- import ord._
-
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
- }
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
- i += 1
- a += 1
- b += 1
- }
- }
- def med3(a: Int, b: Int, c: Int) = {
- if (x(a) < x(b)) {
- if (x(b) < x(c)) b else if (x(a) < x(c)) c else a
- } else {
- if (x(b) > x(c)) b else if (x(a) > x(c)) c else a
- }
- }
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j > off && x(j-1) > x(j)) {
- swap(j, j-1)
- j -= 1
+ /** Sort array `a` with quicksort, using the Ordering on its elements.
+ * This algorithm sorts in place, so no additional memory is used aside from
+ * what might be required to box individual elements during comparison.
+ */
+ def quickSort[K: Ordering](a: Array[K]): Unit = {
+ // Must have iN >= i0 or math will fail. Also, i0 >= 0.
+ def inner(a: Array[K], i0: Int, iN: Int, ord: Ordering[K]): Unit = {
+ if (iN - i0 < qsortThreshold) insertionSort(a, i0, iN, ord)
+ else {
+ var iK = (i0 + iN) >>> 1 // Unsigned div by 2
+ // Find index of median of first, central, and last elements
+ var pL =
+ if (ord.compare(a(i0), a(iN - 1)) <= 0)
+ if (ord.compare(a(i0), a(iK)) < 0)
+ if (ord.compare(a(iN - 1), a(iK)) < 0) iN - 1 else iK
+ else i0
+ else
+ if (ord.compare(a(i0), a(iK)) < 0) i0
+ else
+ if (ord.compare(a(iN - 1), a(iK)) <= 0) iN - 1
+ else iK
+ val pivot = a(pL)
+ // pL is the start of the pivot block; move it into the middle if needed
+ if (pL != iK) { a(pL) = a(iK); a(iK) = pivot; pL = iK }
+ // Elements equal to the pivot will be in range pL until pR
+ var pR = pL + 1
+ // Items known to be less than pivot are below iA (range i0 until iA)
+ var iA = i0
+ // Items known to be greater than pivot are at or above iB (range iB until iN)
+ var iB = iN
+ // Scan through everything in the buffer before the pivot(s)
+ while (pL - iA > 0) {
+ val current = a(iA)
+ ord.compare(current, pivot) match {
+ case 0 =>
+ // Swap current out with pivot block
+ a(iA) = a(pL - 1)
+ a(pL - 1) = current
+ pL -= 1
+ case x if x < 0 =>
+ // Already in place. Just update indices.
+ iA += 1
+ case _ if iB > pR =>
+ // Wrong side. There's room on the other side, so swap
+ a(iA) = a(iB - 1)
+ a(iB - 1) = current
+ iB -= 1
+ case _ =>
+ // Wrong side and there is no room. Swap by rotating pivot block.
+ a(iA) = a(pL - 1)
+ a(pL - 1) = a(pR - 1)
+ a(pR - 1) = current
+ pL -= 1
+ pR -= 1
+ iB -= 1
}
- i += 1
}
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
+ // Get anything remaining in buffer after the pivot(s)
+ while (iB - pR > 0) {
+ val current = a(iB - 1)
+ ord.compare(current, pivot) match {
+ case 0 =>
+ // Swap current out with pivot block
+ a(iB - 1) = a(pR)
+ a(pR) = current
+ pR += 1
+ case x if x > 0 =>
+ // Already in place. Just update indices.
+ iB -= 1
+ case _ =>
+ // Wrong side and we already know there is no room. Swap by rotating pivot block.
+ a(iB - 1) = a(pR)
+ a(pR) = a(pL)
+ a(pL) = current
+ iA += 1
+ pL += 1
+ pR += 1
}
- m = med3(l, m, n) // Mid-size, med of 3
}
- val v = x(m)
-
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- while (b <= c && x(b) <= v) {
- if (x(b) equiv v) {
- swap(a, b)
- a += 1
- }
- b += 1
- }
- while (c >= b && x(c) >= v) {
- if (x(c) equiv v) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
+ // Use tail recursion on large half (Sedgewick's method) so we don't blow up the stack if pivots are poorly chosen
+ if (iA - i0 < iN - iB) {
+ inner(a, i0, iA, ord) // True recursion
+ inner(a, iB, iN, ord) // Should be tail recursion
+ }
+ else {
+ inner(a, iB, iN, ord) // True recursion
+ inner(a, i0, iA, ord) // Should be tail recursion
}
-
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
-
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
}
}
- sort2(off, len)
+ inner(a, 0, a.length, implicitly[Ordering[K]])
}
-
- private def sort1(x: Array[Int], off: Int, len: Int) {
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
+
+ private final val mergeThreshold = 32
+
+ // Ordering[T] might be slow especially for boxed primitives, so use binary search variant of insertion sort
+ // Caller must pass iN >= i0 or math will fail. Also, i0 >= 0.
+ private def insertionSort[@specialized T](a: Array[T], i0: Int, iN: Int, ord: Ordering[T]): Unit = {
+ val n = iN - i0
+ if (n < 2) return
+ if (ord.compare(a(i0), a(i0+1)) > 0) {
+ val temp = a(i0)
+ a(i0) = a(i0+1)
+ a(i0+1) = temp
}
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
- i += 1
- a += 1
- b += 1
- }
- }
- def med3(a: Int, b: Int, c: Int) = {
- if (x(a) < x(b)) {
- if (x(b) < x(c)) b else if (x(a) < x(c)) c else a
- } else {
- if (x(b) > x(c)) b else if (x(a) > x(c)) c else a
- }
- }
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j>off && x(j-1) > x(j)) {
- swap(j, j-1)
- j -= 1
- }
- i += 1
+ var m = 2
+ while (m < n) {
+ // Speed up already-sorted case by checking last element first
+ val next = a(i0 + m)
+ if (ord.compare(next, a(i0+m-1)) < 0) {
+ var iA = i0
+ var iB = i0 + m - 1
+ while (iB - iA > 1) {
+ val ix = (iA + iB) >>> 1 // Use bit shift to get unsigned div by 2
+ if (ord.compare(next, a(ix)) < 0) iB = ix
+ else iA = ix
}
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
- }
- m = med3(l, m, n) // Mid-size, med of 3
- }
- val v = x(m)
-
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- while (b <= c && x(b) <= v) {
- if (x(b) == v) {
- swap(a, b)
- a += 1
- }
- b += 1
- }
- while (c >= b && x(c) >= v) {
- if (x(c) == v) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
+ val ix = iA + (if (ord.compare(next, a(iA)) < 0) 0 else 1)
+ var i = i0 + m
+ while (i > ix) {
+ a(i) = a(i-1)
+ i -= 1
}
-
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
-
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
+ a(ix) = next
}
+ m += 1
}
- sort2(off, len)
}
-
- private def sort1(x: Array[Double], off: Int, len: Int) {
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
+
+ // Caller is required to pass iN >= i0, else math will fail. Also, i0 >= 0.
+ private def mergeSort[@specialized T: ClassTag](a: Array[T], i0: Int, iN: Int, ord: Ordering[T], scratch: Array[T] = null): Unit = {
+ if (iN - i0 < mergeThreshold) insertionSort(a, i0, iN, ord)
+ else {
+ val iK = (i0 + iN) >>> 1 // Bit shift equivalent to unsigned math, no overflow
+ val sc = if (scratch eq null) new Array[T](iK - i0) else scratch
+ mergeSort(a, i0, iK, ord, sc)
+ mergeSort(a, iK, iN, ord, sc)
+ mergeSorted(a, i0, iK, iN, ord, sc)
}
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
+ }
+
+ // Must have 0 <= i0 < iK < iN
+ private def mergeSorted[@specialized T](a: Array[T], i0: Int, iK: Int, iN: Int, ord: Ordering[T], scratch: Array[T]): Unit = {
+ // Check to make sure we're not already in order
+ if (ord.compare(a(iK-1), a(iK)) > 0) {
+ var i = i0
+ val jN = iK - i0
+ var j = 0
+ while (i < iK) {
+ scratch(j) = a(i)
i += 1
- a += 1
- b += 1
- }
- }
- def med3(a: Int, b: Int, c: Int) = {
- val ab = x(a) compare x(b)
- val bc = x(b) compare x(c)
- val ac = x(a) compare x(c)
- if (ab < 0) {
- if (bc < 0) b else if (ac < 0) c else a
- } else {
- if (bc > 0) b else if (ac > 0) c else a
+ j += 1
}
- }
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j > off && (x(j-1) compare x(j)) > 0) {
- swap(j, j-1)
- j -= 1
- }
- i += 1
- }
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
- }
- m = med3(l, m, n) // Mid-size, med of 3
- }
- val v = x(m)
-
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- var bv = x(b) compare v
- while (b <= c && bv <= 0) {
- if (bv == 0) {
- swap(a, b)
- a += 1
- }
- b += 1
- if (b <= c) bv = x(b) compare v
- }
- var cv = x(c) compare v
- while (c >= b && cv >= 0) {
- if (cv == 0) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- if (c >= b) cv = x(c) compare v
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
- }
-
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
-
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
+ var k = i0
+ j = 0
+ while (i < iN && j < jN) {
+ if (ord.compare(a(i), scratch(j)) < 0) { a(k) = a(i); i += 1 }
+ else { a(k) = scratch(j); j += 1 }
+ k += 1
}
+ while (j < jN) { a(k) = scratch(j); j += 1; k += 1 }
+ // Don't need to finish a(i) because it's already in place, k = i
}
- sort2(off, len)
}
-
- private def sort1(x: Array[Float], off: Int, len: Int) {
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
+
+ // Why would you even do this?
+ private def booleanSort(a: Array[Boolean]): Unit = {
+ var i = 0
+ var n = 0
+ while (i < a.length) {
+ if (!a(i)) n += 1
+ i += 1
}
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
- i += 1
- a += 1
- b += 1
- }
+ i = 0
+ while (i < n) {
+ a(i) = false
+ i += 1
}
- def med3(a: Int, b: Int, c: Int) = {
- val ab = x(a) compare x(b)
- val bc = x(b) compare x(c)
- val ac = x(a) compare x(c)
- if (ab < 0) {
- if (bc < 0) b else if (ac < 0) c else a
- } else {
- if (bc > 0) b else if (ac > 0) c else a
- }
+ while (i < a.length) {
+ a(i) = true
+ i += 1
}
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j > off && (x(j-1) compare x(j)) > 0) {
- swap(j, j-1)
- j -= 1
- }
- i += 1
- }
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
- }
- m = med3(l, m, n) // Mid-size, med of 3
- }
- val v = x(m)
+ }
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- var bv = x(b) compare v
- while (b <= c && bv <= 0) {
- if (bv == 0) {
- swap(a, b)
- a += 1
- }
- b += 1
- if (b <= c) bv = x(b) compare v
- }
- var cv = x(c) compare v
- while (c >= b && cv >= 0) {
- if (cv == 0) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- if (c >= b) cv = x(c) compare v
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
- }
+ // TODO: add upper bound: T <: AnyRef, propagate to callers below (not binary compatible)
+ // Maybe also rename all these methods to `sort`.
+ @inline private def sort[T](a: Array[T], ord: Ordering[T]): Unit = a match {
+ case _: Array[AnyRef] =>
+ // Note that runtime matches are covariant, so could actually be any Array[T] s.t. T is not primitive (even boxed value classes)
+ if (a.length > 1 && (ord eq null)) throw new NullPointerException("Ordering")
+ java.util.Arrays.sort(a, ord)
+ case a: Array[Int] => if (ord eq Ordering.Int) java.util.Arrays.sort(a) else mergeSort[Int](a, 0, a.length, ord)
+ case a: Array[Double] => mergeSort[Double](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful!
+ case a: Array[Long] => if (ord eq Ordering.Long) java.util.Arrays.sort(a) else mergeSort[Long](a, 0, a.length, ord)
+ case a: Array[Float] => mergeSort[Float](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful!
+ case a: Array[Char] => if (ord eq Ordering.Char) java.util.Arrays.sort(a) else mergeSort[Char](a, 0, a.length, ord)
+ case a: Array[Byte] => if (ord eq Ordering.Byte) java.util.Arrays.sort(a) else mergeSort[Byte](a, 0, a.length, ord)
+ case a: Array[Short] => if (ord eq Ordering.Short) java.util.Arrays.sort(a) else mergeSort[Short](a, 0, a.length, ord)
+ case a: Array[Boolean] => if (ord eq Ordering.Boolean) booleanSort(a) else mergeSort[Boolean](a, 0, a.length, ord)
+ // Array[Unit] is matched as an Array[AnyRef] due to covariance in runtime matching. Not worth catching it as a special case.
+ case null => throw new NullPointerException
+ }
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
+ // TODO: remove unnecessary ClassTag (not binary compatible)
+ /** Sort array `a` using the Ordering on its elements, preserving the original ordering where possible. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag: Ordering](a: Array[K]): Unit = sort(a, Ordering[K])
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
- }
- }
- sort2(off, len)
+ // TODO: Remove unnecessary ClassTag (not binary compatible)
+ // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering)
+ /** Sort array `a` using function `f` that computes the less-than relation for each element. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean): Unit = sort(a, Ordering fromLessThan f)
+
+ /** A sorted Array, using the Ordering for the elements in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] = {
+ val ret = a.toArray
+ sort(ret, Ordering[K])
+ ret
}
- private def stableSort[K : ClassTag](a: Array[K], lo: Int, hi: Int, scratch: Array[K], f: (K,K) => Boolean) {
- if (lo < hi) {
- val mid = (lo+hi) / 2
- stableSort(a, lo, mid, scratch, f)
- stableSort(a, mid+1, hi, scratch, f)
- var k, t_lo = lo
- var t_hi = mid + 1
- while (k <= hi) {
- if ((t_lo <= mid) && ((t_hi > hi) || (!f(a(t_hi), a(t_lo))))) {
- scratch(k) = a(t_lo)
- t_lo += 1
- } else {
- scratch(k) = a(t_hi)
- t_hi += 1
- }
- k += 1
- }
- k = lo
- while (k <= hi) {
- a(k) = scratch(k)
- k += 1
- }
- }
+ // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering)
+ /** A sorted Array, given a function `f` that computes the less-than relation for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = {
+ val ret = a.toArray
+ sort(ret, Ordering fromLessThan f)
+ ret
+ }
+
+ /** A sorted Array, given an extraction function `f` that returns an ordered key for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] = {
+ val ret = a.toArray
+ sort(ret, Ordering[M] on f)
+ ret
}
}
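A short usage sketch of the reworked API (the names below are made up for the example): `quickSort` sorts in place via the implicit `Ordering` and is not stable, while `stableSort` preserves the relative order of equal elements and defers to `java.util.Arrays.sort` for reference types.

    import scala.util.Sorting

    case class Person(name: String, age: Int)
    val people = Array(Person("ann", 30), Person("bob", 25), Person("cat", 30))

    // in-place quicksort with an explicit Ordering
    Sorting.quickSort(people)(Ordering.by[Person, Int](_.age))

    // stable sort from a less-than function: "ann" stays before "cat" among equal ages
    Sorting.stableSort(people, (a: Person, b: Person) => a.age < b.age)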
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index 0a6a7972c2..b0eae74043 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -48,7 +48,7 @@ import scala.language.implicitConversions
* catching exceptions along the way. The `flatMap` and `map` combinators in the above example each essentially
* pass off either their successfully completed value, wrapped in the `Success` type for it to be further operated
* upon by the next combinator in the chain, or the exception wrapped in the `Failure` type usually to be simply
- * passed on down the chain. Combinators such as `rescue` and `recover` are designed to provide some type of
+ * passed on down the chain. Combinators such as `recover` and `recoverWith` are designed to provide some type of
* default behavior in the case of failure.
*
* ''Note'': only non-fatal exceptions are caught by the combinators on `Try` (see [[scala.util.control.NonFatal]]).
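The corrected combinator names in use (standard `Try` API, example only): `recover` maps a matching failure to a value, `recoverWith` to another `Try`.

    import scala.util.Try

    val parsed = Try("oops".toInt)                                                 // Failure(NumberFormatException)
    val withDefault = parsed.recover { case _: NumberFormatException => 0 }        // Success(0)
    val retried     = parsed.recoverWith { case _: Throwable => Try("42".toInt) }  // Success(42)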
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index ce60ade9f5..8c32a92ecd 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -1184,6 +1184,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe))
propagatePackageBoundary(jconstr.javaFlags, constr)
copyAnnotations(constr, jconstr)
+ if (jconstr.javaFlags.isVarargs) constr modifyInfo arrayToRepeated
markAllCompleted(constr)
constr
}
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index c281126d5f..e355d9f864 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -69,6 +69,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
// Used in a test case.
def showDirectory() = replOutput.show(out)
+ lazy val isClassBased: Boolean = settings.Yreplclassbased.value
+
private[nsc] var printResults = true // whether to print result lines
private[nsc] var totalSilence = false // whether to print anything
private var _initializeComplete = false // compiler is initialized
@@ -310,8 +312,14 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
def originalPath(name: String): String = originalPath(TermName(name))
- def originalPath(name: Name): String = typerOp path name
- def originalPath(sym: Symbol): String = typerOp path sym
+ def originalPath(name: Name): String = translateOriginalPath(typerOp path name)
+ def originalPath(sym: Symbol): String = translateOriginalPath(typerOp path sym)
+ /** For class-based REPL mode we use a `.INSTANCE` accessor. */
+ val readInstanceName = if(isClassBased) ".INSTANCE" else ""
+ def translateOriginalPath(p: String): String = {
+ val readName = java.util.regex.Matcher.quoteReplacement(sessionNames.read)
+ p.replaceFirst(readName, readName + readInstanceName)
+ }
def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName
def translatePath(path: String) = {
@@ -758,11 +766,13 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
// object and we can do that much less wrapping.
def packageDecl = "package " + packageName
+ def pathToInstance(name: String) = packageName + "." + name + readInstanceName
def pathTo(name: String) = packageName + "." + name
def packaged(code: String) = packageDecl + "\n\n" + code
- def readPath = pathTo(readName)
- def evalPath = pathTo(evalName)
+ def readPathInstance = pathToInstance(readName)
+ def readPath = pathTo(readName)
+ def evalPath = pathTo(evalName)
def call(name: String, args: Any*): AnyRef = {
val m = evalMethod(name)
@@ -802,7 +812,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
/** The innermost object inside the wrapper, found by
* following accessPath into the outer one.
*/
- def resolvePathToSymbol(accessPath: String): Symbol = {
+ def resolvePathToSymbol(fullAccessPath: String): Symbol = {
+ val accessPath = fullAccessPath.stripPrefix(readPath)
val readRoot = readRootPath(readPath) // the outermost wrapper
(accessPath split '.').foldLeft(readRoot: Symbol) {
case (sym, "") => sym
@@ -849,7 +860,6 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def defines = defHandlers flatMap (_.definedSymbols)
def imports = importedSymbols
def value = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol
-
val lineRep = new ReadEvalPrint()
private var _originalLine: String = null
@@ -858,6 +868,11 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
/** handlers for each tree in this request */
val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
+ val definesClass = handlers.exists {
+ case _: ClassHandler => true
+ case _ => false
+ }
+
def defHandlers = handlers collect { case x: MemberDefHandler => x }
/** list of names used by this expression */
@@ -875,13 +890,13 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
* append to objectName to access anything bound by request.
*/
lazy val ComputedImports(importsPreamble, importsTrailer, accessPath) =
- exitingTyper(importsCode(referencedNames.toSet, ObjectSourceCode))
+ exitingTyper(importsCode(referencedNames.toSet, ObjectSourceCode, definesClass))
/** the line of code to compute */
def toCompute = line
/** The path of the value that contains the user code. */
- def fullAccessPath = s"${lineRep.readPath}$accessPath"
+ def fullAccessPath = s"${lineRep.readPathInstance}$accessPath"
/** The path of the given member of the wrapping instance. */
def fullPath(vname: String) = s"$fullAccessPath.`$vname`"
@@ -911,7 +926,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def postwrap: String
}
- private class ObjectBasedWrapper extends Wrapper {
+ class ObjectBasedWrapper extends Wrapper {
def preambleHeader = "object %s {"
def postamble = importsTrailer + "\n}"
@@ -919,13 +934,16 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def postwrap = "}\n"
}
- private class ClassBasedWrapper extends Wrapper {
- def preambleHeader = "class %s extends Serializable {"
+ class ClassBasedWrapper extends Wrapper {
+ def preambleHeader = "class %s extends Serializable { "
/** Adds an object that instantiates the outer wrapping class. */
- def postamble = s"""$importsTrailer
+ def postamble = s"""
+ |$importsTrailer
+ |}
+ |object ${lineRep.readName} {
+ | val INSTANCE = new ${lineRep.readName}();
|}
- |object ${lineRep.readName} extends ${lineRep.readName}
|""".stripMargin
import nme.{ INTERPRETER_IMPORT_WRAPPER => iw }
@@ -935,7 +953,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
private lazy val ObjectSourceCode: Wrapper =
- if (settings.Yreplclassbased) new ClassBasedWrapper else new ObjectBasedWrapper
+ if (isClassBased) new ClassBasedWrapper else new ObjectBasedWrapper
private object ResultObjectSourceCode extends IMain.CodeAssembler[MemberHandler] {
/** We only want to generate this code when the result
@@ -994,7 +1012,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
}
- lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
+ lazy val resultSymbol = lineRep.resolvePathToSymbol(fullAccessPath)
def applyToResultMember[T](name: Name, f: Symbol => T) = exitingTyper(f(resultSymbol.info.nonPrivateDecl(name)))
/* typeOf lookup with encoding */
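Putting the pieces together: under `-Yrepl-class-based` each line is wrapped in a serializable class, and a companion-style object exposes a single `INSTANCE` of it, which is why paths into earlier lines are rewritten through `.INSTANCE` by `translateOriginalPath`. Roughly, the generated wrapper has this shape (illustrative only; `$line44`, `$read` and `$iw` are the REPL's own generated names):

    class $read extends Serializable {
      class $iw extends Serializable {
        // imports from previous lines and the user's code end up nested in here
      }
      val $iw = new $iw
    }
    object $read {
      val INSTANCE = new $read()
    }
    // e.g. a class defined on line 44 is reached as $line44.$read.INSTANCE.$iw.$iw.BippyBups
    // (compare the expected output in t7747-repl.check below)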
diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala
index 5244858a62..3ec77e46f1 100644
--- a/src/repl/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala
@@ -92,7 +92,7 @@ trait Imports {
* last one imported is actually usable.
*/
case class ComputedImports(prepend: String, append: String, access: String)
- protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper): ComputedImports = {
+ protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper, definesClass: Boolean): ComputedImports = {
/** Narrow down the list of requests from which imports
* should be taken. Removes requests which cannot contribute
* useful imports for the specified set of wanted names.
@@ -107,6 +107,8 @@ trait Imports {
// Single symbol imports might be implicits! See bug #1752. Rather than
// try to finesse this, we will mimic all imports for now.
def keepHandler(handler: MemberHandler) = handler match {
+ /* While defining classes in class-based mode, implicits are not needed. */
+ case h: ImportHandler if isClassBased && definesClass => h.importedNames.exists(x => wanted.contains(x))
case _: ImportHandler => true
case x => x.definesImplicit || (x.definedNames exists wanted)
}
@@ -146,7 +148,10 @@ trait Imports {
// loop through previous requests, adding imports for each one
wrapBeforeAndAfter {
+ // Reuse a single temporary value when importing from a line with multiple definitions.
+ val tempValLines = mutable.Set[Int]()
for (ReqAndHandler(req, handler) <- reqsToUse) {
+ val objName = req.lineRep.readPathInstance
handler match {
// If the user entered an import, then just use it; add an import wrapping
// level if the import might conflict with some other import
@@ -157,6 +162,23 @@ trait Imports {
code append (x.member + "\n")
currentImps ++= x.importedNames
+ case x if isClassBased =>
+ for (imv <- x.definedNames) {
+ if (!currentImps.contains(imv)) {
+ x match {
+ case _: ClassHandler =>
+ code.append("import " + objName + req.accessPath + ".`" + imv + "`\n")
+ case _ =>
+ val valName = req.lineRep.packageName + req.lineRep.readName
+ if (!tempValLines.contains(req.lineRep.lineId)) {
+ code.append(s"val $valName = $objName\n")
+ tempValLines += req.lineRep.lineId
+ }
+ code.append(s"import $valName${req.accessPath}.`$imv`;\n")
+ }
+ currentImps += imv
+ }
+ }
// For other requests, import each defined name.
// import them explicitly instead of with _, so that
// ambiguity errors will not be generated. Also, quote
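In class-based mode the branch above therefore emits, for each earlier request it keeps, either a direct import through the `INSTANCE` path (for classes) or a single shared temporary `val` plus imports through it (for values), instead of importing from a wrapper object. Roughly (illustrative shape only; the concrete `$lineNN`, `$read` and `$iw` names are generated per line):

    // a class defined on an earlier line is imported straight through the instance path
    import $line44.$read.INSTANCE.$iw.$iw.`BippyBups`
    // values share one temporary val per originating line, then are imported from it
    val $line45$read = $line45.$read.INSTANCE
    import $line45$read.$iw.$iw.`x`;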
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index c384ed7034..81036b4908 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -177,7 +177,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<li class="hideall out"><span>Hide All</span></li>
<li class="showall in"><span>Show all</span></li>
</ol>
- <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
</div>
}
{
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index f984b4579f..778839a1f5 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -90,8 +90,12 @@ trait ModelFactoryImplicitSupport {
else {
val context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
- val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams)
+ val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams) ++
+ global.analyzer.allViewsFrom(byNameType(sym.tpe_*), context, sym.typeParams)
var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
+ //debug(results.mkString("All views\n ", "\n ", "\n"))
+ //debug(conversions.mkString("Conversions\n ", "\n ", "\n"))
+
// also keep empty conversions, so they appear in diagrams
// conversions = conversions.filter(!_.members.isEmpty)
@@ -193,7 +197,7 @@ trait ModelFactoryImplicitSupport {
List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl))
} catch {
case i: ImplicitNotFound =>
- //println(" Eliminating: " + toType)
+ //debug(s" Eliminating: $toType")
Nil
}
}
diff --git a/test/files/neg/inlineMaxSize.check b/test/files/neg/inlineMaxSize.check
index d218a8b6e2..9d790e154c 100644
--- a/test/files/neg/inlineMaxSize.check
+++ b/test/files/neg/inlineMaxSize.check
@@ -2,8 +2,8 @@ inlineMaxSize.scala:7: warning: C::i()I is annotated @inline but could not be in
The size of the callsite method C::j()I
would exceed the JVM method size limit after inlining C::i()I.
- @inline final def j = i + i
- ^
+ @inline final def j = i + i + i
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
one warning found
one error found
diff --git a/test/files/neg/inlineMaxSize.scala b/test/files/neg/inlineMaxSize.scala
index 16dc0d9538..9d2db1a357 100644
--- a/test/files/neg/inlineMaxSize.scala
+++ b/test/files/neg/inlineMaxSize.scala
@@ -4,5 +4,5 @@ class C {
@inline final def g = f + f + f + f + f + f + f + f + f + f
@inline final def h = g + g + g + g + g + g + g + g + g + g
@inline final def i = h + h + h + h + h + h + h + h + h + h
- @inline final def j = i + i
+ @inline final def j = i + i + i
}
diff --git a/test/files/pos/t9356/Foo_2.scala b/test/files/pos/t9356/Foo_2.scala
new file mode 100644
index 0000000000..ab7bb44d0e
--- /dev/null
+++ b/test/files/pos/t9356/Foo_2.scala
@@ -0,0 +1,6 @@
+class C
+
+trait Foo {
+ @annot.MyAnnotation(cls = classOf[C])
+ def function: Any = ???
+}
diff --git a/test/files/pos/t9356/MyAnnotation.java b/test/files/pos/t9356/MyAnnotation.java
new file mode 100644
index 0000000000..b6c00e7356
--- /dev/null
+++ b/test/files/pos/t9356/MyAnnotation.java
@@ -0,0 +1,12 @@
+package annot;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface MyAnnotation {
+ Class<?> cls();
+}
diff --git a/test/files/pos/t9356/Test_3.scala b/test/files/pos/t9356/Test_3.scala
new file mode 100644
index 0000000000..fa1b76c9e1
--- /dev/null
+++ b/test/files/pos/t9356/Test_3.scala
@@ -0,0 +1,3 @@
+class Foo1 extends Foo
+
+class Foo2 extends Foo \ No newline at end of file
diff --git a/test/files/run/range.scala b/test/files/run/range.scala
index 4637ab874d..e50d0ac6a5 100644
--- a/test/files/run/range.scala
+++ b/test/files/run/range.scala
@@ -36,16 +36,19 @@ object Test {
def gr1 = NumericRange(x, x, x)
def gr2 = NumericRange.inclusive(x, x, x)
- def gr3 = NumericRange(x, x * fromInt(10), x)
- def gr4 = NumericRange.inclusive(x, x * fromInt(10), x)
- def gr5 = gr3.toList ::: negated.gr3.toList
+ def gr3 = NumericRange(x, x * fromInt(4), x * fromInt(2)) // SI-9348
+ def gr4 = NumericRange(x, x * fromInt(-2), x * fromInt(-2))
+ def gr5 = NumericRange(x, x * fromInt(10), x)
+ def gr6 = NumericRange.inclusive(x, x * fromInt(10), x)
+ def gr7 = gr3.toList ::: negated.gr3.toList
def check = {
assert(gr1.isEmpty && !gr2.isEmpty)
- assert(gr3.size == 9 && gr4.size == 10)
- assert(gr5.sum == num.zero, gr5.toString)
- assert(!(gr3 contains (x * fromInt(10))))
- assert((gr4 contains (x * fromInt(10))))
+ assert(gr3.size == 2 && gr4.size == 2)
+ assert(gr5.size == 9 && gr6.size == 10)
+ assert(gr7.sum == num.zero, gr7.toString)
+ assert(!(gr5 contains (x * fromInt(10))))
+ assert(gr6 contains (x * fromInt(10)))
}
}
@@ -55,6 +58,7 @@ object Test {
val _grs = List[GR[_]](
GR(BigDecimal(5.0)),
+ GR(BigDecimal(0.25)), // SI-9348
GR(BigInt(5)),
GR(5L),
GR(5.0d),
diff --git a/test/files/run/repl-serialization.check b/test/files/run/repl-serialization.check
new file mode 100644
index 0000000000..eb62729f5c
--- /dev/null
+++ b/test/files/run/repl-serialization.check
@@ -0,0 +1,25 @@
+== evaluating lines
+extract: AnyRef => Unit = <function1>
+ evaluating x
+x: Int = 0
+getX: ()Int
+defined class U
+y: Int = <lazy>
+ evaluating z
+ evaluating zz
+defined class D
+z: Int = 0
+zz: Int = 0
+defined object O
+defined class A
+defined type alias AA
+constructing U
+u: U = U
+== evaluating lambda
+ evaluating y
+ evaluating O
+ constructing A
+== reconstituting into a fresh classloader
+ evaluating O
+== evaluating reconstituted lambda
+ constructing A
diff --git a/test/files/run/repl-serialization.scala b/test/files/run/repl-serialization.scala
new file mode 100644
index 0000000000..55b7519631
--- /dev/null
+++ b/test/files/run/repl-serialization.scala
@@ -0,0 +1,68 @@
+import java.io._
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.Settings
+import scala.tools.nsc.interpreter.IMain
+import scala.tools.nsc.util._
+import scala.reflect.internal.util.AbstractFileClassLoader
+
+object Test {
+ def main(args: Array[String]) {
+ run()
+ }
+
+ def run(): Unit = {
+ val settings = new Settings()
+ settings.Yreplclassbased.value = true
+ settings.usejavacp.value = true
+
+ var imain: IMain = null
+ object extract extends ((AnyRef) => Unit) with Serializable {
+ var value: AnyRef = null
+
+ def apply(a: AnyRef) = value = a
+ }
+
+ val code =
+ """val x = {println(" evaluating x"); 0 }
+ |def getX() = x
+ |class U extends Serializable { println("constructing U"); val x = 0 ; override def toString = "U" }
+ |lazy val y = {println(" evaluating y"); 0 }
+ |class D; val z = {println(" evaluating z"); 0}; val zz = {println(" evaluating zz"); 0}
+ |object O extends Serializable { val apply = {println(" evaluating O"); 0} }
+ |class A(i: Int) { println(" constructing A") }
+ |type AA = A
+ |val u = new U()
+ |extract(() => new AA(x + getX() + y + z + zz + O.apply + u.x))
+ """.stripMargin
+
+ imain = new IMain(settings)
+ println("== evaluating lines")
+ imain.directBind("extract", "(AnyRef => Unit)", extract)
+ code.lines.foreach(imain.interpret)
+
+ val virtualFile: AbstractFile = extract.value.getClass.getClassLoader.asInstanceOf[AbstractFileClassLoader].root
+ val newLoader = new AbstractFileClassLoader(virtualFile, getClass.getClassLoader)
+
+ def deserializeInNewLoader(string: Array[Byte]): AnyRef = {
+ val bis = new ByteArrayInputStream(string)
+ val in = new ObjectInputStream(bis) {
+ override def resolveClass(desc: ObjectStreamClass) = Class.forName(desc.getName, false, newLoader)
+ }
+ in.readObject()
+ }
+ def serialize(o: AnyRef): Array[Byte] = {
+ val bos = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bos)
+ out.writeObject(o)
+ out.close()
+ bos.toByteArray
+ }
+ println("== evaluating lambda")
+ extract.value.asInstanceOf[() => Any].apply()
+ println("== reconstituting into a fresh classloader")
+ val reconstituted = deserializeInNewLoader(serialize(extract.value)).asInstanceOf[() => Any]
+ println("== evaluating reconstituted lambda")
+ reconstituted.apply() // should not print("evaluating x") a second time
+ }
+}
diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check
index 105b238d01..5f436ba6b1 100644
--- a/test/files/run/t7747-repl.check
+++ b/test/files/run/t7747-repl.check
@@ -112,7 +112,7 @@ scala> 55 ; ((2 + 2)) ; (1, 2, 3)
res15: (Int, Int, Int) = (1,2,3)
scala> 55 ; (x: Int) => x + 1 ; () => ((5))
-<console>:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:9: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
55 ; (x: Int) => x + 1 ;;
^
res16: () => Int = <function0>
@@ -258,12 +258,12 @@ class $read extends Serializable {
super.<init>;
()
};
- import $line44.$read.$iw.$iw.BippyBups;
- import $line44.$read.$iw.$iw.BippyBups;
- import $line45.$read.$iw.$iw.PuppyPups;
- import $line45.$read.$iw.$iw.PuppyPups;
- import $line46.$read.$iw.$iw.Bingo;
- import $line46.$read.$iw.$iw.Bingo;
+ import $line44.$read.INSTANCE.$iw.$iw.BippyBups;
+ import $line44.$read.INSTANCE.$iw.$iw.BippyBups;
+ import $line45.$read.INSTANCE.$iw.$iw.PuppyPups;
+ import $line45.$read.INSTANCE.$iw.$iw.PuppyPups;
+ import $line46.$read.INSTANCE.$iw.$iw.Bingo;
+ import $line46.$read.INSTANCE.$iw.$iw.Bingo;
class $iw extends Serializable {
def <init>() = {
super.<init>;
@@ -275,12 +275,35 @@ class $read extends Serializable {
};
val $iw = new $iw.<init>
}
-object $read extends $read {
+object $read extends scala.AnyRef {
def <init>() = {
super.<init>;
()
- }
+ };
+ val INSTANCE = new $read.<init>
}
res3: List[Product with Serializable] = List(BippyBups(), PuppyPups(), Bingo())
+scala> case class Sum(exp: String, exp2: String)
+defined class Sum
+
+scala> val a = Sum("A", "B")
+a: Sum = Sum(A,B)
+
+scala> def b(a: Sum): String = a match { case Sum(_, _) => "Found Sum" }
+b: (a: Sum)String
+
+scala> b(a)
+res4: String = Found Sum
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> intp.lastRequest
+res5: $r.intp.Request = Request(line=def $ires3 = intp.global, 1 trees)
+
scala> :quit
diff --git a/test/files/run/t7747-repl.scala b/test/files/run/t7747-repl.scala
index 0e64210460..141c2d9844 100644
--- a/test/files/run/t7747-repl.scala
+++ b/test/files/run/t7747-repl.scala
@@ -65,5 +65,11 @@ object Test extends ReplTest {
|case class PuppyPups()
|case class Bingo()
|List(BippyBups(), PuppyPups(), Bingo()) // show
+ |case class Sum(exp: String, exp2: String)
+ |val a = Sum("A", "B")
+ |def b(a: Sum): String = a match { case Sum(_, _) => "Found Sum" }
+ |b(a)
+ |:power
+ |intp.lastRequest
|""".stripMargin
}
diff --git a/test/files/run/toolbox-varargs/Test.scala b/test/files/run/toolbox-varargs/Test.scala
new file mode 100644
index 0000000000..be5ab45768
--- /dev/null
+++ b/test/files/run/toolbox-varargs/Test.scala
@@ -0,0 +1,13 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ import scala.tools.reflect.ToolBox
+ val m = reflect.runtime.currentMirror
+ val u = m.universe
+ import u._
+ val tb = m.mkToolBox();
+ tb.compile(q"new p.Varargs(null, null)")
+ tb.compile(q"p.Varargs.staticMethod(null, null)")
+ tb.compile(q"(null: p.Varargs).instanceMethod(null, null)")
+ }
+}
+
diff --git a/test/files/run/toolbox-varargs/Varargs.java b/test/files/run/toolbox-varargs/Varargs.java
new file mode 100644
index 0000000000..da1dbbacc9
--- /dev/null
+++ b/test/files/run/toolbox-varargs/Varargs.java
@@ -0,0 +1,8 @@
+package p;
+
+public class Varargs {
+ public Varargs(String... args) {}
+ public static void staticMethod(String... args) {}
+
+ public void instanceMethod(String... args) {}
+}
diff --git a/test/junit/scala/collection/immutable/RangeConsistencyTest.scala b/test/junit/scala/collection/immutable/RangeConsistencyTest.scala
index 3980c31577..135796979d 100644
--- a/test/junit/scala/collection/immutable/RangeConsistencyTest.scala
+++ b/test/junit/scala/collection/immutable/RangeConsistencyTest.scala
@@ -137,4 +137,15 @@ class RangeConsistencyTest {
assert( (-3 to Int.MaxValue).dropWhile(_ <= 0).length == Int.MaxValue )
assert( (-3 to Int.MaxValue).span(_ <= 0) match { case (a,b) => a.length == 4 && b.length == Int.MaxValue } )
}
+
+ @Test
+ def testSI9348() {
+ // Test exclusive range with (end-start) != 0 (mod step)
+ assert( (0.0f until 0.4f by 0.25f) sameElements List(0.0f, 0.25f) )
+ assert( (1.0 until 2.2 by 0.5) sameElements List(1.0, 1.5, 2.0) )
+
+ def bd(d: Double) = BigDecimal(d)
+ val bdRange = bd(-10.0) until bd(0.0) by bd(4.5)
+ assert( bdRange sameElements List(bd(-10.0), bd(-5.5), bd(-1.0)) )
+ }
}
diff --git a/test/junit/scala/math/BigDecimalTest.scala b/test/junit/scala/math/BigDecimalTest.scala
index c7a63da890..a9e2481f37 100644
--- a/test/junit/scala/math/BigDecimalTest.scala
+++ b/test/junit/scala/math/BigDecimalTest.scala
@@ -228,4 +228,36 @@ class BigDecimalTest {
def test_SI8970() {
assert((0.1).## == BigDecimal(0.1).##)
}
+
+ // Motivated by the problem of MathContext lost
+ @Test
+ def testMathContext() {
+ def testPrecision() {
+ val p = 1000
+ val n = BigDecimal("1.1", MC.UNLIMITED).pow(p)
+
+ // BigDecimal(x: Float, mc: MC), which may not do what you want, is deprecated
+ assert(BigDecimal(1.1f, MC.UNLIMITED).pow(p) == BigDecimal(java.lang.Double.toString(1.1f.toDouble), MC.UNLIMITED).pow(p))
+ assert(BigDecimal(1.1d, MC.UNLIMITED).pow(p) == n)
+ assert(BigDecimal(new BD("1.1"), MC.UNLIMITED).pow(p) == n)
+
+ assert(BigDecimal.decimal(1.1f, MC.UNLIMITED).pow(p) == n)
+ assert(BigDecimal.decimal(1.1d, MC.UNLIMITED).pow(p) == n)
+ assert(BigDecimal.decimal(new BD("1.1"), MC.UNLIMITED).pow(p) == n)
+
+ assert((BigDecimal(11, MC.UNLIMITED) / 10).pow(p) == n)
+ assert((BigDecimal.decimal(11, MC.UNLIMITED) / 10).pow(p) == n)
+ }
+
+ def testRounded() {
+ // the default rounding mode is HALF_UP
+ assert((BigDecimal(1.23f, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) // deprecated api
+ assert((BigDecimal(1.23d, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24"))
+ assert((BigDecimal.decimal(1.23f, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24"))
+ assert((BigDecimal.decimal(1.23d, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24"))
+ }
+
+ testPrecision()
+ testRounded()
+ }
}
diff --git a/test/junit/scala/math/NumericTest.scala b/test/junit/scala/math/NumericTest.scala
index 9bf7d4f1e4..682dcbfd75 100644
--- a/test/junit/scala/math/NumericTest.scala
+++ b/test/junit/scala/math/NumericTest.scala
@@ -5,6 +5,9 @@ import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import scala.math.Numeric.FloatAsIfIntegral
+
+
@RunWith(classOf[JUnit4])
class NumericTest {
@@ -14,5 +17,28 @@ class NumericTest {
assertTrue(-0.0.abs equals 0.0)
assertTrue(-0.0f.abs equals 0.0f)
}
-}
+
+ /* Test for SI-9348 */
+ @Test
+ def testFloatAsIfIntegral {
+ val num = scala.math.Numeric.FloatAsIfIntegral
+ assertTrue(num.quot(1.0f, 0.5f) equals 2.0f)
+ assertTrue(num.quot(1.0f, 0.3f) equals 3.0f)
+ }
+
+ /* Test for SI-9348 */
+ @Test
+ def testDoubleAsIfIntegral {
+ val num = scala.math.Numeric.DoubleAsIfIntegral
+ assertTrue(num.quot(1.0, 0.25) equals 4.0)
+ assertTrue(num.quot(0.5, 0.15) equals 3.0)
+ }
+
+ /* Test for SI-9348 */
+ @Test
+ def testBigDecimalAsIfIntegral {
+ val num = scala.math.Numeric.BigDecimalAsIfIntegral
+ assertTrue(num.quot(BigDecimal(2.5), BigDecimal(0.5)) equals BigDecimal(5.0))
+ assertTrue(num.quot(BigDecimal(5.0), BigDecimal(2.0)) equals BigDecimal(2.0))
+ }}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala
new file mode 100644
index 0000000000..3a85f03da2
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala
@@ -0,0 +1,231 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import CodeGenTools._
+import scala.tools.asm.tree.{AbstractInsnNode, MethodNode}
+import scala.tools.nsc.backend.jvm.BTypes._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import scala.tools.testing.ClearAfterClass
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._
+import AsmUtils._
+
+import scala.collection.convert.decorateAsScala._
+
+object NullnessAnalyzerTest extends ClearAfterClass.Clearable {
+ var noOptCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
+
+ def clear(): Unit = {
+ noOptCompiler = null
+ }
+}
+
+@RunWith(classOf[JUnit4])
+class NullnessAnalyzerTest extends ClearAfterClass {
+ ClearAfterClass.stateToClear = NullnessAnalyzerTest
+ val noOptCompiler = NullnessAnalyzerTest.noOptCompiler
+
+ def newNullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName = "C"): NullnessAnalyzer = {
+ val nullnessAnalyzer = new NullnessAnalyzer
+ nullnessAnalyzer.analyze(classInternalName, methodNode)
+ nullnessAnalyzer
+ }
+
+ /**
+ * Instructions that match `query` when textified.
+ * If `query` starts with a `+`, the next instruction is returned.
+ */
+ def findInstr(method: MethodNode, query: String): List[AbstractInsnNode] = {
+ val useNext = query(0) == '+'
+ val instrPart = if (useNext) query.drop(1) else query
+ val insns = method.instructions.iterator.asScala.find(i => textify(i) contains instrPart).toList
+ if (useNext) insns.map(_.getNext) else insns
+ }
+
+ def testNullness(analyzer: NullnessAnalyzer, method: MethodNode, query: String, index: Int, nullness: Nullness): Unit = {
+ for (i <- findInstr(method, query)) {
+ val r = analyzer.frameAt(i, method).getValue(index).nullness
+ assertTrue(s"Expected: $nullness, found: $r. At instr ${textify(i)}", nullness == r)
+ }
+ }
+
+ // debug / helper for writing tests
+ def showAllNullnessFrames(analyzer: NullnessAnalyzer, method: MethodNode): String = {
+ val instrLength = method.instructions.iterator.asScala.map(textify(_).length).max
+ val lines = for (i <- method.instructions.iterator.asScala) yield {
+ val f = analyzer.frameAt(i, method)
+ val frameString = {
+ if (f == null) "null"
+ else (0 until (f.getLocals + f.getStackSize)).iterator
+ .map(f.getValue(_).toString)
+ .map(s => "%8s".format(s))
+ .zipWithIndex.map({case (s, i) => s"$i: $s"})
+ .mkString(", ")
+ }
+ ("%"+ instrLength +"s: %s").format(textify(i), frameString)
+ }
+ lines.mkString("\n")
+ }
+
+ @Test
+ def showNullnessFramesTest(): Unit = {
+ val List(m) = compileMethods(noOptCompiler)("def f = this.toString")
+
+ // NOTE: the frame for an instruction represents the state *before* executing that instr.
+ // So in the frame for `ALOAD 0`, the stack is still empty.
+
+ val res =
+ """ L0: 0: NotNull
+ | LINENUMBER 1 L0: 0: NotNull
+ | ALOAD 0: 0: NotNull
+ |INVOKEVIRTUAL java/lang/Object.toString ()Ljava/lang/String;: 0: NotNull, 1: NotNull
+ | ARETURN: 0: NotNull, 1: Unknown1
+ | L0: null""".stripMargin
+    assertEquals(res, showAllNullnessFrames(newNullnessAnalyzer(m), m))
+ }
+
+ @Test
+ def thisNonNull(): Unit = {
+ val List(m) = compileMethods(noOptCompiler)("def f = this.toString")
+ val a = newNullnessAnalyzer(m)
+ testNullness(a, m, "ALOAD 0", 0, NotNull)
+ }
+
+ @Test
+ def instanceMethodCall(): Unit = {
+ val List(m) = compileMethods(noOptCompiler)("def f(a: String) = a.trim")
+ val a = newNullnessAnalyzer(m)
+ testNullness(a, m, "INVOKEVIRTUAL java/lang/String.trim", 1, Unknown)
+ testNullness(a, m, "ARETURN", 1, NotNull)
+ }
+
+ @Test
+ def constructorCall(): Unit = {
+ val List(m) = compileMethods(noOptCompiler)("def f = { val a = new Object; a.toString }")
+ val a = newNullnessAnalyzer(m)
+
+ // for reference, the output of showAllNullnessFrames(a, m) - note that the frame represents the state *before* executing the instr.
+ // NEW java/lang/Object: 0: NotNull, 1: Unknown
+ // DUP: 0: NotNull, 1: Unknown, 2: Unknown
+ // INVOKESPECIAL java/lang/Object.<init>: 0: NotNull, 1: Unknown, 2: Unknown, 3: Unknown
+ // ASTORE 1: 0: NotNull, 1: Unknown, 2: NotNull
+ // ALOAD 1: 0: NotNull, 1: NotNull
+ // INVOKEVIRTUAL java/lang/Object.toString: 0: NotNull, 1: NotNull, 2: NotNull
+ // ARETURN: 0: NotNull, 1: NotNull, 2: Unknown
+
+ for ((insn, index, nullness) <- List(
+ ("+NEW", 2, Unknown), // new value at slot 2 on the stack
+ ("+DUP", 3, Unknown),
+ ("+INVOKESPECIAL java/lang/Object", 2, NotNull), // after calling the initializer on 3, the value at 2 becomes NotNull
+ ("ASTORE 1", 1, Unknown), // before the ASTORE 1, nullness of the value in local 1 is Unknown
+ ("+ASTORE 1", 1, NotNull), // after storing the value at 2 in local 1, the local 1 is NotNull
+ ("+ALOAD 1", 2, NotNull), // loading the value 1 puts a NotNull value on the stack (at 2)
+ ("+INVOKEVIRTUAL java/lang/Object.toString", 2, Unknown) // nullness of value returned by `toString` is Unknown
+ )) testNullness(a, m, insn, index, nullness)
+ }
+
+ @Test
+ def explicitNull(): Unit = {
+ val List(m) = compileMethods(noOptCompiler)("def f = { var a: Object = null; a }")
+ val a = newNullnessAnalyzer(m)
+ for ((insn, index, nullness) <- List(
+ ("+ACONST_NULL", 2, Null),
+ ("+ASTORE 1", 1, Null),
+ ("+ALOAD 1", 2, Null)
+ )) testNullness(a, m, insn, index, nullness)
+ }
+
+ @Test
+ def stringLiteralsNotNull(): Unit = {
+ val List(m) = compileMethods(noOptCompiler)("""def f = { val a = "hi"; a.trim }""")
+ val a = newNullnessAnalyzer(m)
+ testNullness(a, m, "+ASTORE 1", 1, NotNull)
+ }
+
+ @Test
+  def newArrayNotNull(): Unit = {
+ val List(m) = compileMethods(noOptCompiler)("def f = { val a = new Array[Int](2); a(0) }")
+ val a = newNullnessAnalyzer(m)
+ testNullness(a, m, "+NEWARRAY T_INT", 2, NotNull) // new array on stack
+ testNullness(a, m, "+ASTORE 1", 1, NotNull) // local var (a)
+ }
+
+ @Test
+ def aliasBranching(): Unit = {
+ val code =
+ """def f(o: Object) = {
+ | var a: Object = o // a and o are aliases
+ | var b: Object = null
+ | var c: Object = null
+ | var d: Object = o
+ | if ("".trim == "") {
+ | b = o
+        |    c = o // a, o, b, c aliases
+ | d = null
+ | } else {
+ | b = a // a, o, b aliases
+ | d = null
+ | }
+ | b.toString // a, o, b aliases (so they become NotNull), but not c
+        |  // d is null here, assigned in both branches.
+ |}
+ """.stripMargin
+ val List(m) = compileMethods(noOptCompiler)(code)
+ val a = newNullnessAnalyzer(m)
+
+ val trim = "INVOKEVIRTUAL java/lang/String.trim"
+ val toSt = "INVOKEVIRTUAL java/lang/Object.toString"
+ val end = s"+$toSt"
+ for ((insn, index, nullness) <- List(
+ (trim, 0, NotNull), // this
+ (trim, 1, Unknown), // parameter o
+ (trim, 2, Unknown), // a
+ (trim, 3, Null), // b
+ (trim, 4, Null), // c
+ (trim, 5, Unknown), // d
+
+ (toSt, 2, Unknown), // a, still the same
+      (toSt, 3, Unknown), // b, was re-assigned in both branches to Unknown
+ (toSt, 4, Unknown), // c, was re-assigned in one branch to Unknown
+ (toSt, 5, Null), // d, was assigned to null in both branches
+
+ (end, 2, NotNull), // a, NotNull (alias of b)
+ (end, 3, NotNull), // b, receiver of toString
+ (end, 4, Unknown), // c, no change (not an alias of b)
+ (end, 5, Null) // d, no change
+ )) testNullness(a, m, insn, index, nullness)
+ }
+
+ @Test
+ def testInstanceOf(): Unit = {
+ val code =
+ """def f(a: Object) = {
+ | val x = a
+ | x.isInstanceOf[Throwable] // x and a remain unknown - INSTANCEOF doesn't throw a NPE on null
+ | x.toString // x and a are not null
+ | a.asInstanceOf[String].trim // the stack value (LOAD of local a) is still not-null after the CHECKCAST
+ |}
+ """.stripMargin
+ val List(m) = compileMethods(noOptCompiler)(code)
+ val a = newNullnessAnalyzer(m)
+
+ val instof = "+INSTANCEOF"
+ val tost = "+INVOKEVIRTUAL java/lang/Object.toString"
+ val trim = "INVOKEVIRTUAL java/lang/String.trim"
+
+ for ((insn, index, nullness) <- List(
+ (instof, 1, Unknown), // a after INSTANCEOF
+ (instof, 2, Unknown), // x after INSTANCEOF
+ (tost, 1, NotNull),
+ (tost, 2, NotNull),
+ (trim, 3, NotNull) // receiver at `trim`
+ )) testNullness(a, m, insn, index, nullness)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
index 0fc3601603..b8c5f85c49 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
@@ -975,4 +975,20 @@ class InlinerTest extends ClearAfterClass {
val List(c) = compile(code)
assertInvoke(getSingleMethod(c, "t"), "java/lang/Error", "<init>")
}
+
+ @Test
+  def noRedundantNullChecks(): Unit = {
+ val code =
+ """class C {
+ | @inline final def f: String = "hai!"
+ | def t(c: C) = {c.f; c.f} // null check on the first, but not the second
+ |}
+ """.stripMargin
+
+ val List(c) = compile(code)
+ val t = getSingleMethod(c, "t").instructions
+ assertNoInvoke(t)
+ assert(2 == t.collect({case Ldc(_, "hai!") => }).size) // twice the body of f
+ assert(1 == t.collect({case Jump(IFNONNULL, _) => }).size) // one single null check
+ }
}
diff --git a/test/junit/scala/util/SortingTest.scala b/test/junit/scala/util/SortingTest.scala
new file mode 100644
index 0000000000..15a00c8903
--- /dev/null
+++ b/test/junit/scala/util/SortingTest.scala
@@ -0,0 +1,69 @@
+package scala.util
+
+import org.junit.Test
+import org.junit.Assert._
+import scala.math.{ Ordered, Ordering }
+import scala.reflect.ClassTag
+
+class SortingTest {
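+  // N is ordered by `i` only; `j` records the original position so that stability can be verified.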
+ case class N(i: Int, j: Int) extends Ordered[N] { def compare(n: N) = if (i < n.i) -1 else if (i > n.i) 1 else 0 }
+
+ def mkA(n: Int, max: Int) = Array.tabulate(n)(i => N(util.Random.nextInt(max), i))
+
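+  // isStable: non-decreasing in `i`, with original indices `j` ascending among equal `i`.
+  // isAntistable: non-increasing in `i`, with `j` still ascending among equal `i` (a stable reverse sort).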
+ def isStable(a: Array[N]): Boolean = { var i = 1; while (i < a.length) { if (a(i).i < a(i-1).i || (a(i).i == a(i-1).i && a(i).j < a(i-1).j)) return false; i += 1 }; true }
+
+ def isAntistable(a: Array[N]): Boolean =
+ { var i = 1; while (i < a.length) { if (a(i).i > a(i-1).i || (a(i).i == a(i-1).i && a(i).j < a(i-1).j)) return false; i += 1 }; true }
+
+ def isSorted(a: Array[N]): Boolean = { var i = 1; while (i < a.length) { if (a(i).i < a(i-1).i) return false; i += 1 }; true }
+
+ def isAntisorted(a: Array[N]): Boolean = { var i = 1; while (i < a.length) { if (a(i).i > a(i-1).i) return false; i += 1 }; true }
+
+ val sizes = Seq.range(0, 65) ++ Seq(256, 1024, 9121, 65539)
+ val variety = Seq(1, 2, 10, 100, 1000, Int.MaxValue)
+ val workLimit = 1e6
+ val rng = new util.Random(198571)
+
+ val backwardsN = Ordering by ((n: N) => -n.i)
+
+ def runOneTest(size: Int, variety: Int): Unit = {
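+    // zs is the reference result (java.util.Arrays.sort with a Comparator is stable); qxs/pxs use
+    // quickSort with the natural and the reversed ordering, sxs/rxs use stableSort likewise, and
+    // sys stable-sorts the index array ys by the proxy key xs(i).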
+ val xs = Array.tabulate(size)(i => N(rng.nextInt(variety), i))
+ val ys = Array.range(0, xs.length)
+ val zs = { val temp = xs.clone; java.util.Arrays.sort(temp, new java.util.Comparator[N] { def compare(a: N, b: N) = a.compare(b) }); temp }
+ val qxs = { val temp = xs.clone; Sorting.quickSort(temp); temp }
+ val pxs = { val temp = xs.clone; Sorting.quickSort(temp)(backwardsN); temp }
+ val sxs = { val temp = xs.clone; Sorting.stableSort(temp); temp }
+ val rxs = { val temp = xs.clone; Sorting.stableSort(temp)(implicitly[ClassTag[N]], backwardsN); temp }
+ val sys = Sorting.stableSort(ys.clone: Seq[Int], (i: Int) => xs(i))
+
+ assertTrue("Quicksort should be in order", isSorted(qxs))
+ assertTrue("Quicksort should be in reverse order", isAntisorted(pxs))
+ assertTrue("Stable sort should be sorted and stable", isStable(sxs))
+ assertTrue("Stable sort should be reverse sorted but stable", isAntistable(rxs))
+ assertTrue("Stable sorting by proxy should produce sorted stable list", isStable(sys.map(i => xs(i))))
+ assertTrue("Quicksort should produce canonical ordering", (qxs zip zs).forall{ case (a,b) => a.i == b.i })
+ assertTrue("Reverse quicksort should produce canonical ordering", (pxs.reverse zip zs).forall{ case (a,b) => a.i == b.i })
+ assertTrue("Stable sort should produce exact ordering", (sxs zip zs).forall{ case (a,b) => a == b })
+ assertTrue("Reverse stable sort should produce canonical ordering", (rxs.reverse zip zs).forall{ case (a,b) => a.i == b.i })
+ assertTrue("Proxy sort and direct sort should produce exactly the same thing", (sxs zip sys.map(i => xs(i))).forall{ case (a,b) => a == b })
+ }
+
+ @Test def testSortConsistency: Unit = {
+ for {
+ size <- sizes
+ v <- variety
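+      // Repetitions per (size, variety): at most 100 and at least 1, further bounded by the number
+      // of distinct inputs (~ v^size / 2) and by workLimit divided by the ~size*log2(size) cost of one run.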
+ i <- 0 until math.min(100, math.max(math.min(math.floor(math.pow(v, size)/2), math.ceil(workLimit / (math.log(math.max(2,size))/math.log(2) * size))), 1).toInt)
+ } runOneTest(size, v)
+
+ for (size <- sizes) {
+ val b = Array.fill(size)(rng.nextBoolean)
+ val bfwd = Sorting.stableSort(b.clone: Seq[Boolean])
+ val bbkw = Sorting.stableSort(b.clone: Seq[Boolean], (x: Boolean, y: Boolean) => x && !y)
+ assertTrue("All falses should be first", bfwd.dropWhile(_ == false).forall(_ == true))
+ assertTrue("All falses should be last when sorted backwards", bbkw.dropWhile(_ == true).forall(_ == false))
+ assertTrue("Sorting booleans should preserve the number of trues", b.count(_ == true) == bfwd.count(_ == true))
+ assertTrue("Backwards sorting booleans should preserve the number of trues", b.count(_ == true) == bbkw.count(_ == true))
+ assertTrue("Sorting should not change the sizes of arrays", b.length == bfwd.length && b.length == bbkw.length)
+ }
+ }
+}
diff --git a/versions.properties b/versions.properties
index 406690861e..a7ec8caedc 100644
--- a/versions.properties
+++ b/versions.properties
@@ -33,7 +33,7 @@ scala-swing.version.number=1.0.2
akka-actor.version.number=2.3.10
actors-migration.version.number=1.1.0
jline.version=2.12.1
-scala-asm.version=5.0.3-scala-3
+scala-asm.version=5.0.4-scala-1
# external modules, used internally (not shipped)
partest.version.number=1.0.7