author    Jason Zaugg <jzaugg@gmail.com>  2015-06-24 15:19:05 +1000
committer Jason Zaugg <jzaugg@gmail.com>  2015-06-24 18:10:42 +1000
commit    1f7417c763fb199cacc6afedc6e54796916fd673 (patch)
tree      bd00d7e0e47be310172f01c758c01ca5430063c4 /src
parent    73f40564a6b19e8b15f0908c3e24f1a8fe405605 (diff)
parent    1b09e12ef3b3fea1cab56bac893295f74de23b8b (diff)
Merge branch '2.11.x' into merge/2.11.x-to-2.12.x-20150624
Diffstat (limited to 'src')
-rw-r--r--  src/build/genprod.scala | 13
-rw-r--r--  src/compiler/scala/reflect/quasiquotes/Holes.scala | 2
-rw-r--r--  src/compiler/scala/reflect/quasiquotes/Reifiers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/PhaseAssembly.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Properties.scala | 2
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/DocComments.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala | 52
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala | 43
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 53
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 28
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala | 251
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala | 265
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala | 282
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala | 37
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala | 38
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala | 72
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala | 240
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/settings/Warnings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Delambdafy.scala | 152
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Logic.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 4
-rwxr-xr-x  src/compiler/scala/tools/nsc/util/DocStrings.scala | 2
-rw-r--r--  src/library/scala/Function0.scala | 8
-rw-r--r--  src/library/scala/Function1.scala | 7
-rw-r--r--  src/library/scala/Function2.scala | 6
-rw-r--r--  src/library/scala/collection/Iterator.scala | 32
-rw-r--r--  src/library/scala/collection/generic/Sorted.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/NumericRange.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/Queue.scala | 9
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 17
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 2
-rw-r--r--  src/library/scala/math/BigDecimal.scala | 4
-rw-r--r--  src/library/scala/math/Numeric.scala | 6
-rw-r--r--  src/library/scala/sys/BooleanProp.scala | 7
-rw-r--r--  src/library/scala/util/Sorting.scala | 712
-rw-r--r--  src/library/scala/util/Try.scala | 7
-rw-r--r--  src/partest-extras/scala/tools/partest/ReplTest.scala | 12
-rw-r--r--  src/reflect/scala/reflect/api/FlagSets.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Printers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/AnnotationCheckers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/ClassfileConstants.scala | 13
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/ReificationSupport.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 1
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverseForce.scala | 3
-rw-r--r--  src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala (renamed from src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala) | 35
-rw-r--r--  src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala | 25
-rw-r--r--  src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala | 77
-rw-r--r--  src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala | 143
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Delimited.scala | 41
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Formatting.scala | 27
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ILoop.scala | 171
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala | 67
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Imports.scala | 24
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala | 1
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/JLineReader.scala | 75
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Parsed.scala | 19
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Pasted.scala | 22
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplProps.scala | 11
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Tabulators.scala (renamed from src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala) | 59
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/session/History.scala | 3
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala | 49
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala | 12
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/session/package.scala | 5
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/Settings.scala | 4
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala | 1
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala | 1
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/Entity.scala | 6
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala | 14
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala | 2
102 files changed, 2343 insertions, 1136 deletions
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index ed436fe2e4..b470348e8c 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -123,7 +123,10 @@ object FunctionOne extends Function(1) {
* def apply(x: Int): Int = x + 1
* }
* assert(succ(0) == anonfun1(0))
- * """)
+ * """) + """
+ *
+ * Note that the difference between `Function1` and [[scala.PartialFunction]]
+ * is that the latter can specify inputs which it will not handle."""
override def moreMethods = """
/** Composes two instances of Function1 in a new Function1, with this function applied last.
@@ -178,13 +181,7 @@ class Function(val i: Int) extends Group("Function") with Arity {
*
* {{{
* object Main extends App {%s}
- * }}}
- *
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-"""
+ * }}}"""
def toStr() = "\"" + ("<function%d>" format i) + "\""
def apply() = {
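The moved doc text describes the Function1 / PartialFunction distinction in words; a minimal sketch of that distinction (the names below are illustrative, not part of the patch):

  val succ: Int => Int = _ + 1                    // a Function1: defined for every Int
  val half: PartialFunction[Int, Int] = {         // a PartialFunction: declares which inputs it handles
    case n if n % 2 == 0 => n / 2
  }
  assert(succ(3) == 4)
  assert(half.isDefinedAt(4) && !half.isDefinedAt(3))
  assert(half.lift(3).isEmpty)                    // lift turns it into a total Int => Option[Int]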
diff --git a/src/compiler/scala/reflect/quasiquotes/Holes.scala b/src/compiler/scala/reflect/quasiquotes/Holes.scala
index 6fa6b9b37a..47084fc317 100644
--- a/src/compiler/scala/reflect/quasiquotes/Holes.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Holes.scala
@@ -151,7 +151,7 @@ trait Holes { self: Quasiquotes =>
else None
}
- /** Map high-rank unquotee onto an expression that eveluates as a list of given rank.
+ /** Map high-rank unquotee onto an expression that evaluates as a list of given rank.
*
* All possible combinations of representations are given in the table below:
*
diff --git a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
index e753c9787a..8462debe21 100644
--- a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
@@ -322,7 +322,7 @@ trait Reifiers { self: Quasiquotes =>
* in the domain of the fill function;
*
* 2. fold the groups into a sequence of lists added together with ++ using
- * fill reification for holeMapĀ and fallback reification for non-holeMap.
+ * fill reification for holeMap and fallback reification for non-holeMap.
*
* Example:
*
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 72e96df37c..778f2fed59 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -10,7 +10,6 @@ package nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
import java.net.URL
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
-import scala.compat.Platform.currentTime
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
@@ -1490,6 +1489,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
compileUnitsInternal(units, fromPhase)
private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) {
+ def currentTime = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime())
+
units foreach addUnit
val startTime = currentTime
@@ -1677,23 +1678,25 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def getFile(clazz: Symbol, suffix: String): File = getFile(clazz.sourceFile, clazz.fullName split '.', suffix)
private def writeICode() {
- val printer = new icodes.TextPrinter(null, icodes.linearizer)
- icodes.classes.values.foreach((cls) => {
- val moduleSfx = if (cls.symbol.hasModuleFlag) "$" else ""
- val phaseSfx = if (settings.debug) phase else "" // only for debugging, appending the full phasename breaks windows build
- val file = getFile(cls.symbol, s"$moduleSfx$phaseSfx.icode")
+ val printer = new icodes.TextPrinter(writer = null, icodes.linearizer)
+ icodes.classes.values foreach { cls =>
+ val file = {
+ val module = if (cls.symbol.hasModuleFlag) "$" else ""
+ val faze = if (settings.debug) phase.name else f"${phase.id}%02d" // avoid breaking windows build with long filename
+ getFile(cls.symbol, s"$module-$faze.icode")
+ }
try {
val stream = new FileOutputStream(file)
printer.setWriter(new PrintWriter(stream, true))
printer.printClass(cls)
- informProgress("wrote " + file)
+ informProgress(s"wrote $file")
} catch {
- case ex: IOException =>
- if (settings.debug) ex.printStackTrace()
- globalError("could not write file " + file)
+ case e: IOException =>
+ if (settings.debug) e.printStackTrace()
+ globalError(s"could not write file $file")
}
- })
+ }
}
def createJavadoc = false
}
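The first hunk above swaps scala.compat.Platform.currentTime for a nanoTime-based clock when timing compilation. A minimal sketch of the same pattern (helper name is illustrative): nanoTime is monotonic, so measured durations are unaffected by wall-clock adjustments.

  import java.util.concurrent.TimeUnit

  def elapsedMillis[A](body: => A): (A, Long) = {
    val start = System.nanoTime()
    val result = body
    (result, TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start))
  }

  val (_, ms) = elapsedMillis { Thread.sleep(10) }
  println(s"body took $ms ms")   // durations only; nanoTime values are not wall-clock timestamps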
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index 4b32aab5ee..ef9818c62d 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -226,7 +226,7 @@ trait PhaseAssembly {
}
/** Given the phases set, will build a dependency graph from the phases set
- * Using the aux. method of the DependencyGraph to create nodes and egdes.
+ * Using the aux. method of the DependencyGraph to create nodes and edges.
*/
private def phasesSetToDepGraph(phsSet: mutable.HashSet[SubComponent]): DependencyGraph = {
val graph = new DependencyGraph()
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index 9f160e2485..ca7d8776d4 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -13,7 +13,7 @@ object Properties extends scala.util.PropertiesTrait {
// settings based on jar properties, falling back to System prefixed by "scala."
def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
- def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ")
+ def shellPromptString = scalaPropOrElse("shell.prompt", "%nscala> ")
// message to display at EOF (which by default ends with
// a newline so as not to break the user's terminal)
def shellInterruptedString = scalaPropOrElse("shell.interrupted", f":quit$lineSeparator")
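The default prompt switches from a literal "\n" to "%n", which format-style interpolation later expands; a small sketch of the difference, assuming plain String.format semantics:

  val prompt = "%nscala> "
  assert(prompt.format() == System.lineSeparator + "scala> ")  // %n expands to the platform line separator
  // the old default, "\nscala> ", always emitted a bare LF regardless of platform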
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 055f658670..c70690e697 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -76,9 +76,9 @@ trait DocComments { self: Global =>
superComment(sym) match {
case None =>
- if (ownComment.indexOf("@inheritdoc") != -1)
- reporter.warning(sym.pos, "The comment for " + sym +
- " contains @inheritdoc, but no parent comment is available to inherit from.")
+ // SI-8210 - The warning would be false negative when this symbol is a setter
+ if (ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter)
+ reporter.warning(sym.pos, s"The comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.")
ownComment.replaceAllLiterally("@inheritdoc", "<invalid inheritdoc annotation>")
case Some(sc) =>
if (ownComment == "") sc
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index cbe6239bb2..01eff71057 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1022,7 +1022,7 @@ abstract class GenICode extends SubComponent {
tree match {
case Literal(Constant(null)) if generatedType == NullReference && expectedType != UNIT =>
// literal null on the stack (as opposed to a boxed null, see SI-8233),
- // we can bypass `adapt` which would otherwise emitt a redundant [DROP, CONSTANT(null)]
+ // we can bypass `adapt` which would otherwise emit a redundant [DROP, CONSTANT(null)]
// except one case: when expected type is UNIT (unboxed) where we need to emit just a DROP
case _ =>
adapt(generatedType, expectedType, resCtx, tree.pos)
@@ -2108,7 +2108,7 @@ abstract class GenICode extends SubComponent {
/**
* Represent a label in the current method code. In order
* to support forward jumps, labels can be created without
- * having a deisgnated target block. They can later be attached
+ * having a designated target block. They can later be attached
* by calling `anchor`.
*/
class Label(val symbol: Symbol) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 058b6a161d..64c9901a3e 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -387,7 +387,7 @@ abstract class TypeFlowAnalysis {
Moreover, it's often the case that the last CALL_METHOD of interest ("of interest" equates to "being tracked in `isOnWatchlist`) isn't the last instruction on the block.
There are cases where the typeflows computed past this `lastInstruction` are needed, and cases when they aren't.
- The reasoning behind this decsision is described in `populatePerimeter()`. All `blockTransfer()` needs to do (in order to know at which instruction it can stop)
+ The reasoning behind this decision is described in `populatePerimeter()`. All `blockTransfer()` needs to do (in order to know at which instruction it can stop)
is querying `isOnPerimeter`.
Upon visiting a CALL_METHOD that's an inlining candidate, the relevant pieces of information about the pre-instruction typestack are collected for future use.
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
index eadc404bee..dec5adc9aa 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
@@ -9,6 +9,7 @@ package backend.jvm
import scala.tools.nsc.Global
import scala.tools.nsc.backend.jvm.BTypes.{InternalName, MethodInlineInfo, InlineInfo}
import BackendReporting.ClassSymbolInfoFailureSI9111
+import scala.tools.asm
/**
* This trait contains code shared between GenBCode and GenASM that depends on types defined in
@@ -229,6 +230,44 @@ final class BCodeAsmCommon[G <: Global](val global: G) {
}
/**
+ * Reconstruct the classfile flags from a Java defined class symbol.
+ *
+ * The implementation of this method is slightly different that `javaFlags` in BTypesFromSymbols.
+ * The javaFlags method is primarily used to map Scala symbol flags to sensible classfile flags
+ * that are used in the generated classfiles. For example, all classes emitted by the Scala
+ * compiler have ACC_PUBLIC.
+ *
+ * When building a [[ClassBType]] from a Java class symbol, the flags in the type's `info` have
+ * to correspond exactly to the flags in the classfile. For example, if the class is package
+ * protected (i.e., it doesn't have the ACC_PUBLIC flag), this needs to be reflected in the
+ * ClassBType. For example, the inliner needs the correct flags for access checks.
+ *
+ * Class flags are listed here:
+ * https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.1-200-E.1
+ */
+ def javaClassfileFlags(classSym: Symbol): Int = {
+ assert(classSym.isJava, s"Expected Java class symbol, got ${classSym.fullName}")
+ import asm.Opcodes._
+ def enumFlags = ACC_ENUM | {
+ // Java enums have the `ACC_ABSTRACT` flag if they have a deferred method.
+ // We cannot trust `hasAbstractFlag`: the ClassfileParser adds `ABSTRACT` and `SEALED` to all
+ // Java enums for exhaustiveness checking.
+ val hasAbstractMethod = classSym.info.decls.exists(s => s.isMethod && s.isDeferred)
+ if (hasAbstractMethod) ACC_ABSTRACT else 0
+ }
+ GenBCode.mkFlags(
+ if (classSym.isPublic) ACC_PUBLIC else 0,
+ if (classSym.isFinal) ACC_FINAL else 0,
+ // see the link above. javac does the same: ACC_SUPER for all classes, but not interfaces.
+ if (classSym.isInterface) ACC_INTERFACE else ACC_SUPER,
+ // for Java enums, we cannot trust `hasAbstractFlag` (see comment in enumFlags)
+ if (!classSym.hasEnumFlag && classSym.hasAbstractFlag) ACC_ABSTRACT else 0,
+ if (classSym.isArtifact) ACC_SYNTHETIC else 0,
+ if (classSym.hasEnumFlag) enumFlags else 0
+ )
+ }
+
+ /**
* The member classes of a class symbol. Note that the result of this method depends on the
* current phase, for example, after lambdalift, all local classes become member of the enclosing
* class.
@@ -399,3 +438,16 @@ final class BCodeAsmCommon[G <: Global](val global: G) {
InlineInfo(traitSelfType, isEffectivelyFinal, methodInlineInfos, warning)
}
}
+
+object BCodeAsmCommon {
+ /**
+ * Valid flags for InnerClass attribute entry.
+ * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6
+ */
+ val INNER_CLASSES_FLAGS = {
+ asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE |
+ asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION |
+ asm.Opcodes.ACC_ENUM
+ }
+}
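javaClassfileFlags and INNER_CLASSES_FLAGS are plain bit masks over ASM's ACC_* constants; a small sketch of how such masks compose and are tested (assumes the ASM fork shipped with the compiler is on the classpath; mkFlags here mirrors the helper used above):

  import scala.tools.asm.Opcodes._

  def mkFlags(bits: Int*): Int = bits.foldLeft(0)(_ | _)

  val flags = mkFlags(ACC_PUBLIC, ACC_FINAL, ACC_SUPER)
  assert((flags & ACC_FINAL) != 0)       // flag present
  assert((flags & ACC_INTERFACE) == 0)   // flag absent
  // masking with the InnerClass-valid bits drops ACC_SUPER, which is not legal there:
  val innerClassMask = ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_FINAL |
                       ACC_INTERFACE | ACC_ABSTRACT | ACC_SYNTHETIC | ACC_ANNOTATION | ACC_ENUM
  assert((flags & innerClassMask) == (ACC_PUBLIC | ACC_FINAL))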
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
index 4b95f935b1..c3f71969f6 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
@@ -33,7 +33,6 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
* Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions.
*/
abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) {
-
import icodes.TestOp
import icodes.opcodes.InvokeStyle
@@ -1287,38 +1286,42 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
def genInvokeDynamicLambda(lambdaTarget: Symbol, arity: Int, functionalInterface: Symbol) {
val isStaticMethod = lambdaTarget.hasFlag(Flags.STATIC)
+ def asmType(sym: Symbol) = classBTypeFromSymbol(sym).toASMType
- val targetHandle =
+ val implMethodHandle =
new asm.Handle(if (lambdaTarget.hasFlag(Flags.STATIC)) asm.Opcodes.H_INVOKESTATIC else asm.Opcodes.H_INVOKEVIRTUAL,
classBTypeFromSymbol(lambdaTarget.owner).internalName,
lambdaTarget.name.toString,
asmMethodType(lambdaTarget).descriptor)
- val receiver = if (isStaticMethod) None else Some(lambdaTarget.owner)
+ val receiver = if (isStaticMethod) Nil else lambdaTarget.owner :: Nil
val (capturedParams, lambdaParams) = lambdaTarget.paramss.head.splitAt(lambdaTarget.paramss.head.length - arity)
// Requires https://github.com/scala/scala-java8-compat on the runtime classpath
- val returnUnit = lambdaTarget.info.resultType.typeSymbol == UnitClass
- val functionalInterfaceDesc: String = classBTypeFromSymbol(functionalInterface).descriptor
- val desc = (receiver.toList ::: capturedParams).map(sym => toTypeKind(sym.info)).mkString(("("), "", ")") + functionalInterfaceDesc
+ val invokedType = asm.Type.getMethodDescriptor(asmType(functionalInterface), (receiver ::: capturedParams).map(sym => toTypeKind(sym.info).toASMType): _*)
- // TODO specialization
val constrainedType = new MethodBType(lambdaParams.map(p => toTypeKind(p.tpe)), toTypeKind(lambdaTarget.tpe.resultType)).toASMType
- val abstractMethod = functionalInterface.info.decls.find(_.isDeferred).getOrElse(functionalInterface.info.member(nme.apply))
- val methodName = abstractMethod.name.toString
- val applyN = {
- val mt = asmMethodType(abstractMethod)
- mt.toASMType
- }
-
- bc.jmethod.visitInvokeDynamicInsn(methodName, desc, lambdaMetaFactoryBootstrapHandle,
- // boostrap args
- applyN, targetHandle, constrainedType
+ val sam = functionalInterface.info.decls.find(_.isDeferred).getOrElse(functionalInterface.info.member(nme.apply))
+ val samName = sam.name.toString
+ val samMethodType = asmMethodType(sam).toASMType
+
+ val flags = 3 // TODO 2.12.x Replace with LambdaMetafactory.FLAG_SERIALIZABLE | LambdaMetafactory.FLAG_MARKERS
+
+ val ScalaSerializable = classBTypeFromSymbol(definitions.SerializableClass).toASMType
+ bc.jmethod.visitInvokeDynamicInsn(samName, invokedType, lambdaMetaFactoryBootstrapHandle,
+ /* samMethodType = */ samMethodType,
+ /* implMethod = */ implMethodHandle,
+ /* instantiatedMethodType = */ constrainedType,
+ /* flags = */ flags.asInstanceOf[AnyRef],
+ /* markerInterfaceCount = */ 1.asInstanceOf[AnyRef],
+ /* markerInterfaces[0] = */ ScalaSerializable,
+ /* bridgeCount = */ 0.asInstanceOf[AnyRef]
)
+ indyLambdaHosts += this.claszSymbol
}
}
- val lambdaMetaFactoryBootstrapHandle =
+ lazy val lambdaMetaFactoryBootstrapHandle =
new asm.Handle(asm.Opcodes.H_INVOKESTATIC,
- "java/lang/invoke/LambdaMetafactory", "metafactory",
- "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;")
+ definitions.LambdaMetaFactory.fullName('/'), sn.AltMetafactory.toString,
+ "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;")
}
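The hardcoded `flags = 3` passed to altMetafactory is the combination named in the TODO comment; a one-line check against the JDK constants in java.lang.invoke.LambdaMetafactory:

  import java.lang.invoke.LambdaMetafactory

  val flags = LambdaMetafactory.FLAG_SERIALIZABLE | LambdaMetafactory.FLAG_MARKERS
  assert(flags == 3)  // the literal used as a bootstrap argument above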
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
index 18468f5ae3..6aa3a62295 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
@@ -682,6 +682,59 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
new java.lang.Long(id)
).visitEnd()
}
+
+ /**
+ * Add:
+ * private static java.util.Map $deserializeLambdaCache$ = null
+ * private static Object $deserializeLambda$(SerializedLambda l) {
+ * var cache = $deserializeLambdaCache$
+ * if (cache eq null) {
+ * cache = new java.util.HashMap()
+ * $deserializeLambdaCache$ = cache
+ * }
+ * return scala.compat.java8.runtime.LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), cache, l);
+ * }
+ */
+ def addLambdaDeserialize(clazz: Symbol, jclass: asm.ClassVisitor): Unit = {
+ val cw = jclass
+ import scala.tools.asm.Opcodes._
+
+ // Need to force creation of BTypes for these as `getCommonSuperClass` is called on
+ // automatically computing the max stack size (`visitMaxs`) during method writing.
+ javaUtilHashMapReference
+ javaUtilMapReference
+
+ cw.visitInnerClass("java/lang/invoke/MethodHandles$Lookup", "java/lang/invoke/MethodHandles", "Lookup", ACC_PUBLIC + ACC_FINAL + ACC_STATIC)
+
+ {
+ val fv = cw.visitField(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambdaCache$", "Ljava/util/Map;", null, null)
+ fv.visitEnd()
+ }
+
+ {
+ val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", "(Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", null, null)
+ mv.visitCode()
+ mv.visitFieldInsn(GETSTATIC, clazz.javaBinaryName.encoded, "$deserializeLambdaCache$", "Ljava/util/Map;")
+ mv.visitVarInsn(ASTORE, 1)
+ mv.visitVarInsn(ALOAD, 1)
+ val l0 = new asm.Label()
+ mv.visitJumpInsn(IFNONNULL, l0)
+ mv.visitTypeInsn(NEW, "java/util/HashMap")
+ mv.visitInsn(DUP)
+ mv.visitMethodInsn(INVOKESPECIAL, "java/util/HashMap", "<init>", "()V", false)
+ mv.visitVarInsn(ASTORE, 1)
+ mv.visitVarInsn(ALOAD, 1)
+ mv.visitFieldInsn(PUTSTATIC, clazz.javaBinaryName.encoded, "$deserializeLambdaCache$", "Ljava/util/Map;")
+ mv.visitLabel(l0)
+ mv.visitFrame(asm.Opcodes.F_APPEND,1, Array("java/util/Map"), 0, null)
+ mv.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodHandles", "lookup", "()Ljava/lang/invoke/MethodHandles$Lookup;", false)
+ mv.visitVarInsn(ALOAD, 1)
+ mv.visitVarInsn(ALOAD, 0)
+ mv.visitMethodInsn(INVOKESTATIC, "scala/compat/java8/runtime/LambdaDeserializer", "deserializeLambda", "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/util/Map;Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", false)
+ mv.visitInsn(ARETURN)
+ mv.visitEnd()
+ }
+ }
} // end of trait BCClassGen
/* functionality for building plain and mirror classes */
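The synthesized $deserializeLambda$ is what Java serialization dispatches to for invokedynamic lambdas; a usage-level sketch of the effect, assuming compilation with -target:jvm-1.8 -Ydelambdafy:method and scala-java8-compat on the runtime classpath, as noted in the patch:

  import java.io._

  val f: Int => Int = _ + 1
  val out = new ByteArrayOutputStream()
  val oos = new ObjectOutputStream(out)
  oos.writeObject(f)                                               // a SerializedLambda is written
  oos.close()
  val in = new ObjectInputStream(new ByteArrayInputStream(out.toByteArray))
  val g  = in.readObject().asInstanceOf[Int => Int]                // readResolve ends up in $deserializeLambda$
  assert(g(41) == 42)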
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
index 2a06c62e37..0f67852804 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
@@ -68,6 +68,8 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
var isCZStaticModule = false
var isCZRemote = false
+ protected val indyLambdaHosts = collection.mutable.Set[Symbol]()
+
/* ---------------- idiomatic way to ask questions to typer ---------------- */
def paramTKs(app: Apply): List[BType] = {
@@ -88,7 +90,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
override def getCurrentCUnit(): CompilationUnit = { cunit }
- /* ---------------- helper utils for generating classes and fiels ---------------- */
+ /* ---------------- helper utils for generating classes and fields ---------------- */
def genPlainClass(cd: ClassDef) {
assert(cnode == null, "GenBCode detected nested methods.")
@@ -121,6 +123,16 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
innerClassBufferASM ++= classBType.info.get.nestedClasses
gen(cd.impl)
+
+
+ val shouldAddLambdaDeserialize = (
+ settings.target.value == "jvm-1.8"
+ && settings.Ydelambdafy.value == "method"
+ && indyLambdaHosts.contains(claszSymbol))
+
+ if (shouldAddLambdaDeserialize)
+ addLambdaDeserialize(claszSymbol, cnode)
+
addInnerClassesASM(cnode, innerClassBufferASM.toList)
cnode.visitAttribute(classBType.inlineInfoAttribute.get)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
index e61190bf3a..176292669c 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
@@ -898,7 +898,7 @@ abstract class BTypes {
// the static flag in the InnerClass table has a special meaning, see InnerClass comment
i.flags & ~Opcodes.ACC_STATIC,
if (isStaticNestedClass) Opcodes.ACC_STATIC else 0
- ) & ClassBType.INNER_CLASSES_FLAGS
+ ) & BCodeAsmCommon.INNER_CLASSES_FLAGS
)
})
@@ -987,17 +987,6 @@ abstract class BTypes {
}
object ClassBType {
- /**
- * Valid flags for InnerClass attribute entry.
- * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6
- */
- private val INNER_CLASSES_FLAGS = {
- asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
- asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE |
- asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION |
- asm.Opcodes.ACC_ENUM
- }
-
// Primitive classes have no super class. A ClassBType for those is only created when
// they are actually being compiled (e.g., when compiling scala/Boolean.scala).
private val hasNoSuper = Set(
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
index 1b9fd5e298..d68c916f09 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
@@ -230,7 +230,10 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
val interfaces = implementedInterfaces(classSym).map(classBTypeFromSymbol)
- val flags = javaFlags(classSym)
+ val flags = {
+ if (classSym.isJava) javaClassfileFlags(classSym) // see comment on javaClassfileFlags
+ else javaFlags(classSym)
+ }
/* The InnerClass table of a class C must contain all nested classes of C, even if they are only
* declared but not otherwise referenced in C (from the bytecode or a method / field signature).
@@ -290,7 +293,7 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
val javaCompatMembers = {
if (linkedClass != NoSymbol && isTopLevelModuleClass(linkedClass))
// phase travel to exitingPickler: this makes sure that memberClassesForInnerClassTable only sees member
- // classes, not local classes of the companion module (E in the exmaple) that were lifted by lambdalift.
+ // classes, not local classes of the companion module (E in the example) that were lifted by lambdalift.
exitingPickler(memberClassesForInnerClassTable(linkedClass))
else
Nil
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
index 492fe3ae79..00ca096e59 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
@@ -114,6 +114,8 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) {
lazy val jioSerializableReference : ClassBType = classBTypeFromSymbol(JavaSerializableClass) // java/io/Serializable
lazy val scalaSerializableReference : ClassBType = classBTypeFromSymbol(SerializableClass) // scala/Serializable
lazy val classCastExceptionReference : ClassBType = classBTypeFromSymbol(ClassCastExceptionClass) // java/lang/ClassCastException
+ lazy val javaUtilMapReference : ClassBType = classBTypeFromSymbol(JavaUtilMap) // java/util/Map
+ lazy val javaUtilHashMapReference : ClassBType = classBTypeFromSymbol(JavaUtilHashMap) // java/util/HashMap
lazy val srBooleanRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BooleanRef])
lazy val srByteRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.ByteRef])
@@ -258,6 +260,8 @@ final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes:
def jioSerializableReference : ClassBType = _coreBTypes.jioSerializableReference
def scalaSerializableReference : ClassBType = _coreBTypes.scalaSerializableReference
def classCastExceptionReference : ClassBType = _coreBTypes.classCastExceptionReference
+ def javaUtilMapReference : ClassBType = _coreBTypes.javaUtilMapReference
+ def javaUtilHashMapReference : ClassBType = _coreBTypes.javaUtilHashMapReference
def srBooleanRef : ClassBType = _coreBTypes.srBooleanRef
def srByteRef : ClassBType = _coreBTypes.srByteRef
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 9a7200b686..ccad50616c 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -476,10 +476,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self =>
val CLASS_CONSTRUCTOR_NAME = "<clinit>"
val INSTANCE_CONSTRUCTOR_NAME = "<init>"
- val INNER_CLASSES_FLAGS =
- (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
- asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_FINAL)
-
// -----------------------------------------------------------------------------------------
// factory methods
// -----------------------------------------------------------------------------------------
@@ -614,18 +610,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self =>
val internalName = cachedJN.toString()
val trackedSym = jsymbol(sym)
reverseJavaName.get(internalName) match {
- case Some(oldsym) if oldsym.exists && trackedSym.exists =>
- assert(
- // In contrast, neither NothingClass nor NullClass show up bytecode-level.
- (oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)),
- s"""|Different class symbols have the same bytecode-level internal name:
- | name: $internalName
- | oldsym: ${oldsym.fullNameString}
- | tracked: ${trackedSym.fullNameString}
- """.stripMargin
- )
- case _ =>
+ case None =>
reverseJavaName.put(internalName, trackedSym)
+ case Some(oldsym) =>
+ // TODO: `duplicateOk` seems pretty ad-hoc (a more aggressive version caused SI-9356 because it called oldSym.exists, which failed in the unpickler; see also SI-5031)
+ def duplicateOk = oldsym == NoSymbol || trackedSym == NoSymbol || (syntheticCoreClasses contains oldsym) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule))
+ if (oldsym != trackedSym && !duplicateOk)
+ devWarning(s"""|Different class symbols have the same bytecode-level internal name:
+ | name: $internalName
+ | oldsym: ${oldsym.fullNameString}
+ | tracked: ${trackedSym.fullNameString}""".stripMargin)
}
}
@@ -755,9 +749,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self =>
val flagsWithFinal: Int = mkFlags(
// See comment in BTypes, when is a class marked static in the InnerClass table.
if (isOriginallyStaticOwner(innerSym.originalOwner)) asm.Opcodes.ACC_STATIC else 0,
- javaFlags(innerSym),
+ (if (innerSym.isJava) javaClassfileFlags(innerSym) else javaFlags(innerSym)) & ~asm.Opcodes.ACC_STATIC,
if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
- ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ ) & (BCodeAsmCommon.INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
val flags = if (innerSym.isModuleClass) flagsWithFinal & ~asm.Opcodes.ACC_FINAL else flagsWithFinal // For SI-5676, object overriding.
val jname = javaName(innerSym) // never null
val oname = outerName(innerSym) // null when method-enclosed
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
new file mode 100644
index 0000000000..7bbe1e2a49
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
@@ -0,0 +1,251 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import scala.annotation.switch
+import scala.collection.{mutable, immutable}
+import scala.tools.asm.Opcodes
+import scala.tools.asm.tree._
+import scala.tools.asm.tree.analysis.{Analyzer, Value, Frame, Interpreter}
+import opt.BytecodeUtils._
+
+object AliasingFrame {
+ private var _idCounter: Long = 0l
+ private def nextId = { _idCounter += 1; _idCounter }
+}
+
+class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLocals, nStack) {
+ import Opcodes._
+
+ // Auxiliary constructor required for implementing `AliasingAnalyzer.newFrame`
+ def this(src: Frame[_ <: V]) {
+ this(src.getLocals, src.getMaxStackSize)
+ init(src)
+ }
+
+ /**
+ * For each slot (entry in the `values` array of the frame), an id that uniquely represents
+ * the object stored in it. If two values have the same id, they are aliases of the same
+ * object.
+ */
+ private val aliasIds: Array[Long] = Array.fill(nLocals + nStack)(AliasingFrame.nextId)
+
+ /**
+ * The object alias id of for a value index.
+ */
+ def aliasId(entry: Int) = aliasIds(entry)
+
+ /**
+ * Returns the indices of the values array which are aliases of the object `id`.
+ */
+ def valuesWithAliasId(id: Long): Set[Int] = immutable.BitSet.empty ++ aliasIds.indices.iterator.filter(i => aliasId(i) == id)
+
+ /**
+ * The set of aliased values for a given entry in the `values` array.
+ */
+ def aliasesOf(entry: Int): Set[Int] = valuesWithAliasId(aliasIds(entry))
+
+ /**
+ * Define a new alias. For example, given
+ * var a = this // this, a have the same aliasId
+ * then an assignment
+ * b = a
+ * will set the same the aliasId for `b`.
+ */
+ private def newAlias(assignee: Int, source: Int): Unit = {
+ aliasIds(assignee) = aliasIds(source)
+ }
+
+ /**
+ * An assignment
+ * a = someUnknownValue()
+ * sets a fresh alias id for `a`.
+ * A stack value is also removed from its alias set when being consumed.
+ */
+ private def removeAlias(assignee: Int): Unit = {
+ aliasIds(assignee) = AliasingFrame.nextId
+ }
+
+ override def execute(insn: AbstractInsnNode, interpreter: Interpreter[V]): Unit = {
+ // Make the extendsion methods easier to use (otherwise we have to repeat `this`.stackTop)
+ def stackTop: Int = this.stackTop
+ def peekStack(n: Int): V = this.peekStack(n)
+
+ // the val pattern `val (p, c) = f` still allocates a tuple (https://github.com/scala-opt/scala/issues/28)
+ val prodCons = InstructionStackEffect(insn, this) // needs to be called before super.execute, see its doc
+ val consumed = prodCons._1
+ val produced = prodCons._2
+
+ super.execute(insn, interpreter)
+
+ (insn.getOpcode: @switch) match {
+ case ALOAD =>
+ newAlias(assignee = stackTop, source = insn.asInstanceOf[VarInsnNode].`var`)
+
+ case DUP =>
+ val top = stackTop
+ newAlias(assignee = top, source = top - 1)
+
+ case DUP_X1 =>
+ val top = stackTop
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ newAlias(assignee = top - 2, source = top)
+
+ case DUP_X2 =>
+ // Check if the second element on the stack is size 2
+ // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup_x2
+ val isSize2 = peekStack(1).getSize == 2
+ val top = stackTop
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ if (isSize2) {
+ // Size 2 values on the stack only take one slot in the `values` array
+ newAlias(assignee = top - 2, source = top)
+ } else {
+ newAlias(assignee = top - 2, source = top - 3)
+ newAlias(assignee = top - 3, source = top)
+ }
+
+ case DUP2 =>
+ val isSize2 = peekStack(0).getSize == 2
+ val top = stackTop
+ if (isSize2) {
+ newAlias(assignee = top, source = top - 1)
+ } else {
+ newAlias(assignee = top - 1, source = top - 3)
+ newAlias(assignee = top, source = top - 2)
+ }
+
+ case DUP2_X1 =>
+ val isSize2 = peekStack(0).getSize == 2
+ val top = stackTop
+ if (isSize2) {
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ newAlias(assignee = top - 2, source = top)
+ } else {
+ newAlias(assignee = top, source = top - 2)
+ newAlias(assignee = top - 1, source = top - 3)
+ newAlias(assignee = top - 2, source = top - 4)
+ newAlias(assignee = top - 4, source = top)
+ newAlias(assignee = top - 5, source = top - 1)
+ }
+
+ case DUP2_X2 =>
+ val top = stackTop
+ // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup2_x2
+ val v1isSize2 = peekStack(0).getSize == 2
+ if (v1isSize2) {
+ newAlias(assignee = top, source = top - 1)
+ newAlias(assignee = top - 1, source = top - 2)
+ val v2isSize2 = peekStack(1).getSize == 2
+ if (v2isSize2) {
+ // Form 4
+ newAlias(assignee = top - 2, source = top)
+ } else {
+ // Form 2
+ newAlias(assignee = top - 2, source = top - 3)
+ newAlias(assignee = top - 3, source = top)
+ }
+ } else {
+ newAlias(assignee = top, source = top - 2)
+ newAlias(assignee = top - 1, source = top - 3)
+ newAlias(assignee = top - 2, source = top - 4)
+ val v3isSize2 = peekStack(2).getSize == 2
+ if (v3isSize2) {
+ // Form 3
+ newAlias(assignee = top - 3, source = top)
+ newAlias(assignee = top - 4, source = top - 1)
+ } else {
+ // Form 1
+ newAlias(assignee = top - 3, source = top - 5)
+ newAlias(assignee = top - 4, source = top)
+ newAlias(assignee = top - 5, source = top - 1)
+ }
+ }
+
+ case SWAP =>
+ val top = stackTop
+ val idTop = aliasIds(top)
+ aliasIds(top) = aliasIds(top - 1)
+ aliasIds(top - 1) = idTop
+
+ case opcode =>
+ if (opcode == ASTORE) {
+ // Not a separate case because we need to remove the consumed stack value from alias sets after.
+ val stackTopBefore = stackTop - produced + consumed
+ val local = insn.asInstanceOf[VarInsnNode].`var`
+ newAlias(assignee = local, source = stackTopBefore)
+ // if the value written is size 2, it overwrites the subsequent slot, which is then no
+ // longer an alias of anything. see the corresponding case in `Frame.execute`.
+ if (getLocal(local).getSize == 2)
+ removeAlias(local + 1)
+
+ // if the value at the preceding index is size 2, it is no longer valid, so we remove its
+ // aliasing. see corresponding case in `Frame.execute`
+ if (local > 0) {
+ val precedingValue = getLocal(local - 1)
+ if (precedingValue != null && precedingValue.getSize == 2)
+ removeAlias(local - 1)
+ }
+ }
+
+ // Remove consumed stack values from aliasing sets.
+ // Example: iadd
+ // - before: local1, local2, stack1, consumed1, consumed2
+ // - after: local1, local2, stack1, produced1 // stackTop = 3
+ val firstConsumed = stackTop - produced + 1 // firstConsumed = 3
+ for (i <- 0 until consumed)
+ removeAlias(firstConsumed + i) // remove aliases for 3 and 4
+
+ // We don't need to set the aliases ids for the produced values: the aliasIds array already
+ // contains fresh ids for non-used stack values (ensured by removeAlias).
+ }
+ }
+
+ /**
+ * Merge the AliasingFrame `other` into this AliasingFrame.
+ *
+ * Aliases that are common in both frames are kept. Example:
+ *
+ * var x, y = null
+ * if (...) {
+ * x = a
+ * y = a // (x, y, a) are aliases
+ * } else {
+ * x = a
+ * y = b // (x, a) and (y, b)
+ * }
+ * [...] // (x, a)
+ */
+ override def merge(other: Frame[_ <: V], interpreter: Interpreter[V]): Boolean = {
+ val valuesChanged = super.merge(other, interpreter)
+ var aliasesChanged = false
+ val aliasingOther = other.asInstanceOf[AliasingFrame[_]]
+ for (i <- aliasIds.indices) {
+ val thisAliases = aliasesOf(i)
+ val thisNotOther = thisAliases diff (thisAliases intersect aliasingOther.aliasesOf(i))
+ if (thisNotOther.nonEmpty) {
+ aliasesChanged = true
+ thisNotOther foreach removeAlias
+ }
+ }
+ valuesChanged || aliasesChanged
+ }
+
+ override def init(src: Frame[_ <: V]): Frame[V] = {
+ super.init(src)
+ compat.Platform.arraycopy(src.asInstanceOf[AliasingFrame[_]].aliasIds, 0, aliasIds, 0, aliasIds.length)
+ this
+ }
+}
+
+/**
+ * An analyzer that uses AliasingFrames instead of bare Frames. This can be used when an analysis
+ * needs to track aliases, but doesn't require a more specific Frame subclass.
+ */
+class AliasingAnalyzer[V <: Value](interpreter: Interpreter[V]) extends Analyzer[V](interpreter) {
+ override def newFrame(nLocals: Int, nStack: Int): AliasingFrame[V] = new AliasingFrame(nLocals, nStack)
+ override def newFrame(src: Frame[_ <: V]): AliasingFrame[V] = new AliasingFrame(src)
+}
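A usage-level sketch of the analyzer defined above (names come from this patch; it assumes an ASM ClassNode/MethodNode is already loaded and the compiler's ASM fork is on the classpath):

  import scala.tools.asm.tree.{ClassNode, MethodNode}
  import scala.tools.asm.tree.analysis.{BasicInterpreter, BasicValue, Frame}
  import scala.tools.nsc.backend.jvm.analysis.{AliasingAnalyzer, AliasingFrame}

  def printAliases(owner: ClassNode, method: MethodNode): Unit = {
    val analyzer = new AliasingAnalyzer[BasicValue](new BasicInterpreter)
    val frames: Array[Frame[BasicValue]] = analyzer.analyze(owner.name, method) // one frame per instruction
    frames.iterator.zipWithIndex.foreach {
      case (f: AliasingFrame[_], i) if f.getLocals > 0 =>
        println(s"insn $i: slots aliasing local 0 = ${f.aliasesOf(0)}")
      case _ => () // null frame: instruction unreachable
    }
  }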
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala
new file mode 100644
index 0000000000..98e93c125b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala
@@ -0,0 +1,265 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import scala.annotation.switch
+import scala.tools.asm.Opcodes._
+import scala.tools.asm.Type
+import scala.tools.asm.tree.{MultiANewArrayInsnNode, InvokeDynamicInsnNode, MethodInsnNode, AbstractInsnNode}
+import scala.tools.asm.tree.analysis.{Frame, Value}
+import opt.BytecodeUtils._
+import collection.immutable
+
+object InstructionStackEffect {
+ private var cache: immutable.IntMap[(Int, Int)] = immutable.IntMap.empty
+ private def t(x: Int, y: Int): (Int, Int) = {
+ // x can go up to 255 (number of parameters of a method, dimensions in multianewarray) we cache
+ // x up to 10, which covers most cases and limits the cache. y doesn't go above 6 (see cases).
+ if (x > 10 || y > 6) (x, y)
+ else {
+ val key = (x << 8) + y // this would work for any x < 256
+ if (cache contains key) {
+ cache(key)
+ } else {
+ val r = (x, y)
+ cache += key -> r
+ r
+ }
+ }
+ }
+
+ /**
+ * Returns a pair with the number of stack values consumed and produced by `insn`.
+ * This method requires the `frame` to be in the state **before** executing / interpreting
+ * the `insn`.
+ */
+ def apply[V <: Value](insn: AbstractInsnNode, frame: Frame[V]): (Int, Int) = {
+ def peekStack(n: Int): V = frame.peekStack(n)
+
+ (insn.getOpcode: @switch) match {
+ // The order of opcodes is the same as in Frame.execute.
+ case NOP => t(0, 0)
+
+ case ACONST_NULL |
+ ICONST_M1 |
+ ICONST_0 |
+ ICONST_1 |
+ ICONST_2 |
+ ICONST_3 |
+ ICONST_4 |
+ ICONST_5 |
+ LCONST_0 |
+ LCONST_1 |
+ FCONST_0 |
+ FCONST_1 |
+ FCONST_2 |
+ DCONST_0 |
+ DCONST_1 |
+ BIPUSH |
+ SIPUSH |
+ LDC |
+ ILOAD |
+ LLOAD |
+ FLOAD |
+ DLOAD |
+ ALOAD => t(0, 1)
+
+ case IALOAD |
+ LALOAD |
+ FALOAD |
+ DALOAD |
+ AALOAD |
+ BALOAD |
+ CALOAD |
+ SALOAD => t(2, 1)
+
+ case ISTORE |
+ LSTORE |
+ FSTORE |
+ DSTORE |
+ ASTORE => t(1, 0)
+
+ case IASTORE |
+ LASTORE |
+ FASTORE |
+ DASTORE |
+ AASTORE |
+ BASTORE |
+ CASTORE |
+ SASTORE => t(3, 0)
+
+ case POP => t(1, 0)
+
+ case POP2 =>
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(1, 0) else t(2, 0)
+
+ case DUP => t(0, 1)
+
+ case DUP_X1 => t(2, 3)
+
+ case DUP_X2 =>
+ val isSize2 = peekStack(1).getSize == 2
+ if (isSize2) t(2, 3) else t(3, 4)
+
+ case DUP2 =>
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(0, 1) else t(0, 2)
+
+ case DUP2_X1 =>
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(2, 3) else t(3, 4)
+
+ case DUP2_X2 =>
+ val v1isSize2 = peekStack(0).getSize == 2
+ if (v1isSize2) {
+ val v2isSize2 = peekStack(1).getSize == 2
+ if (v2isSize2) t(2, 3) else t(3, 4)
+ } else {
+ val v3isSize2 = peekStack(2).getSize == 2
+ if (v3isSize2) t(3, 5) else t(4, 6)
+ }
+
+ case SWAP => t(2, 2)
+
+ case IADD |
+ LADD |
+ FADD |
+ DADD |
+ ISUB |
+ LSUB |
+ FSUB |
+ DSUB |
+ IMUL |
+ LMUL |
+ FMUL |
+ DMUL |
+ IDIV |
+ LDIV |
+ FDIV |
+ DDIV |
+ IREM |
+ LREM |
+ FREM |
+ DREM => t(2, 1)
+
+ case INEG |
+ LNEG |
+ FNEG |
+ DNEG => t(1, 1)
+
+ case ISHL |
+ LSHL |
+ ISHR |
+ LSHR |
+ IUSHR |
+ LUSHR |
+ IAND |
+ LAND |
+ IOR |
+ LOR |
+ IXOR |
+ LXOR => t(2, 1)
+
+ case IINC => t(0, 0)
+
+ case I2L |
+ I2F |
+ I2D |
+ L2I |
+ L2F |
+ L2D |
+ F2I |
+ F2L |
+ F2D |
+ D2I |
+ D2L |
+ D2F |
+ I2B |
+ I2C |
+ I2S => t(1, 1)
+
+ case LCMP |
+ FCMPL |
+ FCMPG |
+ DCMPL |
+ DCMPG => t(2, 1)
+
+ case IFEQ |
+ IFNE |
+ IFLT |
+ IFGE |
+ IFGT |
+ IFLE => t(1, 0)
+
+ case IF_ICMPEQ |
+ IF_ICMPNE |
+ IF_ICMPLT |
+ IF_ICMPGE |
+ IF_ICMPGT |
+ IF_ICMPLE |
+ IF_ACMPEQ |
+ IF_ACMPNE => t(2, 0)
+
+ case GOTO => t(0, 0)
+
+ case JSR => t(0, 1)
+
+ case RET => t(0, 0)
+
+ case TABLESWITCH |
+ LOOKUPSWITCH => t(1, 0)
+
+ case IRETURN |
+ LRETURN |
+ FRETURN |
+ DRETURN |
+ ARETURN => t(1, 0) // Frame.execute consumes one stack value
+
+ case RETURN => t(0, 0) // Frame.execute does not change the stack
+
+ case GETSTATIC => t(0, 1)
+
+ case PUTSTATIC => t(1, 0)
+
+ case GETFIELD => t(1, 1)
+
+ case PUTFIELD => t(2, 0)
+
+ case INVOKEVIRTUAL |
+ INVOKESPECIAL |
+ INVOKESTATIC |
+ INVOKEINTERFACE =>
+ val desc = insn.asInstanceOf[MethodInsnNode].desc
+ val cons = Type.getArgumentTypes(desc).length + (if (insn.getOpcode == INVOKESTATIC) 0 else 1)
+ val prod = if (Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1
+ t(cons, prod)
+
+ case INVOKEDYNAMIC =>
+ val desc = insn.asInstanceOf[InvokeDynamicInsnNode].desc
+ val cons = Type.getArgumentTypes(desc).length
+ val prod = if (Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1
+ t(cons, prod)
+
+ case NEW => t(0, 1)
+
+ case NEWARRAY |
+ ANEWARRAY |
+ ARRAYLENGTH => t(1, 1)
+
+ case ATHROW => t(1, 0) // Frame.execute consumes one stack value
+
+ case CHECKCAST |
+ INSTANCEOF => t(1, 1) // Frame.execute does push(pop()) for both of them
+
+ case MONITORENTER |
+ MONITOREXIT => t(1, 0)
+
+ case MULTIANEWARRAY => t(insn.asInstanceOf[MultiANewArrayInsnNode].dims, 1)
+
+ case IFNULL |
+ IFNONNULL => t(1, 0)
+ }
+ }
+
+}
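The cache key in `t` packs the (consumed, produced) pair into a single Int; a worked example of the encoding (values illustrative):

  val (consumed, produced) = (2, 1)        // e.g. IADD: pops two ints, pushes their sum
  val key = (consumed << 8) + produced     // 0x0201 = 513; unambiguous while produced < 256
  assert((key >> 8, key & 0xff) == (consumed, produced))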
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala
new file mode 100644
index 0000000000..31710dcbee
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala
@@ -0,0 +1,282 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import java.util
+
+import scala.annotation.switch
+import scala.tools.asm.{Type, Opcodes}
+import scala.tools.asm.tree.{MethodInsnNode, LdcInsnNode, AbstractInsnNode}
+import scala.tools.asm.tree.analysis.{Frame, Analyzer, Interpreter, Value}
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils
+import BytecodeUtils._
+
+/**
+ * Some notes on the ASM ananlyzer framework.
+ *
+ * Value
+ * - Abstract, needs to be implemented for each analysis.
+ * - Represents the desired information about local variables and stack values, for example:
+ * - Is this value known to be null / not null?
+ * - What are the instructions that could potentially have produced this value?
+ *
+ * Interpreter
+ * - Abstract, needs to be implemented for each analysis. Sometimes one can subclass an existing
+ * interpreter, e.g., SourceInterpreter or BasicInterpreter.
+ * - Multiple abstract methods that receive an instruction and the instruction's input values, and
+ * return a value representing the result of that instruction.
+ * - Note: due to control flow, the interpreter can be invoked multiple times for the same
+ * instruction, until reaching a fixed point.
+ * - Abstract `merge` function that computes the least upper bound of two values. Used by
+ * Frame.merge (see below).
+ *
+ * Frame
+ * - Can be used directly for many analyses, no subclass required.
+ * - Every frame has an array of values: one for each local variable and for each stack slot.
+ * - A `top` index stores the index of the current stack top
+ * - NOTE: for a size-2 local variable at index i, the local variable at i+1 is set to an empty
+ * value. However, for a size-2 value at index i on the stack, the value at i+1 holds the next
+ * stack value.
+ * - Defines the `execute(instruction)` method.
+ * - executing mutates the state of the frame according to the effect of the instruction
+ * - pop consumed values from the stack
+ * - pass them to the interpreter together with the instruction
+ * - if applicable, push the resulting value on the stack
+ * - Defines the `merge(otherFrame)` method
+ * - called by the analyzer when multiple control flow paths lead to an instruction
+ * - the frame at the branching instruction is merged into the current frame of the
+ * instruction (held by the analyzer)
+ * - mutates the values of the current frame, merges all values using interpreter.merge.
+ *
+ * Analyzer
+ * - Stores a frame for each instruction
+ * - `merge` function takes an instruction and a frame, merges the existing frame for that instr
+ * (from the frames array) with the new frame passed as argument.
+ * if the frame changed, puts the instruction on the work queue (fixpiont).
+ * - initial frame: initialized for first instr by calling interpreter.new[...]Value
+ * for each slot (locals and params), stored in frames[firstInstr] by calling `merge`
+ * - work queue of instructions (`queue` array, `top` index for next instruction to analyze)
+ * - analyze(method): simulate control flow. while work queue non-empty:
+ * - copy the state of `frames[instr]` into a local frame `current`
+ * - call `current.execute(instr, interpreter)`, mutating the `current` frame
+ * - if it's a branching instruction
+ * - for all potential destination instructions
+ * - merge the destination instruction frame with the `current` frame
+ * (this enqueues the destination instr if its frame changed)
+ * - invoke `newControlFlowEdge` (see below)
+ * - the analyzer also tracks active exception handlers at each instruction
+ * - the empty method `newControlFlowEdge` can be overridden to track control flow if required
+ *
+ *
+ * Some notes on nullness analysis.
+ *
+ * For an instance method, `this` is non-null at entry. So we have to return a NotNull value when
+ * the analyzer is initializing the first frame of a method (see above). This required a change of
+ * the analyzer: before it would simply call `interpreter.newValue`, where we don't have the
+ * required context. See https://github.com/scala/scala-asm/commit/8133d75032.
+ *
+ * After some operations we know that a certain value is not null (e.g. the receiver of an instance
+ * call). However, the receiver is an value on the stack and consumed while interpreting the
+ * instruction - so we can only gain some knowledge if we know that the receiver was an alias of
+ * some other local variable or stack slot. Therefore we use the AliasingFrame class.
+ *
+ * TODO:
+ * Finally, we'd also like to exploit the knowledge gained from `if (x == null)` tests: x is known
+ * to be null in one branch, not null in the other. This will make use of alias tracking as well.
+ * We still have to figure out how to do this exactly in the analyzer framework.
+ */
+
+/**
+ * Type to represent nullness of values.
+ */
+sealed trait Nullness {
+ final def merge(other: Nullness) = if (this == other) this else Unknown
+}
+case object NotNull extends Nullness
+case object Unknown extends Nullness
+case object Null extends Nullness
+
+/**
+ * Represents the nullness state for a local variable or stack value.
+ *
+ * Note that nullness of primitive values is not tracked, it will be always [[Unknown]].
+ */
+sealed trait NullnessValue extends Value {
+ /**
+ * The nullness of this value.
+ */
+ def nullness: Nullness
+
+ /**
+ * True if this value is a long or double. The Analyzer framework needs to know
+ * the size of each value when interpreting instructions, see `Frame.execute`.
+ */
+ def isSize2: Boolean
+ /**
+ * The size of the slot described by this value. Cannot be 0 because no values are allocated
+ * for void-typed slots, see NullnessInterpreter.newValue.
+ **/
+ def getSize: Int = if (isSize2) 2 else 1
+
+ def merge(other: NullnessValue) = NullnessValue(nullness merge other.nullness, isSize2)
+}
+
+object NullValue extends NullnessValue { def nullness = Null; def isSize2 = false; override def toString = "Null" }
+object UnknownValue1 extends NullnessValue { def nullness = Unknown; def isSize2 = false; override def toString = "Unknown1" }
+object UnknownValue2 extends NullnessValue { def nullness = Unknown; def isSize2 = true; override def toString = "Unknown2" }
+object NotNullValue extends NullnessValue { def nullness = NotNull; def isSize2 = false; override def toString = "NotNull" }
+
+object NullnessValue {
+ def apply(nullness: Nullness, isSize2: Boolean): NullnessValue = {
+ if (nullness == Null) NullValue
+ else if (nullness == NotNull) NotNullValue
+ else if (isSize2) UnknownValue2
+ else UnknownValue1
+ }
+
+ def apply(nullness: Nullness, insn: AbstractInsnNode): NullnessValue = {
+ apply(nullness, isSize2 = BytecodeUtils.instructionResultSize(insn) == 2)
+ }
+}
+
+final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5) {
+ def newValue(tp: Type): NullnessValue = {
+ // ASM loves giving semantics to null. The behavior here is the same as in SourceInterpreter,
+ // which is provided by the framework.
+ //
+ // (1) For the void type, the ASM framework expects newValue to return `null`.
+ // Also, the Frame.returnValue field is `null` for methods with return type void.
+ // Example callsite passing VOID_TYPE: in Analyzer, `newValue(Type.getReturnType(m.desc))`.
+ //
+ // (2) `tp` may also be `null`. When creating the initial frame, the analyzer invokes
+ // `newValue(null)` for each local variable. We have to return a value of size 1.
+ if (tp == Type.VOID_TYPE) null // (1)
+ else NullnessValue(Unknown, isSize2 = tp != null /*(2)*/ && tp.getSize == 2 )
+ }
+
+ override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): NullnessValue = {
+ // For instance methods, the `this` parameter is known to be not null.
+ if (isInstanceMethod && local == 0) NullnessValue(NotNull, isSize2 = false)
+ else super.newParameterValue(isInstanceMethod, local, tp)
+ }
+
+ def newOperation(insn: AbstractInsnNode): NullnessValue = {
+ val nullness = (insn.getOpcode: @switch) match {
+ case Opcodes.ACONST_NULL => Null
+
+ case Opcodes.LDC => insn.asInstanceOf[LdcInsnNode].cst match {
+ case _: String | _: Type => NotNull
+ case _ => Unknown
+ }
+
+ case _ => Unknown
+ }
+
+ // for Opcodes.NEW, we use Unknown. The value will become NotNull after the constructor call.
+ NullnessValue(nullness, insn)
+ }
+
+ def copyOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = value
+
+ def unaryOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = (insn.getOpcode: @switch) match {
+ case Opcodes.CHECKCAST => value
+
+ case Opcodes.NEWARRAY |
+ Opcodes.ANEWARRAY => NullnessValue(NotNull, isSize2 = false)
+
+ case _ => NullnessValue(Unknown, insn)
+ }
+
+ def binaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue): NullnessValue = {
+ NullnessValue(Unknown, insn)
+ }
+
+ def ternaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue, value3: NullnessValue): NullnessValue = {
+ NullnessValue(Unknown, isSize2 = false)
+ }
+
+ def naryOperation(insn: AbstractInsnNode, values: util.List[_ <: NullnessValue]): NullnessValue = (insn.getOpcode: @switch) match {
+ case Opcodes.MULTIANEWARRAY =>
+ NullnessValue(NotNull, isSize2 = false)
+
+ case _ =>
+ // TODO: use a list of methods that are known to return non-null values
+ NullnessValue(Unknown, insn)
+ }
+
+ def returnOperation(insn: AbstractInsnNode, value: NullnessValue, expected: NullnessValue): Unit = ()
+
+ def merge(a: NullnessValue, b: NullnessValue): NullnessValue = a merge b
+}
+
+class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessValue](nLocals, nStack) {
+ // Auxiliary constructor required for implementing `NullnessAnalyzer.newFrame`
+ def this(src: Frame[_ <: NullnessValue]) {
+ this(src.getLocals, src.getMaxStackSize)
+ init(src)
+ }
+
+ override def execute(insn: AbstractInsnNode, interpreter: Interpreter[NullnessValue]): Unit = {
+ import Opcodes._
+
+ // get the object id of the object that is known to be not-null after this operation
+ val nullCheckedAliasId: Long = (insn.getOpcode: @switch) match {
+ case IALOAD |
+ LALOAD |
+ FALOAD |
+ DALOAD |
+ AALOAD |
+ BALOAD |
+ CALOAD |
+ SALOAD =>
+ aliasId(this.stackTop - 1)
+
+ case IASTORE |
+ FASTORE |
+ AASTORE |
+ BASTORE |
+ CASTORE |
+ SASTORE |
+ LASTORE |
+ DASTORE =>
+ aliasId(this.stackTop - 2)
+
+ case GETFIELD =>
+ aliasId(this.stackTop)
+
+ case PUTFIELD =>
+ aliasId(this.stackTop - 1)
+
+ case INVOKEVIRTUAL |
+ INVOKESPECIAL |
+ INVOKEINTERFACE =>
+ val desc = insn.asInstanceOf[MethodInsnNode].desc
+ val numArgs = Type.getArgumentTypes(desc).length
+ aliasId(this.stackTop - numArgs)
+
+ case ARRAYLENGTH |
+ MONITORENTER |
+ MONITOREXIT =>
+ aliasId(this.stackTop)
+
+ case _ =>
+ -1
+ }
+
+ super.execute(insn, interpreter)
+
+ if (nullCheckedAliasId != -1) {
+ for (i <- valuesWithAliasId(nullCheckedAliasId))
+ this.setValue(i, NotNullValue)
+ }
+ }
+}
+
+/**
+ * This class is required to override the `newFrame` methods, which ensures that the analyzer
+ * uses NullnessFrames.
+ */
+class NullnessAnalyzer extends Analyzer[NullnessValue](new NullnessInterpreter) {
+ override def newFrame(nLocals: Int, nStack: Int): NullnessFrame = new NullnessFrame(nLocals, nStack)
+ override def newFrame(src: Frame[_ <: NullnessValue]): NullnessFrame = new NullnessFrame(src)
+}
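
A minimal usage sketch for the analyzer added above (assuming the `analysis` package members are in scope; the method and parameter names here are illustrative, not part of the patch): it runs the worklist fixpoint described in the header comment and then reads the nullness of the stack top at a given instruction.

import scala.tools.asm.tree.{AbstractInsnNode, MethodNode}

def nullnessAtStackTop(classInternalName: String, methodNode: MethodNode, insn: AbstractInsnNode): Option[Nullness] = {
  val analyzer = new NullnessAnalyzer
  analyzer.analyze(classInternalName, methodNode)  // runs the worklist fixpoint sketched in the header comment
  val frame = analyzer.getFrames()(methodNode.instructions.indexOf(insn))
  if (frame == null) None                          // ASM leaves a null frame for unreachable instructions
  else Some(frame.getStack(frame.getStackSize - 1).nullness)
}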
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
index 607b7145d6..dbf19744fa 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
@@ -127,7 +127,7 @@ class ByteCodeRepository(val classPath: ClassFileLookup[AbstractFile], val isJav
case Nil => Left(failedClasses)
}
- // In a MethodInsnNode, the `owner` field may be an array descriptor, for exmple when invoking `clone`. We don't have a method node to return in this case.
+ // In a MethodInsnNode, the `owner` field may be an array descriptor, for example when invoking `clone`. We don't have a method node to return in this case.
if (ownerInternalNameOrArrayDescriptor.charAt(0) == '[')
Left(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, Nil))
else
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
index 201ab15177..9bd016f964 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
@@ -170,6 +170,8 @@ object BytecodeUtils {
new InsnNode(op)
}
+ def instructionResultSize(instruction: AbstractInsnNode) = InstructionResultSize(instruction)
+
def labelReferences(method: MethodNode): Map[LabelNode, Set[AnyRef]] = {
val res = mutable.Map.empty[LabelNode, Set[AnyRef]]
def add(l: LabelNode, ref: AnyRef) = if (res contains l) res(l) = res(l) + ref else res(l) = Set(ref)
@@ -328,13 +330,38 @@ object BytecodeUtils {
class AsmAnalyzer[V <: Value](methodNode: MethodNode, classInternalName: InternalName, interpreter: Interpreter[V] = new BasicInterpreter) {
val analyzer = new Analyzer(interpreter)
analyzer.analyze(classInternalName, methodNode)
- def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.getFrames()(methodNode.instructions.indexOf(instruction))
+ def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode)
+ }
+
+ implicit class AnalyzerExtensions[V <: Value](val analyzer: Analyzer[V]) extends AnyVal {
+ def frameAt(instruction: AbstractInsnNode, methodNode: MethodNode): Frame[V] = analyzer.getFrames()(methodNode.instructions.indexOf(instruction))
}
- implicit class `frame extensions`[V <: Value](val frame: Frame[V]) extends AnyVal {
- def peekDown(n: Int): V = {
- val topIndex = frame.getStackSize - 1
- frame.getStack(topIndex - n)
+ implicit class FrameExtensions[V <: Value](val frame: Frame[V]) extends AnyVal {
+ /**
+ * The value `n` positions down the stack.
+ */
+ def peekStack(n: Int): V = frame.getStack(frame.getStackSize - 1 - n)
+
+ /**
+ * The index of the current stack top.
+ */
+ def stackTop = frame.getLocals + frame.getStackSize - 1
+
+ /**
+ * Gets the value at slot i, where i may be a local or a stack index.
+ */
+ def getValue(i: Int): V = {
+ if (i < frame.getLocals) frame.getLocal(i)
+ else frame.getStack(i - frame.getLocals)
+ }
+
+ /**
+ * Sets the value at slot i, where i may be a local or a stack index.
+ */
+ def setValue(i: Int, value: V): Unit = {
+ if (i < frame.getLocals) frame.setLocal(i, value)
+ else frame.setStack(i - frame.getLocals, value)
}
}
}
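
As a sketch of the slot numbering assumed by `getValue`, `setValue` and `stackTop` above (locals occupy indices 0 until `getLocals`, the operand stack follows), a hypothetical helper that walks all slots of a frame in that order might look like this:

import scala.tools.asm.tree.analysis.{Frame, Value}

def allSlots[V <: Value](frame: Frame[V]): IndexedSeq[V] =
  (0 until frame.getLocals).map(i => frame.getLocal(i)) ++   // slots 0 .. getLocals-1 are the locals
  (0 until frame.getStackSize).map(i => frame.getStack(i))   // the operand stack occupies the slots that follow

The index of the last element returned here is exactly the `stackTop` defined in the extension above.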
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
index 028f0f8fa6..0932564b1f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
@@ -8,12 +8,15 @@ package backend.jvm
package opt
import scala.reflect.internal.util.{NoPosition, Position}
+import scala.tools.asm.tree.analysis.{Value, Analyzer, BasicInterpreter}
+import scala.tools.asm.{Opcodes, Type}
import scala.tools.asm.tree._
import scala.collection.convert.decorateAsScala._
-import scala.tools.nsc.backend.jvm.BTypes.{MethodInlineInfo, InternalName}
+import scala.tools.nsc.backend.jvm.BTypes.InternalName
import scala.tools.nsc.backend.jvm.BackendReporting._
-import scala.tools.nsc.backend.jvm.opt.BytecodeUtils.AsmAnalyzer
+import scala.tools.nsc.backend.jvm.analysis.{NotNull, NullnessAnalyzer}
import ByteCodeRepository.{Source, CompilationUnit}
+import BytecodeUtils._
class CallGraph[BT <: BTypes](val btypes: BT) {
import btypes._
@@ -93,12 +96,25 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
// TODO: run dataflow analyses to make the call graph more precise
// - producers to get forwarded parameters (ForwardedParam)
// - typeAnalysis for more precise argument types, more precise callee
- // - nullAnalysis to skip emitting the receiver-null-check when inlining
- // TODO: for now we run a basic analyzer to get the stack height at the call site.
- // once we run a more elaborate analyzer (types, nullness), we can get the stack height out of there.
+ // For now we run a NullnessAnalyzer. It is used to determine if the receiver of an instance
+ // call is known to be not-null, in which case we don't have to emit a null check when inlining.
+ // It is also used to get the stack height at the call site.
localOpt.minimalRemoveUnreachableCode(methodNode, definingClass.internalName)
- val analyzer = new AsmAnalyzer(methodNode, definingClass.internalName)
+
+ val analyzer: Analyzer[_ <: Value] = {
+ if (compilerSettings.YoptNullnessTracking) new NullnessAnalyzer
+ else new Analyzer(new BasicInterpreter)
+ }
+ analyzer.analyze(definingClass.internalName, methodNode)
+
+ def receiverNotNullByAnalysis(call: MethodInsnNode, numArgs: Int) = analyzer match {
+ case nullnessAnalyzer: NullnessAnalyzer =>
+ val frame = nullnessAnalyzer.frameAt(call, methodNode)
+ frame.getStack(frame.getStackSize - 1 - numArgs).nullness == NotNull
+
+ case _ => false
+ }
methodNode.instructions.iterator.asScala.collect({
case call: MethodInsnNode =>
@@ -126,13 +142,19 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
Nil
}
+ val receiverNotNull = call.getOpcode == Opcodes.INVOKESTATIC || {
+ val numArgs = Type.getArgumentTypes(call.desc).length
+ receiverNotNullByAnalysis(call, numArgs)
+ }
+
Callsite(
callsiteInstruction = call,
callsiteMethod = methodNode,
callsiteClass = definingClass,
callee = callee,
argInfos = argInfos,
- callsiteStackHeight = analyzer.frameAt(call).getStackSize,
+ callsiteStackHeight = analyzer.frameAt(call, methodNode).getStackSize,
+ receiverKnownNotNull = receiverNotNull,
callsitePosition = callsitePositions.getOrElse(call, NoPosition)
)
}).toList
@@ -154,7 +176,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
*/
final case class Callsite(callsiteInstruction: MethodInsnNode, callsiteMethod: MethodNode, callsiteClass: ClassBType,
callee: Either[OptimizerWarning, Callee], argInfos: List[ArgInfo],
- callsiteStackHeight: Int, callsitePosition: Position) {
+ callsiteStackHeight: Int, receiverKnownNotNull: Boolean, callsitePosition: Position) {
override def toString =
"Invocation of" +
s" ${callee.map(_.calleeDeclarationClass.internalName).getOrElse("?")}.${callsiteInstruction.name + callsiteInstruction.desc}" +
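
A small sketch of the receiver lookup performed by `receiverNotNullByAnalysis` above: for an instance call the arguments sit above the receiver on the operand stack, so the receiver is `numArgs` positions below the top. The helper name is hypothetical.

import scala.tools.asm.Type
import scala.tools.asm.tree.MethodInsnNode
import scala.tools.asm.tree.analysis.{Frame, Value}

def receiverStackIndex(call: MethodInsnNode, frame: Frame[_ <: Value]): Int = {
  val numArgs = Type.getArgumentTypes(call.desc).length
  frame.getStackSize - 1 - numArgs  // e.g. for substring(II)Ljava/lang/String; the receiver sits 2 below the top
}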
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
index ac5c9ce2e6..b4f091b37f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
@@ -49,7 +49,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
if (callGraph.callsites contains request.callsiteInstruction) {
val r = inline(request.callsiteInstruction, request.callsiteStackHeight, request.callsiteMethod, request.callsiteClass,
callee.callee, callee.calleeDeclarationClass,
- receiverKnownNotNull = false, keepLineNumbers = false)
+ request.receiverKnownNotNull, keepLineNumbers = false)
for (warning <- r) {
if ((callee.annotatedInline && btypes.compilerSettings.YoptWarningEmitAtInlineFailed) || warning.emitWarning(compilerSettings)) {
@@ -89,7 +89,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
*/
def selectCallsitesForInlining: List[Callsite] = {
callsites.valuesIterator.filter({
- case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, pos) =>
+ case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, _, pos) =>
val res = doInlineCallsite(callsite)
if (!res) {
@@ -112,7 +112,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
res
- case Callsite(ins, _, _, Left(warning), _, _, pos) =>
+ case Callsite(ins, _, _, Left(warning), _, _, _, pos) =>
if (warning.emitWarning(compilerSettings))
backendReporting.inlinerWarning(pos, s"failed to determine if ${ins.name} should be inlined:\n$warning")
false
@@ -123,7 +123,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
* The current inlining heuristics are simple: inline calls to methods annotated @inline.
*/
def doInlineCallsite(callsite: Callsite): Boolean = callsite match {
- case Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, pos) =>
+ case Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, _, pos) =>
if (compilerSettings.YoptInlineHeuristics.value == "everything") safeToInline
else annotatedInline && safeToInline
@@ -189,7 +189,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
// there's no need to run eliminateUnreachableCode here. building the call graph does that
// already, no code can become unreachable in the meantime.
val analyzer = new AsmAnalyzer(callsite.callsiteMethod, callsite.callsiteClass.internalName, new SourceInterpreter)
- val receiverValue = analyzer.frameAt(callsite.callsiteInstruction).peekDown(traitMethodArgumentTypes.length)
+ val receiverValue = analyzer.frameAt(callsite.callsiteInstruction).peekStack(traitMethodArgumentTypes.length)
for (i <- receiverValue.insns.asScala) {
val cast = new TypeInsnNode(CHECKCAST, selfParamType.internalName)
callsite.callsiteMethod.instructions.insert(i, cast)
@@ -215,6 +215,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
calleeInfoWarning = infoWarning)),
argInfos = Nil,
callsiteStackHeight = callsite.callsiteStackHeight,
+ receiverKnownNotNull = callsite.receiverKnownNotNull,
callsitePosition = callsite.callsitePosition
)
callGraph.callsites(newCallsiteInstruction) = staticCallsite
@@ -400,7 +401,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
val inlinedReturn = instructionMap(originalReturn)
val returnReplacement = new InsnList
- def drop(slot: Int) = returnReplacement add getPop(frame.peekDown(slot).getSize)
+ def drop(slot: Int) = returnReplacement add getPop(frame.peekStack(slot).getSize)
// for non-void methods, store the stack top into the return local variable
if (hasReturnValue) {
@@ -444,6 +445,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
callee = originalCallsite.callee,
argInfos = Nil, // TODO: re-compute argInfos for new destination (once we actually compute them)
callsiteStackHeight = callsiteStackHeight + originalCallsite.callsiteStackHeight,
+ receiverKnownNotNull = originalCallsite.receiverKnownNotNull,
callsitePosition = originalCallsite.callsitePosition
)
@@ -560,38 +562,62 @@ class Inliner[BT <: BTypes](val btypes: BT) {
* @param memberDeclClass The class in which the member is declared (A)
* @param memberRefClass The class used in the member reference (B)
*
+ * (B0) JVMS 5.4.3.2 / 5.4.3.3: when resolving a member of class C in D, the class C is resolved
+ * first. According to 5.4.3.1, this requires C to be accessible in D.
+ *
* JVMS 5.4.4 summary: A field or method R is accessible to a class D (destinationClass) iff
* (B1) R is public
* (B2) R is protected, declared in C (memberDeclClass) and D is a subclass of C.
* If R is not static, R must contain a symbolic reference to a class T (memberRefClass),
* such that T is either a subclass of D, a superclass of D, or D itself.
+ * Also (P) needs to be satisfied.
* (B3) R is either protected or has default access and declared by a class in the same
* run-time package as D.
+ * If R is protected, also (P) needs to be satisfied.
* (B4) R is private and is declared in D.
+ *
+ * (P) When accessing a protected instance member, the target object on the stack (the receiver)
+ * has to be a subtype of D (destinationClass). This is enforced by classfile verification
+ * (https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1.8).
+ *
+ * TODO: we cannot currently implement (P) because we don't have the necessary information
+ * available. Once we have a type propagation analysis implemented, we can extract the receiver
+ * type from there (https://github.com/scala-opt/scala/issues/13).
*/
def memberIsAccessible(memberFlags: Int, memberDeclClass: ClassBType, memberRefClass: ClassBType): Either[OptimizerWarning, Boolean] = {
// TODO: B3 requires "same run-time package", which seems to be package + classloader (JVMS 5.3.). Is the below ok?
def samePackageAsDestination = memberDeclClass.packageInternalName == destinationClass.packageInternalName
-
- val key = (ACC_PUBLIC | ACC_PROTECTED | ACC_PRIVATE) & memberFlags
- key match {
- case ACC_PUBLIC => // B1
- Right(true)
-
- case ACC_PROTECTED => // B2
- tryEither {
- val condB2 = destinationClass.isSubtypeOf(memberDeclClass).orThrow && {
- val isStatic = (ACC_STATIC & memberFlags) != 0
- isStatic || memberRefClass.isSubtypeOf(destinationClass).orThrow || destinationClass.isSubtypeOf(memberRefClass).orThrow
+ def targetObjectConformsToDestinationClass = false // needs type propagation analysis, see above
+
+ def memberIsAccessibleImpl = {
+ val key = (ACC_PUBLIC | ACC_PROTECTED | ACC_PRIVATE) & memberFlags
+ key match {
+ case ACC_PUBLIC => // B1
+ Right(true)
+
+ case ACC_PROTECTED => // B2
+ val isStatic = (ACC_STATIC & memberFlags) != 0
+ tryEither {
+ val condB2 = destinationClass.isSubtypeOf(memberDeclClass).orThrow && {
+ isStatic || memberRefClass.isSubtypeOf(destinationClass).orThrow || destinationClass.isSubtypeOf(memberRefClass).orThrow
+ }
+ Right(
+ (condB2 || samePackageAsDestination /* B3 (protected) */) &&
+ (isStatic || targetObjectConformsToDestinationClass) // (P)
+ )
}
- Right(condB2 || samePackageAsDestination) // B3 (protected)
- }
- case 0 => // B3 (default access)
- Right(samePackageAsDestination)
+ case 0 => // B3 (default access)
+ Right(samePackageAsDestination)
+
+ case ACC_PRIVATE => // B4
+ Right(memberDeclClass == destinationClass)
+ }
+ }
- case ACC_PRIVATE => // B4
- Right(memberDeclClass == destinationClass)
+ classIsAccessible(memberDeclClass) match { // B0
+ case Right(true) => memberIsAccessibleImpl
+ case r => r
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala
new file mode 100644
index 0000000000..8d744f6d13
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala
@@ -0,0 +1,240 @@
+package scala.tools.nsc.backend.jvm.opt
+
+import scala.annotation.switch
+import scala.tools.asm.{Handle, Type, Opcodes}
+import scala.tools.asm.tree._
+
+object InstructionResultSize {
+ import Opcodes._
+ def apply(instruction: AbstractInsnNode): Int = (instruction.getOpcode: @switch) match {
+ // The order of opcodes is (almost) the same as in Opcodes.java
+ case ACONST_NULL => 1
+
+ case ICONST_M1 |
+ ICONST_0 |
+ ICONST_1 |
+ ICONST_2 |
+ ICONST_3 |
+ ICONST_4 |
+ ICONST_5 => 1
+
+ case LCONST_0 |
+ LCONST_1 => 2
+
+ case FCONST_0 |
+ FCONST_1 |
+ FCONST_2 => 1
+
+ case DCONST_0 |
+ DCONST_1 => 2
+
+ case BIPUSH |
+ SIPUSH => 1
+
+ case LDC =>
+ instruction.asInstanceOf[LdcInsnNode].cst match {
+ case _: java.lang.Integer |
+ _: java.lang.Float |
+ _: String |
+ _: Type |
+ _: Handle => 1
+
+ case _: java.lang.Long |
+ _: java.lang.Double => 2
+ }
+
+ case ILOAD |
+ FLOAD |
+ ALOAD => 1
+
+ case LLOAD |
+ DLOAD => 2
+
+ case IALOAD |
+ FALOAD |
+ AALOAD |
+ BALOAD |
+ CALOAD |
+ SALOAD => 1
+
+ case LALOAD |
+ DALOAD => 2
+
+ case ISTORE |
+ LSTORE |
+ FSTORE |
+ DSTORE |
+ ASTORE => 0
+
+ case IASTORE |
+ LASTORE |
+ FASTORE |
+ DASTORE |
+ AASTORE |
+ BASTORE |
+ CASTORE |
+ SASTORE => 0
+
+ case POP |
+ POP2 => 0
+
+ case DUP |
+ DUP_X1 |
+ DUP_X2 |
+ DUP2 |
+ DUP2_X1 |
+ DUP2_X2 |
+ SWAP => throw new IllegalArgumentException("Can't compute the size of DUP/SWAP without knowing what's on stack top")
+
+ case IADD |
+ FADD => 1
+
+ case LADD |
+ DADD => 2
+
+ case ISUB |
+ FSUB => 1
+
+ case LSUB |
+ DSUB => 2
+
+ case IMUL |
+ FMUL => 1
+
+ case LMUL |
+ DMUL => 2
+
+ case IDIV |
+ FDIV => 1
+
+ case LDIV |
+ DDIV => 2
+
+ case IREM |
+ FREM => 1
+
+ case LREM |
+ DREM => 2
+
+ case INEG |
+ FNEG => 1
+
+ case LNEG |
+ DNEG => 2
+
+ case ISHL |
+ ISHR => 1
+
+ case LSHL |
+ LSHR => 2
+
+ case IUSHR => 1
+
+ case LUSHR => 2
+
+ case IAND |
+ IOR |
+ IXOR => 1
+
+ case LAND |
+ LOR |
+ LXOR => 2
+
+ case IINC => 1
+
+ case I2F |
+ L2I |
+ L2F |
+ F2I |
+ D2I |
+ D2F |
+ I2B |
+ I2C |
+ I2S => 1
+
+ case I2L |
+ I2D |
+ L2D |
+ F2L |
+ F2D |
+ D2L => 2
+
+ case LCMP |
+ FCMPL |
+ FCMPG |
+ DCMPL |
+ DCMPG => 1
+
+ case IFEQ |
+ IFNE |
+ IFLT |
+ IFGE |
+ IFGT |
+ IFLE => 0
+
+ case IF_ICMPEQ |
+ IF_ICMPNE |
+ IF_ICMPLT |
+ IF_ICMPGE |
+ IF_ICMPGT |
+ IF_ICMPLE |
+ IF_ACMPEQ |
+ IF_ACMPNE => 0
+
+ case GOTO => 0
+
+ case JSR => throw new IllegalArgumentException("Subroutines are not supported.")
+
+ case RET => 0
+
+ case TABLESWITCH |
+ LOOKUPSWITCH => 0
+
+ case IRETURN |
+ FRETURN |
+ ARETURN => 1
+
+ case LRETURN |
+ DRETURN => 2
+
+ case RETURN => 0
+
+ case GETSTATIC => Type.getType(instruction.asInstanceOf[FieldInsnNode].desc).getSize
+
+ case PUTSTATIC => 0
+
+ case GETFIELD => Type.getType(instruction.asInstanceOf[FieldInsnNode].desc).getSize
+
+ case PUTFIELD => 0
+
+ case INVOKEVIRTUAL |
+ INVOKESPECIAL |
+ INVOKESTATIC |
+ INVOKEINTERFACE =>
+ val desc = instruction.asInstanceOf[MethodInsnNode].desc
+ Type.getReturnType(desc).getSize
+
+ case INVOKEDYNAMIC =>
+ val desc = instruction.asInstanceOf[InvokeDynamicInsnNode].desc
+ Type.getReturnType(desc).getSize
+
+ case NEW => 1
+
+ case NEWARRAY |
+ ANEWARRAY |
+ ARRAYLENGTH => 1
+
+ case ATHROW => 0
+
+ case CHECKCAST |
+ INSTANCEOF => 1
+
+ case MONITORENTER |
+ MONITOREXIT => 0
+
+ case MULTIANEWARRAY => 1
+
+ case IFNULL |
+ IFNONNULL => 0
+ }
+}
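
A few illustrative values (a REPL-style sketch, assuming the object above is in scope): the result size is the number of stack slots the instruction pushes.

import scala.tools.asm.Opcodes
import scala.tools.asm.tree.{InsnNode, VarInsnNode}

assert(InstructionResultSize(new InsnNode(Opcodes.LCONST_0)) == 2)    // a long occupies two stack slots
assert(InstructionResultSize(new VarInsnNode(Opcodes.ALOAD, 0)) == 1) // a reference occupies one slot
assert(InstructionResultSize(new InsnNode(Opcodes.RETURN)) == 0)      // RETURN pushes nothing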
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
index 5f51a94673..bd5bab28b5 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
@@ -262,7 +262,7 @@ object LocalOptImpls {
* the same index, but distinct start / end ranges are different variables, they may not have the
* same type or name.
*/
- def removeUnusedLocalVariableNodes(method: MethodNode)(fistLocalIndex: Int = parametersSize(method), renumber: Int => Int = identity): Boolean = {
+ def removeUnusedLocalVariableNodes(method: MethodNode)(firstLocalIndex: Int = parametersSize(method), renumber: Int => Int = identity): Boolean = {
def variableIsUsed(start: AbstractInsnNode, end: LabelNode, varIndex: Int): Boolean = {
start != end && (start match {
case v: VarInsnNode if v.`var` == varIndex => true
@@ -276,7 +276,7 @@ object LocalOptImpls {
val local = localsIter.next()
val index = local.index
// parameters and `this` (the lowest indices, starting at 0) are never removed or renumbered
- if (index >= fistLocalIndex) {
+ if (index >= firstLocalIndex) {
if (!variableIsUsed(local.start, local.end, index)) localsIter.remove()
else if (renumber(index) != index) local.index = renumber(index)
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 1b6631e7a4..8911a3a28c 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -378,7 +378,7 @@ abstract class DeadCodeElimination extends SubComponent {
} else {
i match {
case NEW(REFERENCE(sym)) =>
- log(s"Eliminated instantation of $sym inside $m")
+ log(s"Eliminated instantiation of $sym inside $m")
case STORE_LOCAL(l) if clobbers contains ((bb, idx)) =>
// if an unused instruction was a clobber of a used store to a reference or array type
// then we'll replace it with the store of a null to make sure the reference is
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 8f6fc65706..8cd2a14066 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -773,7 +773,7 @@ abstract class Inliners extends SubComponent {
staleOut += block
- tfa.remainingCALLs.remove(instr) // this bookkpeeping is done here and not in MTFAGrowable.reinit due to (1st) convenience and (2nd) necessity.
+ tfa.remainingCALLs.remove(instr) // this bookkeeping is done here and not in MTFAGrowable.reinit due to (1st) convenience and (2nd) necessity.
tfa.isOnWatchlist.remove(instr) // ditto
tfa.warnIfInlineFails.remove(instr)
diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
index 84e21a3ccd..85c7c3c843 100644
--- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
+++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
@@ -61,7 +61,7 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
}
/**
- * This type of classpath is closly related to the support for JSR-223.
+ * This type of classpath is closely related to the support for JSR-223.
* Its usage can be observed e.g. when running:
* jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala
* with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry:
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index d34c14be0f..9708cba281 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -761,9 +761,13 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val interfaces = interfacesOpt()
accept(LBRACE)
val buf = new ListBuffer[Tree]
+ var enumIsFinal = true
def parseEnumConsts() {
if (in.token != RBRACE && in.token != SEMI && in.token != EOF) {
- buf += enumConst(enumType)
+ val (const, hasClassBody) = enumConst(enumType)
+ buf += const
+ // if any of the enum constants has a class body, the enum class is not final (JLS 8.9.)
+ enumIsFinal &&= !hasClassBody
if (in.token == COMMA) {
in.nextToken()
parseEnumConsts()
@@ -793,15 +797,25 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
accept(RBRACE)
val superclazz =
AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType))
+ val finalFlag = if (enumIsFinal) Flags.FINAL else 0l
+ val abstractFlag = {
+ // javac adds `ACC_ABSTRACT` to enum classes with deferred members
+ val hasAbstractMember = body exists {
+ case d: DefDef => d.mods.isDeferred
+ case _ => false
+ }
+ if (hasAbstractMember) Flags.ABSTRACT else 0l
+ }
addCompanionObject(consts ::: statics ::: predefs, atPos(pos) {
- ClassDef(mods | Flags.ENUM, name, List(),
+ ClassDef(mods | Flags.ENUM | finalFlag | abstractFlag, name, List(),
makeTemplate(superclazz :: interfaces, body))
})
}
- def enumConst(enumType: Tree) = {
+ def enumConst(enumType: Tree): (ValDef, Boolean) = {
annotations()
- atPos(in.currentPos) {
+ var hasClassBody = false
+ val res = atPos(in.currentPos) {
val name = ident()
if (in.token == LPAREN) {
// skip arguments
@@ -809,12 +823,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
accept(RPAREN)
}
if (in.token == LBRACE) {
+ hasClassBody = true
// skip classbody
skipAhead()
accept(RBRACE)
}
ValDef(Modifiers(Flags.ENUM | Flags.STABLE | Flags.JAVA | Flags.STATIC), name.toTermName, enumType, blankExpr)
}
+ (res, hasClassBody)
}
def typeDecl(mods: Modifiers): List[Tree] = in.token match {
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 35ee889c58..d3cdf69d30 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -76,7 +76,7 @@ trait ScalaSettings extends AbsScalaSettings
val implicitConversions = Choice("implicitConversions", "Allow definition of implicit functions called views")
val higherKinds = Choice("higherKinds", "Allow higher-kinded types")
val existentials = Choice("existentials", "Existential types (besides wildcard types) can be written and inferred")
- val macros = Choice("experimental.macros", "Allow macro defintion (besides implementation and application)")
+ val macros = Choice("experimental.macros", "Allow macro definition (besides implementation and application)")
}
val language = {
val description = "Enable or disable language features"
@@ -234,15 +234,16 @@ trait ScalaSettings extends AbsScalaSettings
val emptyLineNumbers = Choice("empty-line-numbers", "Eliminate unnecessary line number information.")
val emptyLabels = Choice("empty-labels", "Eliminate and collapse redundant labels in the bytecode.")
val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.")
- val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled")
- val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath")
+ val nullnessTracking = Choice("nullness-tracking", "Track nullness / non-nullness of local variables and apply optimizations.")
+ val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled.")
+ val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath.")
val lNone = Choice("l:none", "Don't enable any optimizations.")
private val defaultChoices = List(unreachableCode)
val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString(","), expandsTo = defaultChoices)
- private val methodChoices = List(unreachableCode, simplifyJumps, emptyLineNumbers, emptyLabels, compactLocals)
+ private val methodChoices = List(unreachableCode, simplifyJumps, emptyLineNumbers, emptyLabels, compactLocals, nullnessTracking)
val lMethod = Choice("l:method", "Enable intra-method optimizations: "+ methodChoices.mkString(","), expandsTo = methodChoices)
private val projectChoices = List(lMethod, inlineProject)
@@ -264,6 +265,7 @@ trait ScalaSettings extends AbsScalaSettings
def YoptEmptyLineNumbers = Yopt.contains(YoptChoices.emptyLineNumbers)
def YoptEmptyLabels = Yopt.contains(YoptChoices.emptyLabels)
def YoptCompactLocals = Yopt.contains(YoptChoices.compactLocals)
+ def YoptNullnessTracking = Yopt.contains(YoptChoices.nullnessTracking)
def YoptInlineProject = Yopt.contains(YoptChoices.inlineProject)
def YoptInlineGlobal = Yopt.contains(YoptChoices.inlineGlobal)
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index 41ce0837cb..59cc13c64e 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -23,7 +23,7 @@ trait Warnings {
val warnValueDiscard = BooleanSetting("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.")
val warnNumericWiden = BooleanSetting("-Ywarn-numeric-widen", "Warn when numerics are widened.")
// SI-7712, SI-7707 warnUnused not quite ready for prime-time
- val warnUnused = BooleanSetting("-Ywarn-unused", "Warn when local and private vals, vars, defs, and types are are unused.")
+ val warnUnused = BooleanSetting("-Ywarn-unused", "Warn when local and private vals, vars, defs, and types are unused.")
// currently considered too noisy for general use
val warnUnusedImport = BooleanSetting("-Ywarn-unused-import", "Warn when imports are unused.")
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 8fd2ea45e4..4f5589fd7c 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -8,7 +8,6 @@ package symtab
import classfile.ClassfileParser
import java.io.IOException
-import scala.compat.Platform.currentTime
import scala.reflect.internal.MissingRequirementError
import scala.reflect.internal.util.Statistics
import scala.reflect.io.{ AbstractFile, NoAbstractFile }
@@ -207,7 +206,7 @@ abstract class SymbolLoaders {
override def complete(root: Symbol) {
try {
- val start = currentTime
+ val start = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime())
val currentphase = phase
doComplete(root)
phase = currentphase
@@ -374,7 +373,7 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) { root.sourceModule.initialize }
}
- /** used from classfile parser to avoid cyclies */
+ /** used from classfile parser to avoid cycles */
var parentsLevel = 0
var pendingLoadActions: List[() => Unit] = Nil
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 518a402230..660028eab8 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -539,6 +539,8 @@ abstract class ClassfileParser {
devWarning(s"no linked class for java enum $sym in ${sym.owner}. A referencing class file might be missing an InnerClasses entry.")
case linked =>
if (!linked.isSealed)
+ // Marking the enum class SEALED | ABSTRACT enables exhaustiveness checking.
+ // This is a bit of a hack and requires excluding the ABSTRACT flag in the backend, see method javaClassfileFlags.
linked setFlag (SEALED | ABSTRACT)
linked addChild sym
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index ea46116976..438a71061e 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -599,7 +599,7 @@ abstract class ICodeReader extends ClassfileParser {
}
case JVM.invokedynamic =>
// TODO, this is just a place holder. A real implementation must parse the class constant entry
- debuglog("Found JVM invokedynamic instructionm, inserting place holder ICode INVOKE_DYNAMIC.")
+ debuglog("Found JVM invokedynamic instruction, inserting place holder ICode INVOKE_DYNAMIC.")
containsInvokeDynamic = true
val poolEntry = in.nextChar.toInt
in.skip(2)
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
index 19abe1d249..ddf003bb98 100644
--- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -88,6 +88,8 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
case class DelambdafyAnonClass(lambdaClassDef: ClassDef, newExpr: Tree) extends TransformedFunction
case class InvokeDynamicLambda(tree: Apply) extends TransformedFunction
+ private val boxingBridgeMethods = mutable.ArrayBuffer[Tree]()
+
// here's the main entry point of the transform
override def transform(tree: Tree): Tree = tree match {
// the main thing we care about is lambdas
@@ -105,6 +107,12 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
// ... or an invokedynamic call
super.transform(apply)
}
+ case Template(_, _, _) =>
+ try {
+ // during this call boxingBridgeMethods will be populated from the Function case
+ val Template(parents, self, body) = super.transform(tree)
+ Template(parents, self, body ++ boxingBridgeMethods)
+ } finally boxingBridgeMethods.clear()
case _ => super.transform(tree)
}
@@ -137,6 +145,64 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
val isStatic = target.hasFlag(STATIC)
+ def createBoxingBridgeMethod(functionParamTypes: List[Type], functionResultType: Type): Tree = {
+ // Note: we bail out of this method and return EmptyTree if we find there is no adaptation required.
+ // If we need to improve performance, we could check the types first before creating the
+ // method and parameter symbols.
+ val methSym = oldClass.newMethod(target.name.append("$adapted").toTermName, target.pos, target.flags | FINAL | ARTIFACT)
+ var neededAdaptation = false
+ def boxedType(tpe: Type): Type = {
+ if (isPrimitiveValueClass(tpe.typeSymbol)) {neededAdaptation = true; ObjectTpe}
+ else if (enteringErasure(tpe.typeSymbol.isDerivedValueClass)) {neededAdaptation = true; ObjectTpe}
+ else tpe
+ }
+ val targetParams: List[Symbol] = target.paramss.head
+ val numCaptures = targetParams.length - functionParamTypes.length
+ val (targetCaptureParams, targetFunctionParams) = targetParams.splitAt(numCaptures)
+ val bridgeParams: List[Symbol] =
+ targetCaptureParams.map(param => methSym.newSyntheticValueParam(param.tpe, param.name.toTermName)) :::
+ map2(targetFunctionParams, functionParamTypes)((param, tp) => methSym.newSyntheticValueParam(boxedType(tp), param.name.toTermName))
+
+ val bridgeResultType: Type = {
+ if (target.info.resultType == UnitTpe && functionResultType != UnitTpe) {
+ neededAdaptation = true
+ ObjectTpe
+ } else
+ boxedType(functionResultType)
+ }
+ val methodType = MethodType(bridgeParams, bridgeResultType)
+ methSym setInfo methodType
+ if (!neededAdaptation)
+ EmptyTree
+ else {
+ val bridgeParamTrees = bridgeParams.map(ValDef(_))
+
+ oldClass.info.decls enter methSym
+
+ val body = localTyper.typedPos(originalFunction.pos) {
+ val newTarget = Select(gen.mkAttributedThis(oldClass), target)
+ val args: List[Tree] = mapWithIndex(bridgeParams) { (param, i) =>
+ if (i < numCaptures) {
+ gen.mkAttributedRef(param)
+ } else {
+ val functionParam = functionParamTypes(i - numCaptures)
+ val targetParam = targetParams(i)
+ if (enteringErasure(functionParam.typeSymbol.isDerivedValueClass)) {
+ val casted = cast(gen.mkAttributedRef(param), functionParam)
+ val unboxed = unbox(casted, ErasedValueType(functionParam.typeSymbol, targetParam.tpe)).modifyType(postErasure.elimErasedValueType)
+ unboxed
+ } else adaptToType(gen.mkAttributedRef(param), targetParam.tpe)
+ }
+ }
+ gen.mkMethodCall(newTarget, args)
+ }
+ val body1 = if (enteringErasure(functionResultType.typeSymbol.isDerivedValueClass))
+ adaptToType(box(body.setType(ErasedValueType(functionResultType.typeSymbol, body.tpe)), "boxing lambda target"), bridgeResultType)
+ else adaptToType(body, bridgeResultType)
+ val methDef0 = DefDef(methSym, List(bridgeParamTrees), body1)
+ postErasure.newTransformer(unit).transform(methDef0).asInstanceOf[DefDef]
+ }
+ }
/**
* Creates the apply method for the anonymous subclass of FunctionN
*/
@@ -292,22 +358,56 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
thisArg ::: captureArgs
}
- val functionalInterface = java8CompatFunctionalInterface(target, originalFunction.tpe)
+ val arity = originalFunction.vparams.length
+
+ // Reconstruct the type of the function entering erasure.
+ // We do this by taking the type after erasure, and re-boxing `ErasedValueType`.
+ //
+ // Unfortunately, the more obvious `enteringErasure(target.info)` doesn't work
+ // as we would like: value classes in parameter position show up as the unboxed types.
+ val (functionParamTypes, functionResultType) = exitingErasure {
+ def boxed(tp: Type) = tp match {
+ case ErasedValueType(valueClazz, _) => TypeRef(NoPrefix, valueClazz, Nil)
+ case _ => tp
+ }
+ // We don't need to deeply map `boxedValueClassType` over the infos as `ErasedValueType`
+ // will only appear directly as a parameter type in a method signature, as shown
+ // https://gist.github.com/retronym/ba81dbd462282c504ff8
+ val info = target.info
+ val boxedParamTypes = info.paramTypes.takeRight(arity).map(boxed)
+ (boxedParamTypes, boxed(info.resultType))
+ }
+ val functionType = definitions.functionType(functionParamTypes, functionResultType)
+
+ val (functionalInterface, isSpecialized) = java8CompatFunctionalInterface(target, functionType)
if (functionalInterface.exists) {
// Create a symbol representing a fictional lambda factory method that accepts the captured
// arguments and returns a Function.
- val msym = currentOwner.newMethod(nme.ANON_FUN_NAME, originalFunction.pos, ARTIFACT)
+ val msym = currentOwner.newMethod(nme.ANON_FUN_NAME, originalFunction.pos, ARTIFACT)
val argTypes: List[Type] = allCaptureArgs.map(_.tpe)
val params = msym.newSyntheticValueParams(argTypes)
- msym.setInfo(MethodType(params, originalFunction.tpe))
+ msym.setInfo(MethodType(params, functionType))
val arity = originalFunction.vparams.length
+ val lambdaTarget =
+ if (isSpecialized)
+ target
+ else {
+ createBoxingBridgeMethod(functionParamTypes, functionResultType) match {
+ case EmptyTree =>
+ target
+ case bridge =>
+ boxingBridgeMethods += bridge
+ bridge.symbol
+ }
+ }
+
// We then apply this symbol to the captures.
val apply = localTyper.typedPos(originalFunction.pos)(Apply(Ident(msym), allCaptureArgs)).asInstanceOf[Apply]
// The backend needs to know the target of the lambda and the functional interface in order
// to emit the invokedynamic instruction. We pass this information as tree attachment.
- apply.updateAttachment(LambdaMetaFactoryCapable(target, arity, functionalInterface))
+ apply.updateAttachment(LambdaMetaFactoryCapable(lambdaTarget, arity, functionalInterface))
InvokeDynamicLambda(apply)
} else {
val anonymousClassDef = makeAnonymousClass
@@ -344,7 +444,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
def adaptAndPostErase(tree: Tree, pt: Type): (Boolean, Tree) = {
val (needsAdapt, adaptedTree) = adapt(tree, pt)
val trans = postErasure.newTransformer(unit)
- val postErasedTree = trans.atOwner(currentOwner)(trans.transform(adaptedTree)) // SI-8017 elimnates ErasedValueTypes
+ val postErasedTree = trans.atOwner(currentOwner)(trans.transform(adaptedTree)) // SI-8017 eliminates ErasedValueTypes
(needsAdapt, postErasedTree)
}
@@ -469,33 +569,21 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
final case class LambdaMetaFactoryCapable(target: Symbol, arity: Int, functionalInterface: Symbol)
// The functional interface that can be used to adapt the lambda target method `target` to the
- // given function type. Returns `NoSymbol` if the compiler settings are unsuitable, or `LambdaMetaFactory`
- // would be unable to generate the correct implementation (e.g. functions referring to derived value classes)
- private def java8CompatFunctionalInterface(target: Symbol, functionType: Type): Symbol = {
- val canUseLambdaMetafactory: Boolean = {
- val hasValueClass = exitingErasure {
- val methodType: Type = target.info
- methodType.exists(_.isInstanceOf[ErasedValueType])
- }
- settings.isBCodeActive && !hasValueClass
- }
-
- def functionalInterface: Symbol = {
- val sym = functionType.typeSymbol
- val pack = currentRun.runDefinitions.Scala_Java8_CompatPackage
- val name1 = specializeTypes.specializedFunctionName(sym, functionType.typeArgs)
- val paramTps :+ restpe = functionType.typeArgs
- val arity = paramTps.length
- if (name1.toTypeName == sym.name) {
- val returnUnit = restpe.typeSymbol == UnitClass
- val functionInterfaceArray =
- if (returnUnit) currentRun.runDefinitions.Scala_Java8_CompatPackage_JProcedure
- else currentRun.runDefinitions.Scala_Java8_CompatPackage_JFunction
- functionInterfaceArray.apply(arity)
- } else {
- pack.info.decl(name1.toTypeName.prepend("J"))
- }
+ // given function type. Returns `NoSymbol` if the compiler settings are unsuitable.
+ private def java8CompatFunctionalInterface(target: Symbol, functionType: Type): (Symbol, Boolean) = {
+ val canUseLambdaMetafactory = settings.isBCodeActive
+
+ val sym = functionType.typeSymbol
+ val pack = currentRun.runDefinitions.Scala_Java8_CompatPackage
+ val name1 = specializeTypes.specializedFunctionName(sym, functionType.typeArgs)
+ val paramTps :+ restpe = functionType.typeArgs
+ val arity = paramTps.length
+ val isSpecialized = name1.toTypeName != sym.name
+ val functionalInterface = if (!isSpecialized) {
+ currentRun.runDefinitions.Scala_Java8_CompatPackage_JFunction(arity)
+ } else {
+ pack.info.decl(name1.toTypeName.prepend("J"))
}
- if (canUseLambdaMetafactory) functionalInterface else NoSymbol
+ (if (canUseLambdaMetafactory) functionalInterface else NoSymbol, isSpecialized)
}
}
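
As a rough source-level picture of the `$adapted` boxing bridge created by `createBoxingBridgeMethod` above (the names and boxing calls below are illustrative assumptions, not the generated code): for a lambda whose body was lifted to a method taking and returning `Int`, the bridge boxes the argument and result so the erased, `Object`-based interface method produced by LambdaMetaFactory can delegate to the primitive-typed target.

object BoxingBridgeSketch {
  def target(x: Int): Int = x + 1         // the lifted lambda body, with its specialized (primitive) signature
  def targetAdapted(x: AnyRef): AnyRef =  // the synthetic "$adapted" bridge
    Int.box(target(Int.unbox(x)))         // unbox the incoming argument, box the result
}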
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 84be9c6e95..833f25537c 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -814,11 +814,10 @@ abstract class Erasure extends AddInterfaces
// specialized members have no type history before 'specialize', causing double def errors for curried defs
override def exclude(sym: Symbol): Boolean = (
sym.isType
- || sym.isPrivate
|| super.exclude(sym)
|| !sym.hasTypeAt(currentRun.refchecksPhase.id)
)
- override def matches(lo: Symbol, high: Symbol) = true
+ override def matches(lo: Symbol, high: Symbol) = !high.isPrivate
}
def isErasureDoubleDef(pair: SymbolPair) = {
import pair._
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index 227c45b3a7..49a4990722 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -510,7 +510,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
def propForEqualsTo(c: Const): Prop = {observed(); symForEqualsTo.getOrElse(c, False)}
// [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]
- /** the information needed to construct the boolean proposition that encods the equality proposition (V = C)
+ /** the information needed to construct the boolean proposition that encodes the equality proposition (V = C)
*
* that models a type test pattern `_: C` or constant pattern `C`, where the type test gives rise to a TypeConst C,
* and the constant pattern yields a ValueConst C
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index 6302e34ac9..451b72d498 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -248,7 +248,7 @@ trait MatchTranslation {
if (caseDefs forall treeInfo.isCatchCase) caseDefs
else {
val swatches = { // switch-catches
- // SI-7459 must duplicate here as we haven't commited to switch emission, and just figuring out
+ // SI-7459 must duplicate here as we haven't committed to switch emission, and just figuring out
// if we can ends up mutating `caseDefs` down in the use of `substituteSymbols` in
// `TypedSubstitution#Substitution`. That is called indirectly by `emitTypeSwitch`.
val bindersAndCases = caseDefs.map(_.duplicate) map { caseDef =>
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index e1fe220556..e0fcc05de2 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -642,7 +642,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
}
// override def apply
- // debug.patmat("before fixerupper: "+ xTree)
+ // debug.patmat("before fixerUpper: "+ xTree)
// currentRun.trackerFactory.snapshot()
// debug.patmat("after fixerupper")
// currentRun.trackerFactory.snapshot()
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
index 2753baa51d..b1783dc81f 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
@@ -112,8 +112,10 @@ trait ScalacPatternExpanders {
arityError("not enough")
else if (elementArity > 0 && !isSeq)
arityError("too many")
- else if (settings.warnStarsAlign && isSeq && productArity > 0 && (elementArity > 0 || !isStar))
- warn("A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).")
+ else if (settings.warnStarsAlign && isSeq && productArity > 0 && elementArity > 0) warn {
+ if (isStar) "Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected."
+ else "A repeated case parameter or extracted sequence is not matched by a sequence wildcard (_*), and may fail at runtime."
+ }
aligned
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index ec76ece00f..3274c86072 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -985,7 +985,7 @@ trait Implicits {
if (implicitInfoss.forall(_.isEmpty)) SearchFailure
else new ImplicitComputation(implicitInfoss, isLocalToCallsite) findBest()
- /** Produce an implicict info map, i.e. a map from the class symbols C of all parts of this type to
+ /** Produce an implicit info map, i.e. a map from the class symbols C of all parts of this type to
* the implicit infos in the companion objects of these class symbols C.
* The parts of a type is the smallest set of types that contains
* - the type itself
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index ab6b2a0b9d..a4c794e8cf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -934,7 +934,7 @@ trait Infer extends Checkable {
def infer_s = map3(tparams, tvars, targs)((tparam, tvar, targ) => s"$tparam=$tvar/$targ") mkString ","
printTyping(tree, s"infer expr instance from pt=$pt, $infer_s")
- // SI-7899 infering by-name types is unsound. The correct behaviour is conditional because the hole is
+ // SI-7899 inferring by-name types is unsound. The correct behaviour is conditional because the hole is
// exploited in Scalaz (Free.scala), as seen in: run/t7899-regression.
def dropByNameIfStrict(tp: Type): Type = if (settings.inferByName) tp else dropByName(tp)
def targsStrict = if (targs eq null) null else targs mapConserve dropByNameIfStrict
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 8b792998d4..2dfecbaea1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -4430,7 +4430,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def onError(typeErrors: Seq[AbsTypeError], warnings: Seq[(Position, String)]): Tree = {
if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start)
- // If the problem is with raw types, copnvert to existentials and try again.
+ // If the problem is with raw types, convert to existentials and try again.
// See #4712 for a case where this situation arises,
if ((fun.symbol ne null) && fun.symbol.isJavaDefined) {
val newtpe = rawToExistential(fun.tpe)
@@ -4975,7 +4975,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
TypeTreeWithDeferredRefCheck(){ () =>
// wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
// we can't simply use original in refchecks because it does not contains types
- // (and the only typed trees we have have been mangled so they're not quite the original tree anymore)
+ // (and the only typed trees available have been mangled so they're not quite the original tree anymore)
checkBounds(result, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "")
result // you only get to see the wrapped tree after running this check :-p
} setType (result.tpe) setPos(result.pos)
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index 352816803f..4ff7067a21 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -184,7 +184,7 @@ object DocStrings {
extractSectionTag(str, section) -> section
}
- /** Extract the section tag, treating the section tag as an indentifier */
+ /** Extract the section tag, treating the section tag as an identifier */
def extractSectionTag(str: String, section: (Int, Int)): String =
str.substring(section._1, skipTag(str, section._1))
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index e13aaad7bc..15d0f14938 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
// GENERATED CODE: DO NOT EDIT.
-// genprod generated these sources at: Sun Sep 15 20:42:00 CEST 2013
+// genprod generated these sources at: Mon Jun 08 18:05:40 CEST 2015
package scala
@@ -26,12 +26,6 @@ package scala
* assert(javaVersion() == anonfun0())
* }
* }}}
- *
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-
*/
trait Function0[@specialized(Specializable.Primitives) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 620dcc19aa..572901c6f3 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -25,11 +25,8 @@ package scala
* }
* }}}
*
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-
+ * Note that the difference between `Function1` and [[scala.PartialFunction]]
+ * is that the latter can specify inputs which it will not handle.
*/
@annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.")
trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index 5690adb56a..e2c094ea40 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -25,12 +25,6 @@ package scala
* assert(max(0, 1) == anonfun2(0, 1))
* }
* }}}
- *
- * Note that `Function1` does not define a total function, as might
- * be suggested by the existence of [[scala.PartialFunction]]. The only
- * distinction between `Function1` and `PartialFunction` is that the
- * latter can specify inputs which it will not handle.
-
*/
trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 5c4b15ad0f..e2c271145d 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -619,29 +619,21 @@ trait Iterator[+A] extends TraversableOnce[A] {
def span(p: A => Boolean): (Iterator[A], Iterator[A]) = {
val self = buffered
- /*
- * Giving a name to following iterator (as opposed to trailing) because
- * anonymous class is represented as a structural type that trailing
- * iterator is referring (the finish() method) and thus triggering
- * handling of structural calls. It's not what's intended here.
- */
+ // Must be a named class to avoid structural call to finish from trailing iterator
class Leading extends AbstractIterator[A] {
- val lookahead = new mutable.Queue[A]
- def advance() = {
- self.hasNext && p(self.head) && {
- lookahead += self.next
- true
- }
+ private val drained = new mutable.Queue[A]
+ private var finished = false
+ def finish(): Unit = {
+ require(!finished)
+ finished = true
+ while (selfish) drained += self.next
}
- def finish() = {
- while (advance()) ()
- }
- def hasNext = lookahead.nonEmpty || advance()
+ private def selfish = self.hasNext && p(self.head)
+ def hasNext = if (finished) drained.nonEmpty else selfish
def next() = {
- if (lookahead.isEmpty)
- advance()
-
- lookahead.dequeue()
+ if (finished) drained.dequeue()
+ else if (selfish) self.next()
+ else empty.next()
}
}
val leading = new Leading
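A sketch of the behaviour the rewritten `Leading` iterator must preserve (illustration only, not from the patch): `span` splits at the first element that fails the predicate, and either half may be consumed first, now without routing `finish()` through a structural type.
{{{
val (lo, hi) = Iterator(1, 2, 3, 10, 4).span(_ < 5)
assert(lo.toList == List(1, 2, 3))
assert(hi.toList == List(10, 4))
}}}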
diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala
index a0b0e1318b..b2e63daaba 100644
--- a/src/library/scala/collection/generic/Sorted.scala
+++ b/src/library/scala/collection/generic/Sorted.scala
@@ -36,7 +36,7 @@ trait Sorted[K, +This <: Sorted[K, This]] {
/** Creates a ranged projection of this collection. Any mutations in the
* ranged projection will update this collection and vice versa.
*
- * Note: keys are not garuanteed to be consistent between this collection
+ * Note: keys are not guaranteed to be consistent between this collection
* and the projection. This is the case for buffers where indexing is
* relative to the projection.
*
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 48a6ca5699..7b1997252d 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -462,6 +462,7 @@ object List extends SeqFactory[List] {
private class SerializationProxy[A](@transient private var orig: List[A]) extends Serializable {
private def writeObject(out: ObjectOutputStream) {
+ out.defaultWriteObject()
var xs: List[A] = orig
while (!xs.isEmpty) {
out.writeObject(xs.head)
@@ -473,6 +474,7 @@ object List extends SeqFactory[List] {
// Java serialization calls this before readResolve during de-serialization.
// Read the whole list and store it in `orig`.
private def readObject(in: ObjectInputStream) {
+ in.defaultReadObject()
val builder = List.newBuilder[A]
while (true) in.readObject match {
case ListSerializeEnd =>
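The added `defaultWriteObject()`/`defaultReadObject()` calls write and read the proxy's default field data, keeping the stream in the shape standard Java serialization expects before the hand-rolled element loop runs. A hypothetical round trip exercising the proxy:
{{{
import java.io._
val buf = new ByteArrayOutputStream()
val oos = new ObjectOutputStream(buf)
oos.writeObject(List(1, 2, 3)); oos.close()
val ois = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
assert(ois.readObject() == List(1, 2, 3))
}}}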
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index f1ac161e9a..28e56a6d87 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -12,6 +12,9 @@ package immutable
import mutable.{ Builder, ListBuffer }
+// TODO: Now the specialization exists there is no clear reason to have
+// separate classes for Range/NumericRange. Investigate and consolidate.
+
/** `NumericRange` is a more generic version of the
* `Range` class which works with arbitrary types.
* It must be supplied with an `Integral` implementation of the
@@ -28,9 +31,6 @@ import mutable.{ Builder, ListBuffer }
* assert(r1 sameElements r2.map(_ - veryBig))
* }}}
*
- * TODO: Now the specialization exists there is no clear reason to have
- * separate classes for Range/NumericRange. Investigate and consolidate.
- *
* @author Paul Phillips
* @version 2.8
* @define Coll `NumericRange`
@@ -266,7 +266,7 @@ object NumericRange {
// Numbers may be big.
val one = num.one
val limit = num.fromInt(Int.MaxValue)
- def check(t: T): T =
+ def check(t: T): T =
if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.")
else t
// If the range crosses zero, it might overflow when subtracted
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 98266716cc..53af3ce158 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -56,11 +56,12 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
* @throws java.util.NoSuchElementException if the queue is too short.
*/
override def apply(n: Int): A = {
- val len = out.length
- if (n < len) out.apply(n)
+ val olen = out.length
+ if (n < olen) out.apply(n)
else {
- val m = n - len
- if (m < in.length) in.reverse.apply(m)
+ val m = n - olen
+ val ilen = in.length
+ if (m < ilen) in.apply(ilen - m - 1)
else throw new NoSuchElementException("index out of range")
}
}
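The reworked `apply` avoids allocating `in.reverse` for a single lookup by indexing `in` from its far end; observable behaviour is unchanged. An illustrative check (not part of the patch):
{{{
val q = scala.collection.immutable.Queue(1, 2, 3).enqueue(4).enqueue(5)
assert((0 until q.length).map(q(_)) == Vector(1, 2, 3, 4, 5))
}}}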
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index ada3533a60..6c5b10e73b 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -153,7 +153,7 @@ import scala.language.implicitConversions
*
* - The fact that `tail` works at all is of interest. In the definition of
* `fibs` we have an initial `(0, 1, Stream(...))` so `tail` is deterministic.
- * If we deinfed `fibs` such that only `0` were concretely known then the act
+ * If we defined `fibs` such that only `0` were concretely known then the act
* of determining `tail` would require the evaluation of `tail` which would
* cause an infinite recursion and stack overflow. If we define a definition
* where the tail is not initially computable then we're going to have an
@@ -1134,7 +1134,13 @@ object Stream extends SeqFactory[Stream] {
* to streams.
*/
class ConsWrapper[A](tl: => Stream[A]) {
+ /** Construct a stream consisting of a given first element followed by elements
+ * from a lazily evaluated Stream.
+ */
def #::(hd: A): Stream[A] = cons(hd, tl)
+ /** Construct a stream consisting of the concatenation of the given stream and
+ * a lazily evaluated Stream.
+ */
def #:::(prefix: Stream[A]): Stream[A] = prefix append tl
}
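Both operators documented above take `tl` by name, so the right-hand side is forced only on demand. A short usage sketch (illustration only):
{{{
def ones: Stream[Int] = 1 #:: ones
assert(ones.take(3).toList == List(1, 1, 1))
assert((Stream(0) #::: ones).take(3).toList == List(0, 1, 1))
}}}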
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 47a623a616..46d5d0c69c 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -132,19 +132,25 @@ override def companion: GenericCompanion[Vector] = Vector
throw new IndexOutOfBoundsException(index.toString)
}
-
+ // If we have a default builder, there are faster ways to perform some operations
+ @inline private[this] def isDefaultCBF[A, B, That](bf: CanBuildFrom[Vector[A], B, That]): Boolean =
+ (bf eq IndexedSeq.ReusableCBF) || (bf eq collection.immutable.Seq.ReusableCBF) || (bf eq collection.Seq.ReusableCBF)
+
// SeqLike api
override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
- if (bf eq IndexedSeq.ReusableCBF) updateAt(index, elem).asInstanceOf[That] // just ignore bf
+ if (isDefaultCBF[A, B, That](bf))
+ updateAt(index, elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly
else super.updated(index, elem)(bf)
override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
- if (bf eq IndexedSeq.ReusableCBF) appendFront(elem).asInstanceOf[That] // just ignore bf
+ if (isDefaultCBF[A, B, That](bf))
+ appendFront(elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly
else super.+:(elem)(bf)
override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
- if (bf eq IndexedSeq.ReusableCBF) appendBack(elem).asInstanceOf[That] // just ignore bf
+ if (isDefaultCBF(bf))
+ appendBack(elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly
else super.:+(elem)(bf)
override def take(n: Int): Vector[A] = {
@@ -211,7 +217,8 @@ override def companion: GenericCompanion[Vector] = Vector
// concat (suboptimal but avoids worst performance gotchas)
override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
- if (bf eq IndexedSeq.ReusableCBF) {
+ if (isDefaultCBF(bf)) {
+ // We are sure we will create a Vector, so let's do it efficiently
import Vector.{Log2ConcatFaster, TinyAppendFaster}
if (that.isEmpty) this.asInstanceOf[That]
else {
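`isDefaultCBF` widens the fast path from `IndexedSeq.ReusableCBF` alone to the default `immutable.Seq` and `collection.Seq` builder factories, so the common calls below still produce a `Vector` without going through a generic builder. Illustration only:
{{{
val v = Vector(1, 2, 3)
assert(v.updated(1, 20) == Vector(1, 20, 3))
assert((0 +: v :+ 4) == Vector(0, 1, 2, 3, 4))
assert((v ++ List(4, 5)) == Vector(1, 2, 3, 4, 5))
}}}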
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 1906c47f61..2bc6738e53 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -15,7 +15,7 @@ import immutable.{List, Nil, ::}
import java.io.{ObjectOutputStream, ObjectInputStream}
import scala.annotation.migration
-/** A `Buffer` implementation back up by a list. It provides constant time
+/** A `Buffer` implementation backed by a list. It provides constant time
* prepend and append. Most other operations are linear.
*
* @author Matthias Zenger
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index d6e2963ad8..6bb35606a6 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -49,7 +49,7 @@ object BigDecimal {
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`, rounding if necessary. */
def decimal(d: Double, mc: MathContext): BigDecimal =
- new BigDecimal(new BigDec(java.lang.Double.toString(d), mc))
+ new BigDecimal(new BigDec(java.lang.Double.toString(d), mc), mc)
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`. */
def decimal(d: Double): BigDecimal = decimal(d, defaultMathContext)
@@ -59,7 +59,7 @@ object BigDecimal {
* `0.1 != 0.1f`.
*/
def decimal(f: Float, mc: MathContext): BigDecimal =
- new BigDecimal(new BigDec(java.lang.Float.toString(f), mc))
+ new BigDecimal(new BigDec(java.lang.Float.toString(f), mc), mc)
/** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`.
* Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and
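The fix passes the supplied `MathContext` through to the resulting wrapper instead of using it only while parsing the text form, so later arithmetic rounds as requested. A minimal sketch, assuming the wrapper's public `mc` field:
{{{
import java.math.MathContext
val x = BigDecimal.decimal(1.0 / 3.0, new MathContext(4))
assert(x.mc.getPrecision == 4)  // previously the default MathContext was kept instead
}}}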
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index eafbf96993..9245798c17 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -134,7 +134,7 @@ object Numeric {
def div(x: Float, y: Float): Float = x / y
}
trait FloatAsIfIntegral extends FloatIsConflicted with Integral[Float] {
- def quot(x: Float, y: Float): Float = (BigDecimal(x) / BigDecimal(y)).floatValue
+ def quot(x: Float, y: Float): Float = (BigDecimal(x) quot BigDecimal(y)).floatValue
def rem(x: Float, y: Float): Float = (BigDecimal(x) remainder BigDecimal(y)).floatValue
}
implicit object FloatIsFractional extends FloatIsFractional with Ordering.FloatOrdering
@@ -158,7 +158,7 @@ object Numeric {
def div(x: Double, y: Double): Double = x / y
}
trait DoubleAsIfIntegral extends DoubleIsConflicted with Integral[Double] {
- def quot(x: Double, y: Double): Double = (BigDecimal(x) / BigDecimal(y)).doubleValue
+ def quot(x: Double, y: Double): Double = (BigDecimal(x) quot BigDecimal(y)).doubleValue
def rem(x: Double, y: Double): Double = (BigDecimal(x) remainder BigDecimal(y)).doubleValue
}
@@ -178,7 +178,7 @@ object Numeric {
def div(x: BigDecimal, y: BigDecimal): BigDecimal = x / y
}
trait BigDecimalAsIfIntegral extends BigDecimalIsConflicted with Integral[BigDecimal] {
- def quot(x: BigDecimal, y: BigDecimal): BigDecimal = x / y
+ def quot(x: BigDecimal, y: BigDecimal): BigDecimal = x quot y
def rem(x: BigDecimal, y: BigDecimal): BigDecimal = x remainder y
}
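With this change, `quot` on the "as if integral" instances performs truncated division (via `BigDecimal.quot`) rather than exact division. A sketch of the resulting behaviour, using `scala.math.Numeric.DoubleAsIfIntegral`:
{{{
val I = scala.math.Numeric.DoubleAsIfIntegral
assert(I.quot(7.5, 2.0) == 3.0)  // exact division would give 3.75
assert(I.rem(7.5, 2.0) == 1.5)
}}}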
diff --git a/src/library/scala/sys/BooleanProp.scala b/src/library/scala/sys/BooleanProp.scala
index 74b0a9077b..e5e4668edb 100644
--- a/src/library/scala/sys/BooleanProp.scala
+++ b/src/library/scala/sys/BooleanProp.scala
@@ -63,12 +63,13 @@ object BooleanProp {
def valueIsTrue[T](key: String): BooleanProp = new BooleanPropImpl(key, _.toLowerCase == "true")
/** As an alternative, this method creates a BooleanProp which is true
- * if the key exists in the map. This way -Dfoo.bar is enough to be
- * considered true.
+ * if the key exists in the map and is not assigned a value other than "true",
+ * compared case-insensitively, or the empty string. This way -Dmy.property
+ * results in a true-valued property, but -Dmy.property=false does not.
*
* @return A BooleanProp with a liberal truth policy
*/
- def keyExists[T](key: String): BooleanProp = new BooleanPropImpl(key, _ => true)
+ def keyExists[T](key: String): BooleanProp = new BooleanPropImpl(key, s => s == "" || s.equalsIgnoreCase("true"))
/** A constant true or false property which ignores all method calls.
*/
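Under the policy described above, a key passed with no value (or any casing of "true") yields a true property, while any other explicit value does not. Editorial illustration using a made-up property name:
{{{
sys.props("my.test.flag") = ""  // as if -Dmy.test.flag had been passed
assert(sys.BooleanProp.keyExists("my.test.flag").value)
sys.props("my.test.flag") = "false"
assert(!sys.BooleanProp.keyExists("my.test.flag").value)
}}}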
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index 2e021ad9d9..ee2bdbc4a7 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, Ross Judson **
+** / __/ __// _ | / / / _ | (c) 2006-2015, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,518 +9,276 @@
package scala
package util
-import scala.reflect.{ ClassTag, classTag }
-import scala.math.{ Ordering, max, min }
+import scala.reflect.ClassTag
+import scala.math.Ordering
-/** The Sorting object provides functions that can sort various kinds of
- * objects. You can provide a comparison function, or you can request a sort
- * of items that are viewable as [[scala.math.Ordered]]. Some sorts that
- * operate directly on a subset of value types are also provided. These
- * implementations are derived from those in the Sun JDK.
+/** The `Sorting` object provides convenience wrappers for `java.util.Arrays.sort`.
+ * Methods that defer to `java.util.Arrays.sort` say so, or state under what
+ * conditions they do.
*
- * Note that stability doesn't matter for value types, so use the `quickSort`
- * variants for those. `stableSort` is intended to be used with
- * objects when the prior ordering should be preserved, where possible.
+ * `Sorting` also implements a general-purpose quicksort and stable (merge) sort
+ * for those cases where `java.util.Arrays.sort` could only be used at the cost
+ * of a large memory penalty. If performance rather than memory usage is the
+ * primary concern, one may wish to find alternate strategies to use
+ * `java.util.Arrays.sort` directly e.g. by boxing primitives to use
+ * a custom ordering on them.
+ *
+ * `Sorting` provides methods where you can provide a comparison function, or
+ * can request a sort of items that are [[scala.math.Ordered]] or that
+ * otherwise have an implicit or explicit [[scala.math.Ordering]].
+ *
+ * Note also that high-performance non-default sorts for numeric types
+ * are not provided. If this is required, it is advisable to investigate
+ * other libraries that cover this use case.
*
* @author Ross Judson
- * @version 1.0
+ * @author Adriaan Moors
+ * @author Rex Kerr
+ * @version 1.1
*/
object Sorting {
- /** Quickly sort an array of Doubles. */
- def quickSort(a: Array[Double]) { sort1(a, 0, a.length) }
-
- /** Quickly sort an array of items with an implicit Ordering. */
- def quickSort[K: Ordering](a: Array[K]) { sort1(a, 0, a.length) }
-
- /** Quickly sort an array of Ints. */
- def quickSort(a: Array[Int]) { sort1(a, 0, a.length) }
-
- /** Quickly sort an array of Floats. */
- def quickSort(a: Array[Float]) { sort1(a, 0, a.length) }
-
- /** Sort an array of K where K is Ordered, preserving the existing order
- * where the values are equal. */
- def stableSort[K: ClassTag: Ordering](a: Array[K]) {
- stableSort(a, 0, a.length-1, new Array[K](a.length), Ordering[K].lt _)
- }
+ /** Sort an array of Doubles using `java.util.Arrays.sort`. */
+ def quickSort(a: Array[Double]): Unit = java.util.Arrays.sort(a)
- /** Sorts an array of `K` given an ordering function `f`.
- * `f` should return `true` iff its first parameter is strictly less than its second parameter.
- */
- def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean) {
- stableSort(a, 0, a.length-1, new Array[K](a.length), f)
- }
+ /** Sort an array of Ints using `java.util.Arrays.sort`. */
+ def quickSort(a: Array[Int]): Unit = java.util.Arrays.sort(a)
- /** Sorts an arbitrary sequence into an array, given a comparison function
- * that should return `true` iff parameter one is strictly less than parameter two.
- *
- * @param a the sequence to be sorted.
- * @param f the comparison function.
- * @return the sorted sequence of items.
- */
- def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = {
- val ret = a.toArray
- stableSort(ret, f)
- ret
- }
+ /** Sort an array of Floats using `java.util.Arrays.sort`. */
+ def quickSort(a: Array[Float]): Unit = java.util.Arrays.sort(a)
+
+ private final val qsortThreshold = 16
- /** Sorts an arbitrary sequence of items that are viewable as ordered. */
- def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] =
- stableSort(a, Ordering[K].lt _)
-
- /** Stably sorts a sequence of items given an extraction function that will
- * return an ordered key from an item.
- *
- * @param a the sequence to be sorted.
- * @param f the comparison function.
- * @return the sorted sequence of items.
- */
- def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] =
- stableSort(a)(implicitly[ClassTag[K]], Ordering[M] on f)
-
- private def sort1[K: Ordering](x: Array[K], off: Int, len: Int) {
- val ord = Ordering[K]
- import ord._
-
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
- }
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
- i += 1
- a += 1
- b += 1
- }
- }
- def med3(a: Int, b: Int, c: Int) = {
- if (x(a) < x(b)) {
- if (x(b) < x(c)) b else if (x(a) < x(c)) c else a
- } else {
- if (x(b) > x(c)) b else if (x(a) > x(c)) c else a
- }
- }
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j > off && x(j-1) > x(j)) {
- swap(j, j-1)
- j -= 1
+ /** Sort array `a` with quicksort, using the Ordering on its elements.
+ * This algorithm sorts in place, so no additional memory is used aside from
+ * what might be required to box individual elements during comparison.
+ */
+ def quickSort[K: Ordering](a: Array[K]): Unit = {
+ // Must have iN >= i0 or math will fail. Also, i0 >= 0.
+ def inner(a: Array[K], i0: Int, iN: Int, ord: Ordering[K]): Unit = {
+ if (iN - i0 < qsortThreshold) insertionSort(a, i0, iN, ord)
+ else {
+ var iK = (i0 + iN) >>> 1 // Unsigned div by 2
+ // Find index of median of first, central, and last elements
+ var pL =
+ if (ord.compare(a(i0), a(iN - 1)) <= 0)
+ if (ord.compare(a(i0), a(iK)) < 0)
+ if (ord.compare(a(iN - 1), a(iK)) < 0) iN - 1 else iK
+ else i0
+ else
+ if (ord.compare(a(i0), a(iK)) < 0) i0
+ else
+ if (ord.compare(a(iN - 1), a(iK)) <= 0) iN - 1
+ else iK
+ val pivot = a(pL)
+ // pL is the start of the pivot block; move it into the middle if needed
+ if (pL != iK) { a(pL) = a(iK); a(iK) = pivot; pL = iK }
+ // Elements equal to the pivot will be in range pL until pR
+ var pR = pL + 1
+ // Items known to be less than pivot are below iA (range i0 until iA)
+ var iA = i0
+ // Items known to be greater than pivot are at or above iB (range iB until iN)
+ var iB = iN
+ // Scan through everything in the buffer before the pivot(s)
+ while (pL - iA > 0) {
+ val current = a(iA)
+ ord.compare(current, pivot) match {
+ case 0 =>
+ // Swap current out with pivot block
+ a(iA) = a(pL - 1)
+ a(pL - 1) = current
+ pL -= 1
+ case x if x < 0 =>
+ // Already in place. Just update indices.
+ iA += 1
+ case _ if iB > pR =>
+ // Wrong side. There's room on the other side, so swap
+ a(iA) = a(iB - 1)
+ a(iB - 1) = current
+ iB -= 1
+ case _ =>
+ // Wrong side and there is no room. Swap by rotating pivot block.
+ a(iA) = a(pL - 1)
+ a(pL - 1) = a(pR - 1)
+ a(pR - 1) = current
+ pL -= 1
+ pR -= 1
+ iB -= 1
}
- i += 1
}
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
+ // Get anything remaining in buffer after the pivot(s)
+ while (iB - pR > 0) {
+ val current = a(iB - 1)
+ ord.compare(current, pivot) match {
+ case 0 =>
+ // Swap current out with pivot block
+ a(iB - 1) = a(pR)
+ a(pR) = current
+ pR += 1
+ case x if x > 0 =>
+ // Already in place. Just update indices.
+ iB -= 1
+ case _ =>
+ // Wrong side and we already know there is no room. Swap by rotating pivot block.
+ a(iB - 1) = a(pR)
+ a(pR) = a(pL)
+ a(pL) = current
+ iA += 1
+ pL += 1
+ pR += 1
}
- m = med3(l, m, n) // Mid-size, med of 3
}
- val v = x(m)
-
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- while (b <= c && x(b) <= v) {
- if (x(b) equiv v) {
- swap(a, b)
- a += 1
- }
- b += 1
- }
- while (c >= b && x(c) >= v) {
- if (x(c) equiv v) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
+ // Use tail recursion on large half (Sedgewick's method) so we don't blow up the stack if pivots are poorly chosen
+ if (iA - i0 < iN - iB) {
+ inner(a, i0, iA, ord) // True recursion
+ inner(a, iB, iN, ord) // Should be tail recursion
+ }
+ else {
+ inner(a, iB, iN, ord) // True recursion
+ inner(a, i0, iA, ord) // Should be tail recursion
}
-
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
-
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
}
}
- sort2(off, len)
+ inner(a, 0, a.length, implicitly[Ordering[K]])
}
-
- private def sort1(x: Array[Int], off: Int, len: Int) {
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
+
+ private final val mergeThreshold = 32
+
+ // Ordering[T] might be slow especially for boxed primitives, so use binary search variant of insertion sort
+ // Caller must pass iN >= i0 or math will fail. Also, i0 >= 0.
+ private def insertionSort[@specialized T](a: Array[T], i0: Int, iN: Int, ord: Ordering[T]): Unit = {
+ val n = iN - i0
+ if (n < 2) return
+ if (ord.compare(a(i0), a(i0+1)) > 0) {
+ val temp = a(i0)
+ a(i0) = a(i0+1)
+ a(i0+1) = temp
}
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
- i += 1
- a += 1
- b += 1
- }
- }
- def med3(a: Int, b: Int, c: Int) = {
- if (x(a) < x(b)) {
- if (x(b) < x(c)) b else if (x(a) < x(c)) c else a
- } else {
- if (x(b) > x(c)) b else if (x(a) > x(c)) c else a
- }
- }
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j>off && x(j-1) > x(j)) {
- swap(j, j-1)
- j -= 1
- }
- i += 1
+ var m = 2
+ while (m < n) {
+ // Speed up already-sorted case by checking last element first
+ val next = a(i0 + m)
+ if (ord.compare(next, a(i0+m-1)) < 0) {
+ var iA = i0
+ var iB = i0 + m - 1
+ while (iB - iA > 1) {
+ val ix = (iA + iB) >>> 1 // Use bit shift to get unsigned div by 2
+ if (ord.compare(next, a(ix)) < 0) iB = ix
+ else iA = ix
}
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
- }
- m = med3(l, m, n) // Mid-size, med of 3
- }
- val v = x(m)
-
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- while (b <= c && x(b) <= v) {
- if (x(b) == v) {
- swap(a, b)
- a += 1
- }
- b += 1
- }
- while (c >= b && x(c) >= v) {
- if (x(c) == v) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
+ val ix = iA + (if (ord.compare(next, a(iA)) < 0) 0 else 1)
+ var i = i0 + m
+ while (i > ix) {
+ a(i) = a(i-1)
+ i -= 1
}
-
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
-
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
+ a(ix) = next
}
+ m += 1
}
- sort2(off, len)
}
-
- private def sort1(x: Array[Double], off: Int, len: Int) {
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
+
+ // Caller is required to pass iN >= i0, else math will fail. Also, i0 >= 0.
+ private def mergeSort[@specialized T: ClassTag](a: Array[T], i0: Int, iN: Int, ord: Ordering[T], scratch: Array[T] = null): Unit = {
+ if (iN - i0 < mergeThreshold) insertionSort(a, i0, iN, ord)
+ else {
+ val iK = (i0 + iN) >>> 1 // Bit shift equivalent to unsigned math, no overflow
+ val sc = if (scratch eq null) new Array[T](iK - i0) else scratch
+ mergeSort(a, i0, iK, ord, sc)
+ mergeSort(a, iK, iN, ord, sc)
+ mergeSorted(a, i0, iK, iN, ord, sc)
}
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
+ }
+
+ // Must have 0 <= i0 < iK < iN
+ private def mergeSorted[@specialized T](a: Array[T], i0: Int, iK: Int, iN: Int, ord: Ordering[T], scratch: Array[T]): Unit = {
+ // Check to make sure we're not already in order
+ if (ord.compare(a(iK-1), a(iK)) > 0) {
+ var i = i0
+ val jN = iK - i0
+ var j = 0
+ while (i < iK) {
+ scratch (j) = a(i)
i += 1
- a += 1
- b += 1
- }
- }
- def med3(a: Int, b: Int, c: Int) = {
- val ab = x(a) compare x(b)
- val bc = x(b) compare x(c)
- val ac = x(a) compare x(c)
- if (ab < 0) {
- if (bc < 0) b else if (ac < 0) c else a
- } else {
- if (bc > 0) b else if (ac > 0) c else a
+ j += 1
}
- }
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j > off && (x(j-1) compare x(j)) > 0) {
- swap(j, j-1)
- j -= 1
- }
- i += 1
- }
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
- }
- m = med3(l, m, n) // Mid-size, med of 3
- }
- val v = x(m)
-
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- var bv = x(b) compare v
- while (b <= c && bv <= 0) {
- if (bv == 0) {
- swap(a, b)
- a += 1
- }
- b += 1
- if (b <= c) bv = x(b) compare v
- }
- var cv = x(c) compare v
- while (c >= b && cv >= 0) {
- if (cv == 0) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- if (c >= b) cv = x(c) compare v
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
- }
-
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
-
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
+ var k = i0
+ j = 0
+ while (i < iN && j < jN) {
+ if (ord.compare(a(i), scratch(j)) < 0) { a(k) = a(i); i += 1 }
+ else { a(k) = scratch(j); j += 1 }
+ k += 1
}
+ while (j < jN) { a(k) = scratch(j); j += 1; k += 1 }
+ // Don't need to finish a(i) because it's already in place, k = i
}
- sort2(off, len)
}
-
- private def sort1(x: Array[Float], off: Int, len: Int) {
- def swap(a: Int, b: Int) {
- val t = x(a)
- x(a) = x(b)
- x(b) = t
+
+ // Why would you even do this?
+ private def booleanSort(a: Array[Boolean]): Unit = {
+ var i = 0
+ var n = 0
+ while (i < a.length) {
+ if (!a(i)) n += 1
+ i += 1
}
- def vecswap(_a: Int, _b: Int, n: Int) {
- var a = _a
- var b = _b
- var i = 0
- while (i < n) {
- swap(a, b)
- i += 1
- a += 1
- b += 1
- }
+ i = 0
+ while (i < n) {
+ a(i) = false
+ i += 1
}
- def med3(a: Int, b: Int, c: Int) = {
- val ab = x(a) compare x(b)
- val bc = x(b) compare x(c)
- val ac = x(a) compare x(c)
- if (ab < 0) {
- if (bc < 0) b else if (ac < 0) c else a
- } else {
- if (bc > 0) b else if (ac > 0) c else a
- }
+ while (i < a.length) {
+ a(i) = true
+ i += 1
}
- def sort2(off: Int, len: Int) {
- // Insertion sort on smallest arrays
- if (len < 7) {
- var i = off
- while (i < len + off) {
- var j = i
- while (j > off && (x(j-1) compare x(j)) > 0) {
- swap(j, j-1)
- j -= 1
- }
- i += 1
- }
- } else {
- // Choose a partition element, v
- var m = off + (len >> 1) // Small arrays, middle element
- if (len > 7) {
- var l = off
- var n = off + len - 1
- if (len > 40) { // Big arrays, pseudomedian of 9
- val s = len / 8
- l = med3(l, l+s, l+2*s)
- m = med3(m-s, m, m+s)
- n = med3(n-2*s, n-s, n)
- }
- m = med3(l, m, n) // Mid-size, med of 3
- }
- val v = x(m)
+ }
- // Establish Invariant: v* (<v)* (>v)* v*
- var a = off
- var b = a
- var c = off + len - 1
- var d = c
- var done = false
- while (!done) {
- var bv = x(b) compare v
- while (b <= c && bv <= 0) {
- if (bv == 0) {
- swap(a, b)
- a += 1
- }
- b += 1
- if (b <= c) bv = x(b) compare v
- }
- var cv = x(c) compare v
- while (c >= b && cv >= 0) {
- if (cv == 0) {
- swap(c, d)
- d -= 1
- }
- c -= 1
- if (c >= b) cv = x(c) compare v
- }
- if (b > c) {
- done = true
- } else {
- swap(b, c)
- c -= 1
- b += 1
- }
- }
+ // TODO: add upper bound: T <: AnyRef, propagate to callers below (not binary compatible)
+ // Maybe also rename all these methods to `sort`.
+ @inline private def sort[T](a: Array[T], ord: Ordering[T]): Unit = a match {
+ case _: Array[AnyRef] =>
+ // Note that runtime matches are covariant, so could actually be any Array[T] s.t. T is not primitive (even boxed value classes)
+ if (a.length > 1 && (ord eq null)) throw new NullPointerException("Ordering")
+ java.util.Arrays.sort(a, ord)
+ case a: Array[Int] => if (ord eq Ordering.Int) java.util.Arrays.sort(a) else mergeSort[Int](a, 0, a.length, ord)
+ case a: Array[Double] => mergeSort[Double](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful!
+ case a: Array[Long] => if (ord eq Ordering.Long) java.util.Arrays.sort(a) else mergeSort[Long](a, 0, a.length, ord)
+ case a: Array[Float] => mergeSort[Float](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful!
+ case a: Array[Char] => if (ord eq Ordering.Char) java.util.Arrays.sort(a) else mergeSort[Char](a, 0, a.length, ord)
+ case a: Array[Byte] => if (ord eq Ordering.Byte) java.util.Arrays.sort(a) else mergeSort[Byte](a, 0, a.length, ord)
+ case a: Array[Short] => if (ord eq Ordering.Short) java.util.Arrays.sort(a) else mergeSort[Short](a, 0, a.length, ord)
+ case a: Array[Boolean] => if (ord eq Ordering.Boolean) booleanSort(a) else mergeSort[Boolean](a, 0, a.length, ord)
+ // Array[Unit] is matched as an Array[AnyRef] due to covariance in runtime matching. Not worth catching it as a special case.
+ case null => throw new NullPointerException
+ }
- // Swap partition elements back to middle
- val n = off + len
- var s = math.min(a-off, b-a)
- vecswap(off, b-s, s)
- s = math.min(d-c, n-d-1)
- vecswap(b, n-s, s)
+ // TODO: remove unnecessary ClassTag (not binary compatible)
+ /** Sort array `a` using the Ordering on its elements, preserving the original ordering where possible. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag: Ordering](a: Array[K]): Unit = sort(a, Ordering[K])
- // Recursively sort non-partition-elements
- s = b - a
- if (s > 1)
- sort2(off, s)
- s = d - c
- if (s > 1)
- sort2(n-s, s)
- }
- }
- sort2(off, len)
+ // TODO: Remove unnecessary ClassTag (not binary compatible)
+ // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering)
+ /** Sort array `a` using function `f` that computes the less-than relation for each element. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean): Unit = sort(a, Ordering fromLessThan f)
+
+ /** A sorted Array, using the Ordering for the elements in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] = {
+ val ret = a.toArray
+ sort(ret, Ordering[K])
+ ret
}
- private def stableSort[K : ClassTag](a: Array[K], lo: Int, hi: Int, scratch: Array[K], f: (K,K) => Boolean) {
- if (lo < hi) {
- val mid = (lo+hi) / 2
- stableSort(a, lo, mid, scratch, f)
- stableSort(a, mid+1, hi, scratch, f)
- var k, t_lo = lo
- var t_hi = mid + 1
- while (k <= hi) {
- if ((t_lo <= mid) && ((t_hi > hi) || (!f(a(t_hi), a(t_lo))))) {
- scratch(k) = a(t_lo)
- t_lo += 1
- } else {
- scratch(k) = a(t_hi)
- t_hi += 1
- }
- k += 1
- }
- k = lo
- while (k <= hi) {
- a(k) = scratch(k)
- k += 1
- }
- }
+ // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering)
+ /** A sorted Array, given a function `f` that computes the less-than relation for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = {
+ val ret = a.toArray
+ sort(ret, Ordering fromLessThan f)
+ ret
+ }
+
+ /** A sorted Array, given an extraction function `f` that returns an ordered key for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */
+ def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] = {
+ val ret = a.toArray
+ sort(ret, Ordering[M] on f)
+ ret
}
}
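Taken together, the rewritten `Sorting` keeps its public entry points: `quickSort` on primitive arrays delegates to `java.util.Arrays.sort`, and the `stableSort` overloads dispatch through the private `sort` above (falling back to the merge sort for primitives with a non-default ordering). A usage sketch, not part of the patch:
{{{
import scala.util.Sorting
val nums = Array(3, 1, 2)
Sorting.quickSort(nums)  // delegates to java.util.Arrays.sort
assert(nums.toList == List(1, 2, 3))

case class Person(name: String, age: Int)
val people = Array(Person("b", 30), Person("a", 30), Person("c", 20))
Sorting.stableSort(people, (x: Person, y: Person) => x.age < y.age)
assert(people.map(_.name).toList == List("c", "b", "a"))  // equal keys keep their order
}}}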
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index 1a13ac2305..b1b9965614 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -24,11 +24,12 @@ import scala.language.implicitConversions
*
* Example:
* {{{
+ * import scala.io.StdIn
* import scala.util.{Try, Success, Failure}
*
* def divide: Try[Int] = {
- * val dividend = Try(Console.readLine("Enter an Int that you'd like to divide:\n").toInt)
- * val divisor = Try(Console.readLine("Enter an Int that you'd like to divide by:\n").toInt)
+ * val dividend = Try(StdIn.readLine("Enter an Int that you'd like to divide:\n").toInt)
+ * val divisor = Try(StdIn.readLine("Enter an Int that you'd like to divide by:\n").toInt)
* val problem = dividend.flatMap(x => divisor.map(y => x/y))
* problem match {
* case Success(v) =>
@@ -47,7 +48,7 @@ import scala.language.implicitConversions
* catching exceptions along the way. The `flatMap` and `map` combinators in the above example each essentially
* pass off either their successfully completed value, wrapped in the `Success` type for it to be further operated
* upon by the next combinator in the chain, or the exception wrapped in the `Failure` type usually to be simply
- * passed on down the chain. Combinators such as `rescue` and `recover` are designed to provide some type of
+ * passed on down the chain. Combinators such as `recover` and `recoverWith` are designed to provide some type of
* default behavior in the case of failure.
*
* ''Note'': only non-fatal exceptions are caught by the combinators on `Try` (see [[scala.util.control.NonFatal]]).
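The reworded paragraph points to `recover` and `recoverWith` for fallback behaviour: `recover` maps a `Failure` to a plain value, `recoverWith` to another `Try`. A short sketch (illustration only):
{{{
import scala.util.Try
val n = Try("seven".toInt).recover { case _: NumberFormatException => 0 }
assert(n.get == 0)
val m = Try("seven".toInt).recoverWith { case _: NumberFormatException => Try(7) }
assert(m.get == 7)
}}}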
diff --git a/src/partest-extras/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala
index 5b65d6ab9b..1fde2370d3 100644
--- a/src/partest-extras/scala/tools/partest/ReplTest.scala
+++ b/src/partest-extras/scala/tools/partest/ReplTest.scala
@@ -75,18 +75,20 @@ abstract class SessionTest extends ReplTest {
* Retain user input: prompt lines and continuations, without the prefix; or pasted text plus ctl-D.
*/
import SessionTest._
- override final def code = input findAllMatchIn (expected mkString ("", "\n", "\n")) map {
- case input(null, null, prompted) =>
+ lazy val pasted = input(prompt)
+ override final def code = pasted findAllMatchIn (expected mkString ("", "\n", "\n")) map {
+ case pasted(null, null, prompted) =>
def continued(m: Match): Option[String] = m match {
case margin(text) => Some(text)
case _ => None
}
margin.replaceSomeIn(prompted, continued)
- case input(cmd, pasted, null) =>
+ case pasted(cmd, pasted, null) =>
cmd + pasted + "\u0004"
} mkString
- final def prompt = "scala> "
+ // Just the last line of the interactive prompt
+ def prompt = "scala> "
/** Default test is to compare expected and actual output and emit the diff on a failed comparison. */
override def show() = {
@@ -98,7 +100,7 @@ abstract class SessionTest extends ReplTest {
}
object SessionTest {
// \R for line break is Java 8, \v for vertical space might suffice
- val input = """(?m)^scala> (:pa.*\u000A)// Entering paste mode.*\u000A\u000A((?:.*\u000A)*)\u000A// Exiting paste mode.*\u000A|^scala> (.*\u000A(?:\s*\| .*\u000A)*)""".r
+ def input(prompt: String) = s"""(?m)^$prompt(:pa.*\u000A)// Entering paste mode.*\u000A\u000A((?:.*\u000A)*)\u000A// Exiting paste mode.*\u000A|^scala> (.*\u000A(?:\\s*\\| .*\u000A)*)""".r
val margin = """(?m)^\s*\| (.*)$""".r
}
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index bcad84a3f0..d3294dad9b 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -48,7 +48,7 @@ import scala.language.implicitConversions
* ''Of Note:'' This part of the Reflection API is being considered as a candidate for redesign. It is
* quite possible that in future releases of the reflection API, flag sets could be replaced with something else.
*
- * For more details about `FlagSet`s andĀ other aspects of Scala reflection, see the
+ * For more details about `FlagSet`s and other aspects of Scala reflection, see the
* [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]]
*
* @group ReflectionAPI
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 01b9759c70..c0abc5120c 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -130,7 +130,7 @@ import java.io.{ PrintWriter, StringWriter }
* TermName("y")#2541#GET))
* }}}
*
- * For more details about `Printer`s andĀ other aspects of Scala reflection, see the
+ * For more details about `Printer`s and other aspects of Scala reflection, see the
* [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]]
*
* @group ReflectionAPI
diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
index 74310e1c34..1ba014d19d 100644
--- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
@@ -60,7 +60,7 @@ trait AnnotationCheckers {
* mode (see method adapt in trait Typers).
*
* An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
- * class cannot do the adaptiong, it should return the tree unchanged.
+ * class cannot do the adapting, it should return the tree unchanged.
*/
@deprecated("Create an AnalyzerPlugin and use adaptAnnotations", "2.10.1")
def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree = tree
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index e0a6757d34..53241fb15b 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -344,10 +344,12 @@ object ClassfileConstants {
case JAVA_ACC_STATIC => STATIC
case JAVA_ACC_ABSTRACT => if (isAnnotation) 0L else if (isClass) ABSTRACT else DEFERRED
case JAVA_ACC_INTERFACE => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT
+ case JAVA_ACC_ENUM => ENUM
case _ => 0L
}
- private def translateFlags(jflags: Int, baseFlags: Long, isAnnotation: Boolean, isClass: Boolean): Long = {
- def translateFlag0(jflags: Int): Long = translateFlag(jflags, isAnnotation, isClass)
+ private def translateFlags(jflags: Int, baseFlags: Long, isClass: Boolean): Long = {
+ val isAnnot = isAnnotation(jflags)
+ def translateFlag0(jflags: Int): Long = translateFlag(jflags, isAnnot, isClass)
var res: Long = JAVA | baseFlags
/* fast, elegant, maintainable, pick any two... */
res |= translateFlag0(jflags & JAVA_ACC_PRIVATE)
@@ -357,17 +359,18 @@ object ClassfileConstants {
res |= translateFlag0(jflags & JAVA_ACC_STATIC)
res |= translateFlag0(jflags & JAVA_ACC_ABSTRACT)
res |= translateFlag0(jflags & JAVA_ACC_INTERFACE)
+ res |= translateFlag0(jflags & JAVA_ACC_ENUM)
res
}
def classFlags(jflags: Int): Long = {
- translateFlags(jflags, 0, isAnnotation(jflags), isClass = true)
+ translateFlags(jflags, 0, isClass = true)
}
def fieldFlags(jflags: Int): Long = {
- translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0 , isAnnotation(jflags), isClass = false)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0 , isClass = false)
}
def methodFlags(jflags: Int): Long = {
- translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | ARTIFACT else 0, isAnnotation(jflags), isClass = false)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | ARTIFACT else 0, isClass = false)
}
}
object FlagTranslation extends FlagTranslation { }
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 5b20d9db8e..f3dd6a3280 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -369,6 +369,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val JavaEnumClass = requiredClass[java.lang.Enum[_]]
lazy val RemoteInterfaceClass = requiredClass[java.rmi.Remote]
lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException]
+ lazy val JavaUtilMap = requiredClass[java.util.Map[_, _]]
+ lazy val JavaUtilHashMap = requiredClass[java.util.HashMap[_, _]]
lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyTpe)
lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.tpe))
@@ -514,6 +516,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature]
lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
+ lazy val LambdaMetaFactory = getClassIfDefined("java.lang.invoke.LambdaMetafactory")
lazy val MethodHandle = getClassIfDefined("java.lang.invoke.MethodHandle")
// Option classes
@@ -1515,8 +1518,7 @@ trait Definitions extends api.StandardDefinitions {
private lazy val PolySigMethods: Set[Symbol] = Set[Symbol](MethodHandle.info.decl(sn.Invoke), MethodHandle.info.decl(sn.InvokeExact)).filter(_.exists)
lazy val Scala_Java8_CompatPackage = rootMirror.getPackageIfDefined("scala.compat.java8")
- lazy val Scala_Java8_CompatPackage_JFunction = (0 to MaxTupleArity).toArray map (i => getMemberIfDefined(Scala_Java8_CompatPackage.moduleClass, TypeName("JFunction" + i)))
- lazy val Scala_Java8_CompatPackage_JProcedure = (0 to MaxTupleArity).toArray map (i => getMemberIfDefined(Scala_Java8_CompatPackage.moduleClass, TypeName("JProcedure" + i)))
+ lazy val Scala_Java8_CompatPackage_JFunction = (0 to MaxFunctionArity).toArray map (i => getMemberIfDefined(Scala_Java8_CompatPackage.moduleClass, TypeName("JFunction" + i)))
}
}
}
diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala
index eddfec82e7..d393a841b7 100644
--- a/src/reflect/scala/reflect/internal/ReificationSupport.scala
+++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala
@@ -802,7 +802,7 @@ trait ReificationSupport { self: SymbolTable =>
require(enums.nonEmpty, "enumerators can't be empty")
enums.head match {
case SyntacticValFrom(_, _) =>
- case t => throw new IllegalArgumentException(s"$t is not a valid fist enumerator of for loop")
+ case t => throw new IllegalArgumentException(s"$t is not a valid first enumerator of for loop")
}
enums.tail.foreach {
case SyntacticValEq(_, _) | SyntacticValFrom(_, _) | SyntacticFilter(_) =>
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 15584c382c..26e3cf6d0b 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -1168,6 +1168,8 @@ trait StdNames {
final val Invoke: TermName = newTermName("invoke")
final val InvokeExact: TermName = newTermName("invokeExact")
+ final val AltMetafactory: TermName = newTermName("altMetafactory")
+
val Boxed = immutable.Map[TypeName, TypeName](
tpnme.Boolean -> BoxedBoolean,
tpnme.Byte -> BoxedByte,
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index f9ebae64d2..478b1b9732 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -993,7 +993,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
|| isLocalToBlock
)
)
- /** Is this symbol effectively final or a concrete term member of sealed class whose childred do not override it */
+ /** Is this symbol effectively final or a concrete term member of sealed class whose children do not override it */
final def isEffectivelyFinalOrNotOverridden: Boolean = isEffectivelyFinal || (isTerm && !isDeferred && isNotOverridden)
/** Is this symbol owned by a package? */
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index d6d2cf3383..891fccb3e1 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -564,7 +564,7 @@ private[internal] trait TypeMaps {
| tparams ${rhsSym.typeParams map own_s mkString ", "}
|"""
- if (argIndex < 0)
+ if (!rhsArgs.isDefinedAt(argIndex))
abort(s"Something is wrong: cannot find $lhs in applied type $rhs\n" + explain)
else {
val targ = rhsArgs(argIndex)
diff --git a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
index e622e78d57..35858cdc78 100644
--- a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
+++ b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
@@ -13,7 +13,7 @@ trait StripMarginInterpolator {
* The margin of each line is defined by whitespace leading up to a '|' character.
* This margin is stripped '''before''' the arguments are interpolated into to string.
*
- * String escape sequences are '''not''' processed; this interpolater is designed to
+ * String escape sequences are '''not''' processed; this interpolator is designed to
* be used with triple quoted Strings.
*
* {{{
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index ce60ade9f5..8c32a92ecd 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -1184,6 +1184,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe))
propagatePackageBoundary(jconstr.javaFlags, constr)
copyAnnotations(constr, jconstr)
+ if (jconstr.javaFlags.isVarargs) constr modifyInfo arrayToRepeated
markAllCompleted(constr)
constr
}
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
index 1c0aa7cf6d..ea213cadd9 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -255,6 +255,8 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.JavaEnumClass
definitions.RemoteInterfaceClass
definitions.RemoteExceptionClass
+ definitions.JavaUtilMap
+ definitions.JavaUtilHashMap
definitions.ByNameParamClass
definitions.JavaRepeatedParamClass
definitions.RepeatedParamClass
@@ -310,6 +312,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.QuasiquoteClass_api_unapply
definitions.ScalaSignatureAnnotation
definitions.ScalaLongSignatureAnnotation
+ definitions.LambdaMetaFactory
definitions.MethodHandle
definitions.OptionClass
definitions.OptionModule
diff --git a/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala
index dddfb1b8f6..b6c9792ec0 100644
--- a/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
+++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala
@@ -1,20 +1,22 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
+ * Copyright 2005-2015 LAMP/EPFL
* @author Paul Phillips
*/
-package scala.tools.nsc
-package interpreter
-package session
+package scala.tools.nsc.interpreter.jline
-import scala.tools.nsc.io._
-import FileBackedHistory._
+import _root_.jline.console.history.PersistentHistory
+
+
+import scala.tools.nsc.interpreter
+import scala.tools.nsc.io.{File, Path}
/** TODO: file locking.
- */
-trait FileBackedHistory extends JLineHistory with JPersistentHistory {
+ */
+trait FileBackedHistory extends JLineHistory with PersistentHistory {
def maxSize: Int
- protected lazy val historyFile: File = defaultFile
+
+ protected lazy val historyFile: File = FileBackedHistory.defaultFile
private var isPersistent = true
locally {
@@ -27,6 +29,7 @@ trait FileBackedHistory extends JLineHistory with JPersistentHistory {
try op
finally isPersistent = saved
}
+
def addLineToFile(item: CharSequence): Unit = {
if (isPersistent)
append(item + "\n")
@@ -37,6 +40,7 @@ trait FileBackedHistory extends JLineHistory with JPersistentHistory {
val lines = asStrings map (_ + "\n")
historyFile.writeAll(lines: _*)
}
+
/** Append one or more lines to the history file. */
protected def append(lines: String*): Unit = {
historyFile.appendAll(lines: _*)
@@ -54,31 +58,36 @@ trait FileBackedHistory extends JLineHistory with JPersistentHistory {
// than abandon hope we'll try to read it as ISO-8859-1
case _: Exception =>
try historyFile.lines("ISO-8859-1").toIndexedSeq
- catch { case _: Exception => Vector() }
+ catch {
+ case _: Exception => Vector()
+ }
}
}
- repldbg("Loading " + lines.size + " into history.")
+ interpreter.repldbg("Loading " + lines.size + " into history.")
// avoid writing to the history file
withoutSaving(lines takeRight maxSize foreach add)
// truncate the history file if it's too big.
if (lines.size > maxSize) {
- repldbg("File exceeds maximum size: truncating to " + maxSize + " entries.")
+ interpreter.repldbg("File exceeds maximum size: truncating to " + maxSize + " entries.")
sync()
}
moveToEnd()
}
def flush(): Unit = ()
+
def purge(): Unit = historyFile.truncate()
}
object FileBackedHistory {
// val ContinuationChar = '\003'
// val ContinuationNL: String = Array('\003', '\n').mkString
- import Properties.userHome
+
+ import scala.tools.nsc.Properties.userHome
def defaultFileName = ".scala_history"
+
def defaultFile: File = File(Path(userHome) / defaultFileName)
}
diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala
new file mode 100644
index 0000000000..c18a9809a0
--- /dev/null
+++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala
@@ -0,0 +1,25 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc.interpreter.jline
+
+import scala.tools.nsc.interpreter
+
+import _root_.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList }
+
+// implements a jline interface
+class JLineDelimiter extends ArgumentDelimiter {
+ def toJLine(args: List[String], cursor: Int) = args match {
+ case Nil => new ArgumentList(new Array[String](0), 0, 0, cursor)
+ case xs => new ArgumentList(xs.toArray, xs.size - 1, xs.last.length, cursor)
+ }
+
+ def delimit(buffer: CharSequence, cursor: Int) = {
+ val p = interpreter.Parsed(buffer.toString, cursor)
+ toJLine(p.args, cursor)
+ }
+
+ def isDelimiter(buffer: CharSequence, cursor: Int) = interpreter.Parsed(buffer.toString, cursor).isDelimiter
+}
diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala
new file mode 100644
index 0000000000..1f6a1f7022
--- /dev/null
+++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala
@@ -0,0 +1,77 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc.interpreter.jline
+
+import java.util.{Iterator => JIterator, ListIterator => JListIterator}
+
+import _root_.jline.{console => jconsole}
+import jconsole.history.History.{Entry => JEntry}
+import jconsole.history.{History => JHistory}
+
+import scala.tools.nsc.interpreter
+import scala.tools.nsc.interpreter.session.{History, SimpleHistory}
+
+
+/** A straight scalification of the jline interface which mixes
+ * in the sparse jline-independent one too.
+ */
+trait JLineHistory extends JHistory with History {
+ def size: Int
+ def isEmpty: Boolean
+ def index: Int
+ def clear(): Unit
+ def get(index: Int): CharSequence
+ def add(line: CharSequence): Unit
+ def replace(item: CharSequence): Unit
+
+ def entries(index: Int): JListIterator[JEntry]
+ def entries(): JListIterator[JEntry]
+ def iterator: JIterator[JEntry]
+
+ def current(): CharSequence
+ def previous(): Boolean
+ def next(): Boolean
+ def moveToFirst(): Boolean
+ def moveToLast(): Boolean
+ def moveTo(index: Int): Boolean
+ def moveToEnd(): Unit
+
+ override def historicize(text: String): Boolean = {
+ text.lines foreach add
+ moveToEnd()
+ true
+ }
+}
+
+object JLineHistory {
+ class JLineFileHistory extends SimpleHistory with FileBackedHistory {
+ override def add(item: CharSequence): Unit = {
+ if (!isEmpty && last == item)
+ interpreter.repldbg("Ignoring duplicate entry '" + item + "'")
+ else {
+ super.add(item)
+ addLineToFile(item)
+ }
+ }
+ override def toString = "History(size = " + size + ", index = " + index + ")"
+
+ import scala.collection.JavaConverters._
+
+ override def asStrings(from: Int, to: Int): List[String] =
+ entries(from).asScala.take(to - from).map(_.value.toString).toList
+
+ case class Entry(index: Int, value: CharSequence) extends JEntry {
+ override def toString = value.toString
+ }
+
+ private def toEntries(): Seq[JEntry] = buf.zipWithIndex map { case (x, i) => Entry(i, x)}
+ def entries(idx: Int): JListIterator[JEntry] = toEntries().asJava.listIterator(idx)
+ def entries(): JListIterator[JEntry] = toEntries().asJava.listIterator()
+ def iterator: JIterator[JEntry] = toEntries().iterator.asJava
+ }
+
+ def apply(): History = try new JLineFileHistory catch { case x: Exception => new SimpleHistory() }
+}
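
A minimal, runnable sketch of the duplicate-suppressing add and line-splitting historicize seen in JLineFileHistory above (standalone illustration, not the repl's own class):

  import scala.collection.mutable.ListBuffer

  object HistorySketch {
    private val buf = ListBuffer[String]()

    // ignore an entry that repeats the most recent one, mirroring JLineFileHistory.add
    def add(item: String): Unit =
      if (buf.nonEmpty && buf.last == item) ()
      else buf += item

    // split a block of text into lines and add each one
    def historicize(text: String): Unit = text.split("\n") foreach add

    def main(args: Array[String]): Unit = {
      historicize("val x = 1\nval x = 1\nval y = 2")
      println(buf.mkString(", "))  // prints: val x = 1, val y = 2
    }
  }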
diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala
new file mode 100644
index 0000000000..f0fce13fe8
--- /dev/null
+++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala
@@ -0,0 +1,143 @@
+/** NSC -- new Scala compiler
+ *
+ * Copyright 2005-2015 LAMP/EPFL
+ * @author Stepan Koltsov
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.interpreter.jline
+
+import java.util.{Collection => JCollection, List => JList}
+
+import _root_.jline.{console => jconsole}
+import jconsole.completer.{Completer, ArgumentCompleter}
+import jconsole.history.{History => JHistory}
+
+
+import scala.tools.nsc.interpreter
+import scala.tools.nsc.interpreter.Completion
+import scala.tools.nsc.interpreter.Completion.Candidates
+import scala.tools.nsc.interpreter.session.History
+
+/**
+ * Reads from the console using JLine.
+ *
+ * Eagerly instantiates all relevant JLine classes, so that we can detect linkage errors on `new JLineReader` and retry.
+ */
+class InteractiveReader(completer: () => Completion) extends interpreter.InteractiveReader {
+ val interactive = true
+
+ val history: History = new JLineHistory.JLineFileHistory()
+
+ private val consoleReader = {
+ val reader = new JLineConsoleReader()
+
+ reader setPaginationEnabled interpreter.`package`.isPaged
+
+ // ASAP
+ reader setExpandEvents false
+
+ reader setHistory history.asInstanceOf[JHistory]
+
+ reader
+ }
+
+ private[this] var _completion: Completion = interpreter.NoCompletion
+ def completion: Completion = _completion
+
+ override def postInit() = {
+ _completion = completer()
+
+ consoleReader.initCompletion(completion)
+ }
+
+ def reset() = consoleReader.getTerminal().reset()
+ def redrawLine() = consoleReader.redrawLineAndFlush()
+ def readOneLine(prompt: String) = consoleReader.readLine(prompt)
+ def readOneKey(prompt: String) = consoleReader.readOneKey(prompt)
+}
+
+// implements a jline interface
+private class JLineConsoleReader extends jconsole.ConsoleReader with interpreter.VariColumnTabulator {
+ val isAcross = interpreter.`package`.isAcross
+ val marginSize = 3
+
+ def width = getTerminal.getWidth()
+ def height = getTerminal.getHeight()
+
+ private def morePrompt = "--More--"
+
+ private def emulateMore(): Int = {
+ val key = readOneKey(morePrompt)
+ try key match {
+ case '\r' | '\n' => 1
+ case 'q' => -1
+ case _ => height - 1
+ }
+ finally {
+ eraseLine()
+ // TODO: still not quite managing to erase --More-- and get
+ // back to a scala prompt without another keypress.
+ if (key == 'q') {
+ putString(getPrompt())
+ redrawLine()
+ flush()
+ }
+ }
+ }
+
+ override def printColumns(items: JCollection[_ <: CharSequence]): Unit = {
+ import scala.tools.nsc.interpreter.javaCharSeqCollectionToScala
+ printColumns_(items: List[String])
+ }
+
+ private def printColumns_(items: List[String]): Unit = if (items exists (_ != "")) {
+ val grouped = tabulate(items)
+ var linesLeft = if (isPaginationEnabled()) height - 1 else Int.MaxValue
+ grouped foreach { xs =>
+ println(xs.mkString)
+ linesLeft -= 1
+ if (linesLeft <= 0) {
+ linesLeft = emulateMore()
+ if (linesLeft < 0)
+ return
+ }
+ }
+ }
+
+ def readOneKey(prompt: String) = {
+ this.print(prompt)
+ this.flush()
+ this.readCharacter()
+ }
+
+ def eraseLine() = resetPromptLine("", "", 0)
+
+ def redrawLineAndFlush(): Unit = {
+ flush(); drawLine(); flush()
+ }
+
+ // A hook for running code after the repl is done initializing.
+ def initCompletion(completion: Completion): Unit = {
+ this setBellEnabled false
+
+ if (completion ne interpreter.NoCompletion) {
+ val jlineCompleter = new ArgumentCompleter(new JLineDelimiter,
+ new Completer {
+ val tc = completion.completer()
+ def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = {
+ val buf = if (_buf == null) "" else _buf
+ val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor)
+ newCandidates foreach (candidates add _)
+ newCursor
+ }
+ }
+ )
+
+ jlineCompleter setStrict false
+
+ this addCompleter jlineCompleter
+ this setAutoprintThreshold 400 // max completion candidates without warning
+ }
+ }
+}
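
The pagination in JLineConsoleReader above boils down to one decision per key press at the --More-- prompt. A standalone sketch of just that decision (the real code also erases the prompt line and redraws it):

  object MoreSketch {
    // Enter grants one more line, 'q' aborts, any other key grants another page.
    def linesGranted(key: Char, pageHeight: Int): Int = key match {
      case '\r' | '\n' => 1
      case 'q'         => -1
      case _           => pageHeight - 1
    }

    def main(args: Array[String]): Unit = {
      println(linesGranted('\n', pageHeight = 24)) // 1
      println(linesGranted('q',  pageHeight = 24)) // -1
      println(linesGranted(' ',  pageHeight = 24)) // 23
    }
  }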
diff --git a/src/repl/scala/tools/nsc/interpreter/Delimited.scala b/src/repl/scala/tools/nsc/interpreter/Delimited.scala
deleted file mode 100644
index b7f06f1d0a..0000000000
--- a/src/repl/scala/tools/nsc/interpreter/Delimited.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList }
-
-class JLineDelimiter extends ArgumentDelimiter {
- def toJLine(args: List[String], cursor: Int) = args match {
- case Nil => new ArgumentList(new Array[String](0), 0, 0, cursor)
- case xs => new ArgumentList(xs.toArray, xs.size - 1, xs.last.length, cursor)
- }
-
- def delimit(buffer: CharSequence, cursor: Int) = {
- val p = Parsed(buffer.toString, cursor)
- toJLine(p.args, cursor)
- }
- def isDelimiter(buffer: CharSequence, cursor: Int) = Parsed(buffer.toString, cursor).isDelimiter
-}
-
-trait Delimited {
- self: Parsed =>
-
- def delimited: Char => Boolean
- def escapeChars: List[Char] = List('\\')
-
- /** Break String into args based on delimiting function.
- */
- protected def toArgs(s: String): List[String] =
- if (s == "") Nil
- else (s indexWhere isDelimiterChar) match {
- case -1 => List(s)
- case idx => (s take idx) :: toArgs(s drop (idx + 1))
- }
-
- def isDelimiterChar(ch: Char) = delimited(ch)
- def isEscapeChar(ch: Char): Boolean = escapeChars contains ch
-}
diff --git a/src/repl/scala/tools/nsc/interpreter/Formatting.scala b/src/repl/scala/tools/nsc/interpreter/Formatting.scala
index 43e653edfd..844997429c 100644
--- a/src/repl/scala/tools/nsc/interpreter/Formatting.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Formatting.scala
@@ -8,28 +8,25 @@ package interpreter
import util.stringFromWriter
-trait Formatting {
- def prompt: String
+class Formatting(indent: Int) {
- def spaces(code: String): String = {
+ private val indentation = " " * indent
+
+ private def indenting(code: String): Boolean = {
/** Heuristic to avoid indenting and thereby corrupting """-strings and XML literals. */
val tokens = List("\"\"\"", "</", "/>")
val noIndent = (code contains "\n") && (tokens exists code.contains)
- if (noIndent) ""
- else prompt drop 1 map (_ => ' ')
+ !noIndent
}
/** Indent some code by the width of the scala> prompt.
* This way, compiler error messages read better.
*/
- def indentCode(code: String) = {
- val indent = spaces(code)
- stringFromWriter(str =>
- for (line <- code.lines) {
- str print indent
- str print (line + "\n")
- str.flush()
- }
- )
- }
+ def indentCode(code: String) = stringFromWriter(str =>
+ for (line <- code.lines) {
+ if (indenting(code)) str print indentation
+ str println line
+ str.flush()
+ }
+ )
}
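
The reworked Formatting above indents each line by the prompt's width unless the snippet is multi-line and contains tokens that indentation would corrupt. A self-contained sketch of that heuristic (illustrative names, not the repl's class):

  object IndentSketch {
    def indentCode(code: String, indent: Int): String = {
      val tokens   = List("\"\"\"", "</", "/>")
      val noIndent = (code contains "\n") && (tokens exists code.contains)
      val pad      = if (noIndent) "" else " " * indent
      code.split("\n", -1).map(pad + _).mkString("\n")
    }

    def main(args: Array[String]): Unit = {
      println(indentCode("val x = 1\nval y = 2", indent = 7))      // both lines indented
      println(indentCode("val s = \"\"\"a\n b\"\"\"", indent = 7)) // left untouched
    }
  }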
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index 4221126caa..525609171e 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -26,6 +26,8 @@ import scala.concurrent.{ ExecutionContext, Await, Future, future }
import ExecutionContext.Implicits._
import java.io.{ BufferedReader, FileReader }
+import scala.util.{Try, Success, Failure}
+
/** The Scala interactive shell. It provides a read-eval-print loop
* around the Interpreter class.
* After instantiation, clients should call the main() method.
@@ -109,11 +111,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
class ILoopInterpreter extends IMain(settings, out) {
- outer =>
-
- override lazy val formatting = new Formatting {
- def prompt = ILoop.this.prompt
- }
+ // the expanded prompt but without color escapes and without leading newline, for purposes of indenting
+ override lazy val formatting: Formatting = new Formatting(
+ (replProps.promptString format Properties.versionNumberString).lines.toList.last.length
+ )
override protected def parentClassLoader =
settings.explicitParentLoader.getOrElse( classOf[ILoop].getClassLoader )
}
@@ -197,10 +198,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
echo("%d %s".format(index + offset, line))
}
- private val currentPrompt = Properties.shellPromptString
-
/** Prompt to print when awaiting input */
- def prompt = currentPrompt
+ def prompt = replProps.prompt
import LoopCommand.{ cmd, nullary }
@@ -410,14 +409,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
private def readOneLine() = {
- import scala.io.AnsiColor.{ MAGENTA, RESET }
out.flush()
- in readLine (
- if (replProps.colorOk)
- MAGENTA + prompt + RESET
- else
- prompt
- )
+ in readLine prompt
}
/** The main read-eval-print loop for the repl. It calls
@@ -503,10 +496,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
val errless = intp compileSources new BatchSourceFile("<pastie>", s"object pastel {\n$code\n}")
if (errless) echo("The compiler reports no errors.")
}
- def historicize(text: String) = history match {
- case jlh: JLineHistory => text.lines foreach jlh.add ; jlh.moveToEnd() ; true
- case _ => false
- }
+
def edit(text: String): Result = editor match {
case Some(ed) =>
val tmp = File.makeTemp()
@@ -522,7 +512,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
val res = intp interpret edited
if (res == IR.Incomplete) diagnose(edited)
else {
- historicize(edited)
+ history.historicize(edited)
Result(lineToRecord = Some(edited), keepRunning = true)
}
case None => echo("Can't read edited text. Did you delete it?")
@@ -533,7 +523,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
tmp.delete()
}
case None =>
- if (historicize(text)) echo("Placing text in recent history.")
+ if (history.historicize(text)) echo("Placing text in recent history.")
else echo(f"No EDITOR defined and you can't change history, echoing your text:%n$text")
}
@@ -565,10 +555,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
import scala.collection.JavaConverters._
val index = (start - 1) max 0
- val text = history match {
- case jlh: JLineHistory => jlh.entries(index).asScala.take(len) map (_.value) mkString "\n"
- case _ => history.asStrings.slice(index, index + len) mkString "\n"
- }
+ val text = history.asStrings(index, index + len) mkString "\n"
edit(text)
} catch {
case _: NumberFormatException => echo(s"Bad range '$what'")
@@ -774,8 +761,13 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
private object paste extends Pasted {
+ import scala.util.matching.Regex.quote
val ContinueString = " | "
- val PromptString = "scala> "
+ val PromptString = prompt.lines.toList.last
+ val anyPrompt = s"""\\s*(?:${quote(PromptString.trim)}|${quote(AltPromptString.trim)})\\s*""".r
+
+ def isPrompted(line: String) = matchesPrompt(line)
+ def isPromptOnly(line: String) = line match { case anyPrompt() => true ; case _ => false }
def interpret(line: String): Unit = {
echo(line.trim)
@@ -785,10 +777,17 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def transcript(start: String) = {
echo("\n// Detected repl transcript paste: ctrl-D to finish.\n")
- apply(Iterator(start) ++ readWhile(_.trim != PromptString.trim))
+ apply(Iterator(start) ++ readWhile(!isPromptOnly(_)))
}
+
+ def unapply(line: String): Boolean = isPrompted(line)
+ }
+
+ private object invocation {
+ def unapply(line: String): Boolean = Completion.looksLikeInvocation(line)
}
- import paste.{ ContinueString, PromptString }
+
+ private val lineComment = """\s*//.*""".r // matches a whole-line comment

/** Interpret expressions starting with the first line.
* Read lines until a complete compilation unit is available
@@ -800,53 +799,42 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
// signal completion non-completion input has been received
in.completion.resetVerbosity()
- def reallyInterpret = {
- val reallyResult = intp.interpret(code)
- (reallyResult, reallyResult match {
- case IR.Error => None
- case IR.Success => Some(code)
- case IR.Incomplete =>
- if (in.interactive && code.endsWith("\n\n")) {
- echo("You typed two blank lines. Starting a new command.")
+ def reallyInterpret = intp.interpret(code) match {
+ case IR.Error => None
+ case IR.Success => Some(code)
+ case IR.Incomplete if in.interactive && code.endsWith("\n\n") =>
+ echo("You typed two blank lines. Starting a new command.")
+ None
+ case IR.Incomplete =>
+ in.readLine(paste.ContinueString) match {
+ case null =>
+ // we know compilation is going to fail since we're at EOF and the
+ // parser thinks the input is still incomplete, but since this is
+ // a file being read non-interactively we want to fail. So we send
+ // it straight to the compiler for the nice error message.
+ intp.compileString(code)
None
- }
- else in.readLine(ContinueString) match {
- case null =>
- // we know compilation is going to fail since we're at EOF and the
- // parser thinks the input is still incomplete, but since this is
- // a file being read non-interactively we want to fail. So we send
- // it straight to the compiler for the nice error message.
- intp.compileString(code)
- None
-
- case line => interpretStartingWith(code + "\n" + line)
- }
- })
+
+ case line => interpretStartingWith(code + "\n" + line)
+ }
}
- /** Here we place ourselves between the user and the interpreter and examine
- * the input they are ostensibly submitting. We intervene in several cases:
+ /* Here we place ourselves between the user and the interpreter and examine
+ * the input they are ostensibly submitting. We intervene in several cases:
*
- * 1) If the line starts with "scala> " it is assumed to be an interpreter paste.
- * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation
- * on the previous result.
- * 3) If the Completion object's execute returns Some(_), we inject that value
- * and avoid the interpreter, as it's likely not valid scala code.
+ * 1) If the line starts with "scala> " it is assumed to be an interpreter paste.
+ * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation
+ * on the previous result.
+ * 3) If the Completion object's execute returns Some(_), we inject that value
+ * and avoid the interpreter, as it's likely not valid scala code.
*/
- if (code == "") None
- else if (!paste.running && code.trim.startsWith(PromptString)) {
- paste.transcript(code)
- None
- }
- else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") {
- interpretStartingWith(intp.mostRecentVar + code)
- }
- else if (code.trim startsWith "//") {
- // line comment, do nothing
- None
+ code match {
+ case "" => None
+ case lineComment() => None // line comment, do nothing
+ case paste() if !paste.running => paste.transcript(code) ; None
+ case invocation() if intp.mostRecentVar != "" => interpretStartingWith(intp.mostRecentVar + code)
+ case _ => reallyInterpret
}
- else
- reallyInterpret._2
}
// runs :load `file` on any files passed via -i
@@ -866,16 +854,36 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
* with SimpleReader.
*/
def chooseReader(settings: Settings): InteractiveReader = {
- if (settings.Xnojline || Properties.isEmacsShell)
- SimpleReader()
- else try new JLineReader(
- if (settings.noCompletion) NoCompletion
- else new JLineCompletion(intp)
- )
- catch {
- case ex @ (_: Exception | _: NoClassDefFoundError) =>
- echo(f"Failed to created JLineReader: ${ex}%nFalling back to SimpleReader.")
- SimpleReader()
+ if (settings.Xnojline || Properties.isEmacsShell) SimpleReader()
+ else {
+ type Completer = () => Completion
+ type ReaderMaker = Completer => InteractiveReader
+
+ def instantiate(className: String): ReaderMaker = completer => {
+ if (settings.debug) Console.println(s"Trying to instantiate an InteractiveReader from $className")
+ Class.forName(className).getConstructor(classOf[Completer]).
+ newInstance(completer).
+ asInstanceOf[InteractiveReader]
+ }
+
+ def mkReader(maker: ReaderMaker) =
+ if (settings.noCompletion) maker(() => NoCompletion)
+ else maker(() => new JLineCompletion(intp)) // JLineCompletion is a misnomer -- it's not tied to jline
+
+ def internalClass(kind: String) = s"scala.tools.nsc.interpreter.$kind.InteractiveReader"
+ val readerClasses = sys.props.get("scala.repl.reader").toStream ++ Stream(internalClass("jline"), internalClass("jline_embedded"))
+ val readers = readerClasses map (cls => Try { mkReader(instantiate(cls)) })
+
+ val reader = (readers collect { case Success(reader) => reader } headOption) getOrElse SimpleReader()
+
+ if (settings.debug) {
+ val readerDiags = (readerClasses, readers).zipped map {
+ case (cls, Failure(e)) => s" - $cls --> " + e.getStackTrace.mkString(e.toString+"\n\t", "\n\t","\n")
+ case (cls, Success(_)) => s" - $cls OK"
+ }
+ Console.println(s"All InteractiveReaders tried: ${readerDiags.mkString("\n","\n","\n")}")
+ }
+ reader
}
}
@@ -896,10 +904,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
asyncMessage(power.banner)
}
// SI-7418 Now, and only now, can we enable TAB completion.
- in match {
- case x: JLineReader => x.consoleReader.postInit
- case _ =>
- }
+ in.postInit()
}
// start an interpreter with the given settings
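
chooseReader above now walks a list of candidate class names, instantiates each one reflectively inside a Try, and keeps the first reader that links and constructs successfully. A generic sketch of that "first factory that works" pattern (class names and the string fallback are illustrative):

  import scala.util.{Try, Success}

  object FirstSuccess {
    def instantiate(className: String): AnyRef =
      Class.forName(className).getDeclaredConstructor().newInstance().asInstanceOf[AnyRef]

    // Stream keeps this lazy: later candidates are never constructed once one succeeds.
    def firstWorking(classNames: List[String], fallback: => AnyRef): AnyRef = {
      val attempts = classNames.toStream.map(cls => Try(instantiate(cls)))
      attempts.collectFirst { case Success(r) => r }.getOrElse(fallback)
    }

    def main(args: Array[String]): Unit = {
      val r = firstWorking(List("does.not.Exist", "java.lang.StringBuilder"), fallback = "plain reader")
      println(r.getClass.getName) // java.lang.StringBuilder
    }
  }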
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index c281126d5f..2550a5dc57 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -69,6 +69,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
// Used in a test case.
def showDirectory() = replOutput.show(out)
+ lazy val isClassBased: Boolean = settings.Yreplclassbased.value
+
private[nsc] var printResults = true // whether to print result lines
private[nsc] var totalSilence = false // whether to print anything
private var _initializeComplete = false // compiler is initialized
@@ -110,12 +112,13 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def this(factory: ScriptEngineFactory) = this(factory, new Settings())
def this() = this(new Settings())
- lazy val formatting: Formatting = new Formatting {
- val prompt = Properties.shellPromptString
- }
+ // the expanded prompt but without color escapes and without leading newline, for purposes of indenting
+ lazy val formatting: Formatting = new Formatting(
+ (replProps.promptString format Properties.versionNumberString).lines.toList.last.length
+ )
lazy val reporter: ReplReporter = new ReplReporter(this)
- import formatting._
+ import formatting.indentCode
import reporter.{ printMessage, printUntruncatedMessage }
// This exists mostly because using the reporter too early leads to deadlock.
@@ -310,8 +313,14 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
def originalPath(name: String): String = originalPath(TermName(name))
- def originalPath(name: Name): String = typerOp path name
- def originalPath(sym: Symbol): String = typerOp path sym
+ def originalPath(name: Name): String = translateOriginalPath(typerOp path name)
+ def originalPath(sym: Symbol): String = translateOriginalPath(typerOp path sym)
+ /** For class-based repl mode we use a `.INSTANCE` accessor. */
+ val readInstanceName = if (isClassBased) ".INSTANCE" else ""
+ def translateOriginalPath(p: String): String = {
+ val readName = java.util.regex.Matcher.quoteReplacement(sessionNames.read)
+ p.replaceFirst(readName, readName + readInstanceName)
+ }
def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName
def translatePath(path: String) = {
@@ -460,7 +469,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
- val content = indentCode(line)
+ val content = line //indentCode(line)
val trees = parse(content) match {
case parse.Incomplete => return Left(IR.Incomplete)
case parse.Error => return Left(IR.Error)
@@ -758,11 +767,13 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
// object and we can do that much less wrapping.
def packageDecl = "package " + packageName
+ def pathToInstance(name: String) = packageName + "." + name + readInstanceName
def pathTo(name: String) = packageName + "." + name
def packaged(code: String) = packageDecl + "\n\n" + code
- def readPath = pathTo(readName)
- def evalPath = pathTo(evalName)
+ def readPathInstance = pathToInstance(readName)
+ def readPath = pathTo(readName)
+ def evalPath = pathTo(evalName)
def call(name: String, args: Any*): AnyRef = {
val m = evalMethod(name)
@@ -802,7 +813,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
/** The innermost object inside the wrapper, found by
* following accessPath into the outer one.
*/
- def resolvePathToSymbol(accessPath: String): Symbol = {
+ def resolvePathToSymbol(fullAccessPath: String): Symbol = {
+ val accessPath = fullAccessPath.stripPrefix(readPath)
val readRoot = readRootPath(readPath) // the outermost wrapper
(accessPath split '.').foldLeft(readRoot: Symbol) {
case (sym, "") => sym
@@ -849,7 +861,6 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def defines = defHandlers flatMap (_.definedSymbols)
def imports = importedSymbols
def value = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol
-
val lineRep = new ReadEvalPrint()
private var _originalLine: String = null
@@ -858,6 +869,11 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
/** handlers for each tree in this request */
val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
+ val definesClass = handlers.exists {
+ case _: ClassHandler => true
+ case _ => false
+ }
+
def defHandlers = handlers collect { case x: MemberDefHandler => x }
/** list of names used by this expression */
@@ -875,13 +891,13 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
* append to objectName to access anything bound by request.
*/
lazy val ComputedImports(importsPreamble, importsTrailer, accessPath) =
- exitingTyper(importsCode(referencedNames.toSet, ObjectSourceCode))
+ exitingTyper(importsCode(referencedNames.toSet, ObjectSourceCode, definesClass))
/** the line of code to compute */
def toCompute = line
/** The path of the value that contains the user code. */
- def fullAccessPath = s"${lineRep.readPath}$accessPath"
+ def fullAccessPath = s"${lineRep.readPathInstance}$accessPath"
/** The path of the given member of the wrapping instance. */
def fullPath(vname: String) = s"$fullAccessPath.`$vname`"
@@ -894,10 +910,10 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
else List("def %s = %s".format("$line", tquoted(originalLine)), "def %s = Nil".format("$trees"))
}
def preamble = s"""
- |$preambleHeader
- |%s%s%s
- """.stripMargin.format(lineRep.readName, envLines.map(" " + _ + ";\n").mkString,
- importsPreamble, indentCode(toCompute))
+ |${preambleHeader format lineRep.readName}
+ |${envLines mkString (" ", ";\n ", ";\n")}
+ |$importsPreamble
+ |${indentCode(toCompute)}""".stripMargin
val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
@@ -911,7 +927,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def postwrap: String
}
- private class ObjectBasedWrapper extends Wrapper {
+ class ObjectBasedWrapper extends Wrapper {
def preambleHeader = "object %s {"
def postamble = importsTrailer + "\n}"
@@ -919,13 +935,16 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def postwrap = "}\n"
}
- private class ClassBasedWrapper extends Wrapper {
- def preambleHeader = "class %s extends Serializable {"
+ class ClassBasedWrapper extends Wrapper {
+ def preambleHeader = "class %s extends Serializable { "
/** Adds an object that instantiates the outer wrapping class. */
- def postamble = s"""$importsTrailer
+ def postamble = s"""
+ |$importsTrailer
+ |}
+ |object ${lineRep.readName} {
+ | val INSTANCE = new ${lineRep.readName}();
|}
- |object ${lineRep.readName} extends ${lineRep.readName}
|""".stripMargin
import nme.{ INTERPRETER_IMPORT_WRAPPER => iw }
@@ -935,7 +954,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
private lazy val ObjectSourceCode: Wrapper =
- if (settings.Yreplclassbased) new ClassBasedWrapper else new ObjectBasedWrapper
+ if (isClassBased) new ClassBasedWrapper else new ObjectBasedWrapper
private object ResultObjectSourceCode extends IMain.CodeAssembler[MemberHandler] {
/** We only want to generate this code when the result
@@ -994,7 +1013,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
}
- lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
+ lazy val resultSymbol = lineRep.resolvePathToSymbol(fullAccessPath)
def applyToResultMember[T](name: Name, f: Symbol => T) = exitingTyper(f(resultSymbol.info.nonPrivateDecl(name)))
/* typeOf lookup with encoding */
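
The ClassBasedWrapper above now emits a plain class plus an object holding a single INSTANCE, and originalPath/fullAccessPath route member access through ".INSTANCE". A hand-written illustration of the resulting shape for a line such as `val x = 42` (simplified; names are illustrative and the generated code also nests import wrappers):

  class ReadWrapper extends Serializable {
    val x = 42
  }
  object ReadWrapper {
    val INSTANCE = new ReadWrapper()   // what translateOriginalPath points paths at
  }

  object Demo extends App {
    println(ReadWrapper.INSTANCE.x)    // access goes through the .INSTANCE accessor
  }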
diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala
index 5244858a62..3ec77e46f1 100644
--- a/src/repl/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala
@@ -92,7 +92,7 @@ trait Imports {
* last one imported is actually usable.
*/
case class ComputedImports(prepend: String, append: String, access: String)
- protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper): ComputedImports = {
+ protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper, definesClass: Boolean): ComputedImports = {
/** Narrow down the list of requests from which imports
* should be taken. Removes requests which cannot contribute
* useful imports for the specified set of wanted names.
@@ -107,6 +107,8 @@ trait Imports {
// Single symbol imports might be implicits! See bug #1752. Rather than
// try to finesse this, we will mimic all imports for now.
def keepHandler(handler: MemberHandler) = handler match {
+ /* While defining classes in class-based mode, implicits are not needed. */
+ case h: ImportHandler if isClassBased && definesClass => h.importedNames.exists(x => wanted.contains(x))
case _: ImportHandler => true
case x => x.definesImplicit || (x.definedNames exists wanted)
}
@@ -146,7 +148,10 @@ trait Imports {
// loop through previous requests, adding imports for each one
wrapBeforeAndAfter {
+ // Reuse a single temporary value when importing from a line with multiple definitions.
+ val tempValLines = mutable.Set[Int]()
for (ReqAndHandler(req, handler) <- reqsToUse) {
+ val objName = req.lineRep.readPathInstance
handler match {
// If the user entered an import, then just use it; add an import wrapping
// level if the import might conflict with some other import
@@ -157,6 +162,23 @@ trait Imports {
code append (x.member + "\n")
currentImps ++= x.importedNames
+ case x if isClassBased =>
+ for (imv <- x.definedNames) {
+ if (!currentImps.contains(imv)) {
+ x match {
+ case _: ClassHandler =>
+ code.append("import " + objName + req.accessPath + ".`" + imv + "`\n")
+ case _ =>
+ val valName = req.lineRep.packageName + req.lineRep.readName
+ if (!tempValLines.contains(req.lineRep.lineId)) {
+ code.append(s"val $valName = $objName\n")
+ tempValLines += req.lineRep.lineId
+ }
+ code.append(s"import $valName${req.accessPath}.`$imv`;\n")
+ }
+ currentImps += imv
+ }
+ }
// For other requests, import each defined name.
// import them explicitly instead of with _, so that
// ambiguity errors will not be generated. Also, quote
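
In class-based mode the loop above no longer imports from a global line object; it binds the previous line's INSTANCE to one temporary val and imports the wanted names from that val. A runnable sketch of the pattern (names are illustrative, not the repl's generated ones):

  class Line1 { val x = 1; val y = 2 }
  object Line1 { val INSTANCE = new Line1 }

  object Line2 extends App {
    val line1Instance = Line1.INSTANCE   // the single temp val per previous request
    import line1Instance.{x, y}          // import only the wanted names from it
    println(x + y)                       // prints 3
  }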
diff --git a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
index ed69d449cb..71753a3e39 100644
--- a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
@@ -13,6 +13,8 @@ import Properties.isMac
/** Reads lines from an input stream */
trait InteractiveReader {
+ def postInit(): Unit = {}
+
val interactive: Boolean
def reset(): Unit
diff --git a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
index c1122d4223..d878988e26 100644
--- a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -12,6 +12,7 @@ import scala.reflect.internal.util.StringOps.longestCommonPrefix
// REPL completor - queries supplied interpreter for valid
// completions based on current contents of buffer.
+// TODO: change class name to reflect it's not specific to jline (nor does it depend on it)
class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput {
val global: intp.global.type = intp.global
import global._
diff --git a/src/repl/scala/tools/nsc/interpreter/JLineReader.scala b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
deleted file mode 100644
index b6e834a1ed..0000000000
--- a/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stepan Koltsov
- */
-
-package scala.tools.nsc
-package interpreter
-
-import jline.console.ConsoleReader
-import jline.console.completer._
-import session._
-import Completion._
-
-/**
- * Reads from the console using JLine.
- */
-class JLineReader(_completion: => Completion) extends InteractiveReader {
- val interactive = true
- val consoleReader = new JLineConsoleReader()
-
- lazy val completion = _completion
- lazy val history: JLineHistory = JLineHistory()
-
- private def term = consoleReader.getTerminal()
- def reset() = term.reset()
-
- def scalaToJline(tc: ScalaCompleter): Completer = new Completer {
- def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = {
- val buf = if (_buf == null) "" else _buf
- val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor)
- newCandidates foreach (candidates add _)
- newCursor
- }
- }
-
- class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper with VariColumnTabulator {
- val isAcross = interpreter.`package`.isAcross
-
- this setPaginationEnabled interpreter.`package`.isPaged
-
- // ASAP
- this setExpandEvents false
-
- // working around protected/trait/java insufficiencies.
- def goBack(num: Int): Unit = back(num)
- if ((history: History) ne NoHistory)
- this setHistory history
-
- def readOneKey(prompt: String) = {
- this.print(prompt)
- this.flush()
- this.readCharacter()
- }
- def eraseLine() = consoleReader.resetPromptLine("", "", 0)
- def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() }
-
- // A hook for running code after the repl is done initializing.
- lazy val postInit: Unit = {
- this setBellEnabled false
-
- if (completion ne NoCompletion) {
- val argCompletor: ArgumentCompleter =
- new ArgumentCompleter(new JLineDelimiter, scalaToJline(completion.completer()))
- argCompletor setStrict false
-
- this addCompleter argCompletor
- this setAutoprintThreshold 400 // max completion candidates without warning
- }
- }
- }
-
- def redrawLine() = consoleReader.redrawLineAndFlush()
- def readOneLine(prompt: String) = consoleReader readLine prompt
- def readOneKey(prompt: String) = consoleReader readOneKey prompt
-}
diff --git a/src/repl/scala/tools/nsc/interpreter/Parsed.scala b/src/repl/scala/tools/nsc/interpreter/Parsed.scala
index 672a6fd28f..5e58d3a2c4 100644
--- a/src/repl/scala/tools/nsc/interpreter/Parsed.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Parsed.scala
@@ -8,6 +8,25 @@ package interpreter
import util.returning
+trait Delimited {
+ self: Parsed =>
+
+ def delimited: Char => Boolean
+ def escapeChars: List[Char] = List('\\')
+
+ /** Break String into args based on delimiting function.
+ */
+ protected def toArgs(s: String): List[String] =
+ if (s == "") Nil
+ else (s indexWhere isDelimiterChar) match {
+ case -1 => List(s)
+ case idx => (s take idx) :: toArgs(s drop (idx + 1))
+ }
+
+ def isDelimiterChar(ch: Char) = delimited(ch)
+ def isEscapeChar(ch: Char): Boolean = escapeChars contains ch
+}
+
/** One instance of a command buffer.
*/
class Parsed private (
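
Delimited.toArgs (moved into Parsed.scala above) splits the buffer recursively on a delimiter predicate. A standalone sketch of the same recursion:

  object ToArgsSketch {
    def toArgs(s: String, isDelimiter: Char => Boolean): List[String] =
      if (s == "") Nil
      else s.indexWhere(isDelimiter) match {
        case -1  => List(s)
        case idx => s.take(idx) :: toArgs(s.drop(idx + 1), isDelimiter)
      }

    def main(args: Array[String]): Unit =
      println(toArgs("foo bar  baz", _ == ' '))  // List(foo, bar, , baz) -- empty args are kept
  }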
diff --git a/src/repl/scala/tools/nsc/interpreter/Pasted.scala b/src/repl/scala/tools/nsc/interpreter/Pasted.scala
index f5db3d9e3a..5f388eb15b 100644
--- a/src/repl/scala/tools/nsc/interpreter/Pasted.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Pasted.scala
@@ -16,17 +16,21 @@ package interpreter
* the same result.
*/
abstract class Pasted {
+ def interpret(line: String): Unit
def ContinueString: String
def PromptString: String
- def interpret(line: String): Unit
+ def AltPromptString: String = "scala> "
+
+ private val testBoth = PromptString != AltPromptString
+ private val spacey = " \t".toSet
- def matchesPrompt(line: String) = matchesString(line, PromptString)
+ def matchesPrompt(line: String) = matchesString(line, PromptString) || testBoth && matchesString(line, AltPromptString)
def matchesContinue(line: String) = matchesString(line, ContinueString)
def running = isRunning
private def matchesString(line: String, target: String): Boolean = (
(line startsWith target) ||
- (line.nonEmpty && " \t".toSet(line.head) && matchesString(line.tail, target))
+ (line.nonEmpty && spacey(line.head) && matchesString(line.tail, target))
)
private def stripString(line: String, target: String) = line indexOf target match {
case -1 => line
@@ -39,7 +43,9 @@ abstract class Pasted {
private class PasteAnalyzer(val lines: List[String]) {
val referenced = lines flatMap (resReference findAllIn _.trim.stripPrefix("res")) toSet
- val cmds = lines reduceLeft append split PromptString filterNot (_.trim == "") toList
+ val ActualPromptString = lines find matchesPrompt map (s =>
+ if (matchesString(s, PromptString)) PromptString else AltPromptString) getOrElse PromptString
+ val cmds = lines reduceLeft append split ActualPromptString filterNot (_.trim == "") toList
/** If it's a prompt or continuation line, strip the formatting bits and
* assemble the code. Otherwise ship it off to be analyzed for res references
@@ -67,10 +73,10 @@ abstract class Pasted {
*/
def fixResRefs(code: String, line: String) = line match {
case resCreation(resName) if referenced(resName) =>
- code.lastIndexOf(PromptString) match {
+ code.lastIndexOf(ActualPromptString) match {
case -1 => code
case idx =>
- val (str1, str2) = code splitAt (idx + PromptString.length)
+ val (str1, str2) = code splitAt (idx + ActualPromptString.length)
str2 match {
case resAssign(`resName`) => code
case _ => "%sval %s = { %s }".format(str1, resName, str2)
@@ -79,10 +85,10 @@ abstract class Pasted {
case _ => code
}
- def run() {
+ def run(): Unit = {
println("// Replaying %d commands from transcript.\n" format cmds.size)
cmds foreach { cmd =>
- print(PromptString)
+ print(ActualPromptString)
interpret(cmd)
}
}
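
matchesPrompt above accepts either the configured prompt or the plain "scala> " alternative, skipping leading whitespace. A standalone sketch of the recursive matching (illustrative object, not the Pasted class itself):

  object PromptMatch {
    private val spacey = " \t".toSet

    def matchesString(line: String, target: String): Boolean =
      line.startsWith(target) ||
        (line.nonEmpty && spacey(line.head) && matchesString(line.tail, target))

    def main(args: Array[String]): Unit = {
      println(matchesString("scala> val x = 1", "scala> "))    // true
      println(matchesString("\t scala> val x = 1", "scala> ")) // true
      println(matchesString("val x = 1", "scala> "))           // false
    }
  }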
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
index 8c4faf7278..df65e9974d 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
@@ -6,12 +6,13 @@
package scala.tools.nsc
package interpreter
+import Properties.shellPromptString
import scala.sys._
import Prop._
class ReplProps {
private def bool(name: String) = BooleanProp.keyExists(name)
- private def int(name: String) = IntProp(name)
+ private def int(name: String) = Prop[Int](name)
// This property is used in TypeDebugging. Let's recycle it.
val colorOk = bool("scala.color")
@@ -21,6 +22,14 @@ class ReplProps {
val trace = bool("scala.repl.trace")
val power = bool("scala.repl.power")
+ // Handy system prop for shell prompt, or else pick it up from compiler.properties
+ val promptString = Prop[String]("scala.repl.prompt").option getOrElse (if (info) "%nscala %s> " else shellPromptString)
+ val prompt = {
+ import scala.io.AnsiColor.{ MAGENTA, RESET }
+ val p = promptString format Properties.versionNumberString
+ if (colorOk) s"$MAGENTA$p$RESET" else p
+ }
+
/** CSV of paged,across to enable pagination or `-x` style
* columns, "across" instead of down the column. Since
* pagination turns off columnar output, these flags are
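
The new promptString/prompt above come from a system property (falling back to the compiled-in default), are formatted with the version number, and are wrapped in ANSI magenta only when color is enabled. A sketch using plain standard-library calls (property names mirror the diff; the version string is illustrative):

  object PromptSketch extends App {
    import scala.io.AnsiColor.{MAGENTA, RESET}

    val colorOk      = sys.props contains "scala.color"
    val promptString = sys.props.getOrElse("scala.repl.prompt", "%nscala %s> ")
    val expanded     = promptString format "2.11.7"   // versionNumberString in the repl
    val prompt       = if (colorOk) s"$MAGENTA$expanded$RESET" else expanded

    print(prompt)
  }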
diff --git a/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/repl/scala/tools/nsc/interpreter/Tabulators.scala
index a8d537e314..75bec168eb 100644
--- a/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Tabulators.scala
@@ -3,60 +3,7 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
-package interpreter
-
-import jline.console.{ ConsoleReader, CursorBuffer }
-
-trait ConsoleReaderHelper { _: ConsoleReader with Tabulator =>
- def isAcross: Boolean
-
- def terminal = getTerminal()
- def width = terminal.getWidth()
- def height = terminal.getHeight()
-
- def readOneKey(prompt: String): Int
- def eraseLine(): Unit
-
- val marginSize = 3
-
- private def morePrompt = "--More--"
- private def emulateMore(): Int = {
- val key = readOneKey(morePrompt)
- try key match {
- case '\r' | '\n' => 1
- case 'q' => -1
- case _ => height - 1
- }
- finally {
- eraseLine()
- // TODO: still not quite managing to erase --More-- and get
- // back to a scala prompt without another keypress.
- if (key == 'q') {
- putString(getPrompt())
- redrawLine()
- flush()
- }
- }
- }
-
- override def printColumns(items: JCollection[_ <: CharSequence]): Unit =
- printColumns_(items: List[String])
-
- private def printColumns_(items: List[String]): Unit = if (items exists (_ != "")) {
- val grouped = tabulate(items)
- var linesLeft = if (isPaginationEnabled()) height - 1 else Int.MaxValue
- grouped foreach { xs =>
- println(xs.mkString)
- linesLeft -= 1
- if (linesLeft <= 0) {
- linesLeft = emulateMore()
- if (linesLeft < 0)
- return
- }
- }
- }
-}
+package scala.tools.nsc.interpreter
trait Tabulator {
def isAcross: Boolean
@@ -72,7 +19,7 @@ trait Tabulator {
)
protected def columnize(ss: Seq[String]): Seq[Seq[String]] = ss map (s => Seq(s))
protected def printMultiLineColumns(items: Seq[String]): Seq[Seq[String]] = {
- import SimpleMath._
+ import scala.tools.nsc.interpreter.SimpleMath._
val longest = (items map (_.length)).max
val columnWidth = longest + marginSize
val maxcols = (
@@ -101,7 +48,7 @@ trait Tabulator {
/** Adjust the column width and number of columns to minimize the row count. */
trait VariColumnTabulator extends Tabulator {
override protected def printMultiLineColumns(items: Seq[String]): Seq[Seq[String]] = {
- import SimpleMath._
+ import scala.tools.nsc.interpreter.SimpleMath._
val longest = (items map (_.length)).max
val shortest = (items map (_.length)).min
val fattest = longest + marginSize
diff --git a/src/repl/scala/tools/nsc/interpreter/session/History.scala b/src/repl/scala/tools/nsc/interpreter/session/History.scala
index 794d41adc7..2028a13dfd 100644
--- a/src/repl/scala/tools/nsc/interpreter/session/History.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/History.scala
@@ -11,7 +11,10 @@ package session
* reference to the jline classes. Very sparse right now.
*/
trait History {
+ def historicize(text: String): Boolean = false
+
def asStrings: List[String]
+ def asStrings(from: Int, to: Int): List[String] = asStrings.slice(from, to)
def index: Int
def size: Int
}
diff --git a/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala
deleted file mode 100644
index 18e0ee7c85..0000000000
--- a/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-package session
-
-/** A straight scalification of the jline interface which mixes
- * in the sparse jline-independent one too.
- */
-trait JLineHistory extends JHistory with History {
- def size: Int
- def isEmpty: Boolean
- def index: Int
- def clear(): Unit
- def get(index: Int): CharSequence
- def add(line: CharSequence): Unit
- def replace(item: CharSequence): Unit
-
- def entries(index: Int): JListIterator[JEntry]
- def entries(): JListIterator[JEntry]
- def iterator: JIterator[JEntry]
-
- def current(): CharSequence
- def previous(): Boolean
- def next(): Boolean
- def moveToFirst(): Boolean
- def moveToLast(): Boolean
- def moveTo(index: Int): Boolean
- def moveToEnd(): Unit
-}
-
-object JLineHistory {
- class JLineFileHistory extends SimpleHistory with FileBackedHistory {
- override def add(item: CharSequence): Unit = {
- if (!isEmpty && last == item)
- repldbg("Ignoring duplicate entry '" + item + "'")
- else {
- super.add(item)
- addLineToFile(item)
- }
- }
- override def toString = "History(size = " + size + ", index = " + index + ")"
- }
-
- def apply(): JLineHistory = try new JLineFileHistory catch { case x: Exception => new SimpleHistory() }
-}
diff --git a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
index 7c49b91296..504d0d30ee 100644
--- a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
@@ -10,10 +10,9 @@ package session
import scala.collection.mutable.{ Buffer, ListBuffer }
import scala.collection.JavaConverters._
-class SimpleHistory extends JLineHistory {
+class SimpleHistory extends History {
private var _index: Int = 0
- private val buf: Buffer[String] = new ListBuffer[String]
- private def toEntries(): Seq[JEntry] = buf.zipWithIndex map { case (x, i) => Entry(i, x) }
+ protected val buf: Buffer[String] = new ListBuffer[String]
private def setTo(num: Int) = { _index = num ; true }
private def minusOne = { _index -= 1 ; true }
private def plusOne = { _index += 1 ; true }
@@ -25,10 +24,6 @@ class SimpleHistory extends JLineHistory {
""
}
- case class Entry(index: Int, value: CharSequence) extends JEntry {
- override def toString = value
- }
-
def maxSize: Int = 2500
def last = if (isEmpty) fail("last") else buf.last
@@ -42,9 +37,6 @@ class SimpleHistory extends JLineHistory {
buf trimEnd 1
add(item)
}
- def entries(idx: Int): JListIterator[JEntry] = toEntries().asJava.listIterator(idx)
- def entries(): JListIterator[JEntry] = toEntries().asJava.listIterator()
- def iterator: JIterator[JEntry] = toEntries().iterator.asJava
def remove(idx: Int): CharSequence = buf remove idx
def removeFirst(): CharSequence = buf remove 0
diff --git a/src/repl/scala/tools/nsc/interpreter/session/package.scala b/src/repl/scala/tools/nsc/interpreter/session/package.scala
index a3d7312c98..06e7f6207b 100644
--- a/src/repl/scala/tools/nsc/interpreter/session/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/package.scala
@@ -14,10 +14,5 @@ package object session {
type JIterator[T] = java.util.Iterator[T]
type JListIterator[T] = java.util.ListIterator[T]
- type JEntry = jline.console.history.History.Entry
- type JHistory = jline.console.history.History
- type JMemoryHistory = jline.console.history.MemoryHistory
- type JPersistentHistory = jline.console.history.PersistentHistory
-
private[interpreter] implicit def charSequenceFix(x: CharSequence): String = x.toString
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
index 44683f1755..067b2b2c29 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
@@ -143,7 +143,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
"dot" // by default, just pick up the system-wide dot
)
- /** The maxium nuber of normal classes to show in the diagram */
+ /** The maximum number of normal classes to show in the diagram */
val docDiagramsMaxNormalClasses = IntSetting(
"-diagrams-max-classes",
"The maximum number of superclasses or subclasses to show in a diagram",
@@ -152,7 +152,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
_ => None
)
- /** The maxium nuber of implcit classes to show in the diagram */
+ /** The maximum number of implicit classes to show in the diagram */
val docDiagramsMaxImplicitClasses = IntSetting(
"-diagrams-max-implicits",
"The maximum number of implicitly converted classes to show in a diagram",
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
index 86155845b0..6cdd99c9ee 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -59,6 +59,7 @@ abstract class HtmlPage extends Page { thisPage =>
writeFile(site) { (w: Writer) =>
w.write(doctype.toString + "\n")
w.write(xml.Xhtml.toXhtml(html))
+ w.write('\n')
}
if (site.universe.settings.docRawOutput)
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 357c397f05..99f989aadf 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -177,7 +177,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<li class="hideall out"><span>Hide All</span></li>
<li class="showall in"><span>Show all</span></li>
</ol>
- <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
</div>
}
{
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index b478c6424c..4c0ba32856 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -383,7 +383,7 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
if (dotId.count(_ == '|') == 1) {
val Array(klass, id) = dotId.toString.split("\\|")
/* Sometimes dot "forgets" to add the image -- that's very annoying, but it seems pretty random, and simple
- * tests like excute 20K times and diff the output don't trigger the bug -- so it's up to us to place the image
+ * tests like execute 20K times and diff the output don't trigger the bug -- so it's up to us to place the image
* back in the node */
val kind = getKind(klass)
if (kind != "")
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
index 7fe8903c76..90de51d763 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
@@ -298,7 +298,7 @@ trait DocTemplateEntity extends MemberTemplateEntity {
/** The shadowing information for the implicitly added members */
def implicitsShadowing: Map[MemberEntity, ImplicitMemberShadowing]
- /** Classes that can be implcitly converted to this class */
+ /** Classes that can be implicitly converted to this class */
def incomingImplicitlyConvertedClasses: List[(DocTemplateEntity, ImplicitConversion)]
/** Classes to which this class can be implicitly converted to
@@ -484,10 +484,10 @@ trait ImplicitConversion {
/** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise) */
def convertorMethod: Either[MemberEntity, String]
- /** A short name of the convertion */
+ /** A short name of the conversion */
def conversionShortName: String
- /** A qualified name uniquely identifying the convertion (currently: the conversion method's qualified name) */
+ /** A qualified name uniquely identifying the conversion (currently: the conversion method's qualified name) */
def conversionQualifiedName: String
/** The entity that performed the conversion */
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 96731acf56..178af3d4ba 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -478,7 +478,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
override lazy val comment = {
def nonRootTemplate(sym: Symbol): Option[DocTemplateImpl] =
if (sym eq RootPackage) None else findTemplateMaybe(sym)
- /* Variable precendence order for implicitly added members: Take the variable defifinitions from ...
+ /* Variable precedence order for implicitly added members: Take the variable definitions from ...
* 1. the target of the implicit conversion
* 2. the definition template (owner)
* 3. the current template
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index f984b4579f..db39d059d7 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -90,8 +90,12 @@ trait ModelFactoryImplicitSupport {
else {
val context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
- val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams)
+ val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams) ++
+ global.analyzer.allViewsFrom(byNameType(sym.tpe_*), context, sym.typeParams)
var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
+ //debug(results.mkString("All views\n ", "\n ", "\n"))
+ //debug(conversions.mkString("Conversions\n ", "\n ", "\n"))
+
// also keep empty conversions, so they appear in diagrams
// conversions = conversions.filter(!_.members.isEmpty)
@@ -193,7 +197,7 @@ trait ModelFactoryImplicitSupport {
List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl))
} catch {
case i: ImplicitNotFound =>
- //println(" Eliminating: " + toType)
+ //debug(s" Eliminating: $toType")
Nil
}
}
@@ -396,7 +400,7 @@ trait ModelFactoryImplicitSupport {
def isHiddenConversion = settings.hiddenImplicits(conversionQualifiedName)
- override def toString = "Implcit conversion from " + sym.tpe + " to " + toType + " done by " + convSym
+ override def toString = "Implicit conversion from " + sym.tpe + " to " + toType + " done by " + convSym
}
/* ========================= HELPER METHODS ========================== */
@@ -475,7 +479,7 @@ trait ModelFactoryImplicitSupport {
}
/**
- * Make implicits explicit - Not used curently
+ * Make implicits explicit - Not used currently
*/
// object implicitToExplicit extends TypeMap {
// def apply(tp: Type): Type = mapOver(tp) match {
@@ -557,7 +561,7 @@ trait ModelFactoryImplicitSupport {
*
* The trick here is that the resultType does not matter - the condition for removal it that paramss have the same
* structure (A => B => C may not override (A, B) => C) and that all the types involved are
- * of the implcit conversion's member are subtypes of the parent members' parameters */
+ * of the implicit conversion's member are subtypes of the parent members' parameters */
def isDistinguishableFrom(t1: Type, t2: Type): Boolean = {
// Vlad: I tried using matches but it's not exactly what we need:
// (p: AnyRef)AnyRef matches ((t: String)AnyRef returns false -- but we want that to be true
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
index 87d7ece8f2..093899231e 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
@@ -51,7 +51,7 @@ trait DiagramFactory extends DiagramDirectiveParser {
case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))()
}.reverse
- // incoming implcit conversions
+ // incoming implicit conversions
lazy val incomingImplicitNodes = tpl.incomingImplicitlyConvertedClasses.map {
case (incomingTpl, conv) =>
ImplicitNode(makeType(incomingTpl.sym.tpe, tpl), Some(incomingTpl))(implicitTooltip(from=incomingTpl, to=tpl, conv=conv))