Diffstat (limited to 'src/compiler')
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala | 2
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/Errors.scala | 1
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/Resolvers.scala | 6
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/Validators.scala | 2
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Infrastructure.scala | 2
-rw-r--r--  src/compiler/scala/reflect/macros/util/Helpers.scala | 6
-rw-r--r--  src/compiler/scala/reflect/quasiquotes/Parsers.scala | 2
-rw-r--r--  src/compiler/scala/reflect/quasiquotes/Reifiers.scala | 1
-rw-r--r--  src/compiler/scala/reflect/reify/Reifier.scala | 1
-rw-r--r--  src/compiler/scala/reflect/reify/Taggers.scala | 3
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenUtils.scala | 4
-rw-r--r--  src/compiler/scala/tools/ant/Scalac.scala | 2
-rwxr-xr-x  src/compiler/scala/tools/ant/templates/tool-unix.tmpl | 12
-rw-r--r--  src/compiler/scala/tools/ant/templates/tool-windows.tmpl | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala | 77
-rw-r--r--  src/compiler/scala/tools/nsc/CompilationUnits.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/CompileClient.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/CompileServer.scala | 39
-rw-r--r--  src/compiler/scala/tools/nsc/CompileSocket.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/GenericRunnerSettings.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 203
-rw-r--r--  src/compiler/scala/tools/nsc/ObjectRunner.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/Parsing.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/PhaseAssembly.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Properties.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/Reporting.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/ScriptRunner.scala | 9
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 192
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/backend/JavaPlatform.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/backend/Platform.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/ICodes.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala | 184
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala | 510
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala | 125
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala | 55
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala | 162
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/FileUtils.scala | 68
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala | 101
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala | 38
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala | 180
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala | 67
-rw-r--r--  src/compiler/scala/tools/nsc/plugins/Plugins.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/FscSettings.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/settings/MutableSettings.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaVersion.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 53
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/AddInterfaces.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Delambdafy.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 48
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/OverridingPairs.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Statics.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 48
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Tags.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 106
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassFileLookup.scala | 57
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassPath.scala | 138
-rw-r--r--  src/compiler/scala/tools/reflect/ReflectMain.scala | 8
-rw-r--r--  src/compiler/scala/tools/util/Javap.scala | 32
-rw-r--r--  src/compiler/scala/tools/util/PathResolver.scala | 102
-rw-r--r--  src/compiler/scala/tools/util/SocketServer.scala | 4
93 files changed, 2510 insertions, 647 deletions
diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
index a13a778b2f..b8384851da 100644
--- a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
+++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
@@ -12,7 +12,7 @@ abstract class DefaultMacroCompiler extends Resolvers
import treeInfo._
import definitions._
val runDefinitions = currentRun.runDefinitions
- import runDefinitions.{Predef_???, _}
+ import runDefinitions.Predef_???
val typer: global.analyzer.Typer
val context = typer.context
diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala
index cc4508e696..98fd091e9c 100644
--- a/src/compiler/scala/reflect/macros/compiler/Errors.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala
@@ -11,7 +11,6 @@ trait Errors extends Traces {
import analyzer._
import definitions._
import treeInfo._
- import typer.TyperErrorGen._
import typer.infer.InferErrorGen._
import runDefinitions._
def globalSettings = global.settings
diff --git a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
index 4484c234aa..d3f49390ea 100644
--- a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
@@ -1,18 +1,12 @@
package scala.reflect.macros
package compiler
-import scala.reflect.internal.Flags._
-import scala.reflect.macros.TypecheckException
-
trait Resolvers {
self: DefaultMacroCompiler =>
import global._
import analyzer._
- import definitions._
import treeInfo._
- import gen._
- import runDefinitions._
trait Resolver {
self: MacroImplRefCompiler =>
diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala
index a146818ae3..fc932f2b18 100644
--- a/src/compiler/scala/reflect/macros/compiler/Validators.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala
@@ -9,7 +9,7 @@ trait Validators {
import global._
import analyzer._
import definitions._
- import runDefinitions.{Predef_???, _}
+ import runDefinitions.Predef_???
trait Validator {
self: MacroImplRefCompiler =>
diff --git a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
index df7aa4d2be..7088058145 100644
--- a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
@@ -12,5 +12,5 @@ trait Infrastructure {
def compilerSettings: List[String] = universe.settings.recreateArgs
- def classPath: List[java.net.URL] = global.classPath.asURLs
+ def classPath: List[java.net.URL] = global.classPath.asURLs.toList
}
diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala
index bddc42d1f9..961c41dab5 100644
--- a/src/compiler/scala/reflect/macros/util/Helpers.scala
+++ b/src/compiler/scala/reflect/macros/util/Helpers.scala
@@ -54,14 +54,10 @@ trait Helpers {
*
* @see Metalevels.scala for more information and examples about metalevels
*/
- def increaseMetalevel(pre: Type, tp: Type): Type = {
- val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
-
+ def increaseMetalevel(pre: Type, tp: Type): Type =
transparentShallowTransform(RepeatedParamClass, tp) {
case tp => typeRef(pre, MacroContextExprClass, List(tp))
}
- }
/** Transforms c.Expr[T] types into c.Tree and leaves the rest unchanged.
*/
diff --git a/src/compiler/scala/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/reflect/quasiquotes/Parsers.scala
index 007bac27da..97ec7dbfc3 100644
--- a/src/compiler/scala/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Parsers.scala
@@ -90,7 +90,7 @@ trait Parsers { self: Quasiquotes =>
case _ => super.makePatDef(mods, pat, rhs)
}
}
- import treeBuilder.{global => _, unit => _, _}
+ import treeBuilder.{global => _, unit => _}
// q"def foo($x)"
override def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef =
diff --git a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
index 07becdc3c6..cc98717c4e 100644
--- a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
@@ -8,7 +8,6 @@ import scala.reflect.internal.Flags._
trait Reifiers { self: Quasiquotes =>
import global._
import global.build._
- import global.treeInfo._
import global.definitions._
import Rank._
import universeTypes._
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index b1cc797389..a3e0f02dcc 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -21,7 +21,6 @@ abstract class Reifier extends States
import global._
import definitions._
private val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
val typer: global.analyzer.Typer
val universe: Tree
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index 093c2bee22..0863ee38f9 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -79,8 +79,7 @@ abstract class Taggers {
try materializer
catch {
case ReificationException(pos, msg) =>
- c.error(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the sanity of exception handling
- EmptyTree
+ c.abort(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the sanity of exception handling
case UnexpectedReificationException(pos, err, cause) if cause != null =>
throw cause
}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 4512b2cb6f..de9fec0df5 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -5,10 +5,6 @@ trait GenUtils {
self: Reifier =>
import global._
- import treeInfo._
- import definitions._
- private val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
def reifyList(xs: List[Any]): Tree =
mkList(xs map reify)
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 1747405f03..8905c94eeb 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -97,7 +97,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Defines valid values for the `target` property. */
object Target extends PermissibleValue {
- val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7")
+ val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7", "jvm-1.8")
}
/** Defines valid values for the `deprecation` and `unchecked` properties. */
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index f58223a39e..7acb3632d2 100755
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -86,10 +86,14 @@ fi
TOOL_CLASSPATH="@classpath@"
if [[ -z "$TOOL_CLASSPATH" ]]; then
for ext in "$SCALA_HOME"/lib/* ; do
- if [[ -z "$TOOL_CLASSPATH" ]]; then
- TOOL_CLASSPATH="$ext"
- else
- TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}"
+ file_extension="${ext##*.}"
+ # SI-8967 Only consider directories and files named '*.jar'
+ if [[ -d "$ext" || $file_extension == "jar" ]]; then
+ if [[ -z "$TOOL_CLASSPATH" ]]; then
+ TOOL_CLASSPATH="$ext"
+ else
+ TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}"
+ fi
fi
done
fi
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index cf0e003f10..50e44fb669 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -128,7 +128,7 @@ if defined _JAVA_PARAMS set _JAVA_OPTS=%_JAVA_OPTS% %_JAVA_PARAMS%
set _TOOL_CLASSPATH=@classpath@
if "%_TOOL_CLASSPATH%"=="" (
- for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
+ for %%f in ("!_SCALA_HOME!\lib\*.jar") do call :add_cpath "%%f"
for /d %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
)
diff --git a/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala
new file mode 100644
index 0000000000..2faf6c6272
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc
+
+import scala.io.StdIn.readLine
+
+/**
+ * Simple application to check the amount of memory used by the chosen classpath representation.
+ * It allows us to create many scalac-like calls based on specified parameters, where each main retains its Global.
+ * An additional tool (e.g. a profiler) is needed to measure the memory consumption itself.
+ */
+object ClassPathMemoryConsumptionTester {
+
+ private class TestSettings extends Settings {
+ val requiredInstances = IntSetting("-requiredInstances",
+ "Determine how many times classpath should be loaded", 10, Some((1, 10000)), (_: String) => None)
+ }
+
+ private class MainRetainsGlobal extends scala.tools.nsc.MainClass {
+ var retainedGlobal: Global = _
+ override def doCompile(compiler: Global) {
+ retainedGlobal = compiler
+ super.doCompile(compiler)
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ if (args contains "-help") usage()
+ else doTest(args)
+ }
+
+ private def doTest(args: Array[String]) = {
+ val settings = loadSettings(args.toList)
+
+ val mains = (1 to settings.requiredInstances.value) map (_ => new MainRetainsGlobal)
+
+ // we need original settings without additional params to be able to use them later
+ val baseArgs = argsWithoutRequiredInstances(args)
+
+ println(s"Loading classpath ${settings.requiredInstances.value} times")
+ val startTime = System.currentTimeMillis()
+
+ mains map (_.process(baseArgs))
+
+ val elapsed = System.currentTimeMillis() - startTime
+ println(s"Operation finished - elapsed $elapsed ms")
+ println("Memory consumption can be now measured")
+
+ var textFromStdIn = ""
+ while (textFromStdIn.toLowerCase != "exit")
+ textFromStdIn = readLine("Type 'exit' to close application: ")
+ }
+
+ /**
+ * Prints usage information
+ */
+ private def usage(): Unit =
+ println( """Use classpath and sourcepath options like in the case of e.g. 'scala' command.
+ | There's also one additional option:
+ | -requiredInstances <int value> Determine how many times classpath should be loaded
+ """.stripMargin.trim)
+
+ private def loadSettings(args: List[String]) = {
+ val settings = new TestSettings()
+ settings.processArguments(args, processAll = true)
+ if (settings.classpath.isDefault)
+ settings.classpath.value = sys.props("java.class.path")
+ settings
+ }
+
+ private def argsWithoutRequiredInstances(args: Array[String]) = {
+ val instancesIndex = args.indexOf("-requiredInstances")
+ if (instancesIndex == -1) args
+ else args.dropRight(args.length - instancesIndex) ++ args.drop(instancesIndex + 2)
+ }
+}
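
A minimal usage sketch of the tester added above; the instance count, the -usejavacp flag and the source file name are illustrative assumptions, and an attached profiler does the actual measuring:

    object MemoryTesterExample {
      def main(args: Array[String]): Unit =
        // Loads the classpath 50 times, compiling Sample.scala with each retained Global,
        // then waits for "exit" on stdin so a profiler can inspect retained memory.
        scala.tools.nsc.ClassPathMemoryConsumptionTester.main(Array(
          "-requiredInstances", "50",   // hypothetical instance count
          "-usejavacp",                 // assumption: reuse the JVM's classpath
          "Sample.scala"))              // hypothetical source file
    }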
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 0a356ed7b6..1a6843a249 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
import scala.reflect.internal.util.{ SourceFile, NoSourceFile, FreshNameCreator }
import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
-import scala.tools.nsc.reporters.Reporter
trait CompilationUnits { global: Global =>
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index 3017d8c9cc..f259504473 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -43,8 +43,8 @@ class StandardCompileClient extends HasCompileSocket with CompileOutputCommon {
info(vmArgs.mkString("[VM arguments: ", " ", "]"))
val socket =
- if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown)
- else Some(compileSocket.getSocket(settings.server.value))
+ if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown, settings.port.value)
+ else compileSocket.getSocket(settings.server.value)
socket match {
case Some(sock) => compileOnServer(sock, fscArgs)
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index 029e1c4629..aa02957a6c 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -6,6 +6,7 @@
package scala.tools.nsc
import java.io.PrintStream
+import io.Directory
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.reflect.internal.util.{FakePos, Position}
import scala.tools.util.SocketServer
@@ -19,7 +20,7 @@ import settings.FscSettings
* @author Martin Odersky
* @version 1.0
*/
-class StandardCompileServer extends SocketServer {
+class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) {
lazy val compileSocket: CompileSocket = CompileSocket
private var compiler: Global = null
@@ -166,12 +167,12 @@ class StandardCompileServer extends SocketServer {
}
-object CompileServer extends StandardCompileServer {
+object CompileServer {
/** A directory holding redirected output */
- private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory()
+ //private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory()
- private def createRedirect(filename: String) =
- new PrintStream((redirectDir / filename).createFile().bufferedOutput())
+ private def createRedirect(dir: Directory, filename: String) =
+ new PrintStream((dir / filename).createFile().bufferedOutput())
def main(args: Array[String]) =
execute(() => (), args)
@@ -187,21 +188,33 @@ object CompileServer extends StandardCompileServer {
*/
def execute(startupCallback : () => Unit, args: Array[String]) {
val debug = args contains "-v"
+ var port = 0
+ val i = args.indexOf("-p")
+ if (i >= 0 && args.length > i + 1) {
+ scala.util.control.Exception.ignoring(classOf[NumberFormatException]) {
+ port = args(i + 1).toInt
+ }
+ }
+
+ // Create instance rather than extend to pass a port parameter.
+ val server = new StandardCompileServer(port)
+ val redirectDir = (server.compileSocket.tmpDir / "output-redirects").createDirectory()
+
if (debug) {
- echo("Starting CompileServer on port " + port)
- echo("Redirect dir is " + redirectDir)
+ server.echo("Starting CompileServer on port " + server.port)
+ server.echo("Redirect dir is " + redirectDir)
}
- Console.withErr(createRedirect("scala-compile-server-err.log")) {
- Console.withOut(createRedirect("scala-compile-server-out.log")) {
- Console.err.println("...starting server on socket "+port+"...")
+ Console.withErr(createRedirect(redirectDir, "scala-compile-server-err.log")) {
+ Console.withOut(createRedirect(redirectDir, "scala-compile-server-out.log")) {
+ Console.err.println("...starting server on socket "+server.port+"...")
Console.err.flush()
- compileSocket setPort port
+ server.compileSocket setPort server.port
startupCallback()
- run()
+ server.run()
- compileSocket deletePort port
+ server.compileSocket deletePort server.port
}
}
}
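
With the change above the server can be pinned to a port via -p. A small sketch of starting it programmatically, with a hypothetical port number, mirroring how serverClassArgs would launch it:

    object StartFixedPortServer {
      def main(args: Array[String]): Unit =
        // "-p 32834" pins the listening port (value hypothetical); "-v" turns on the debug echo.
        scala.tools.nsc.CompileServer.main(Array("-p", "32834", "-v"))
    }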
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index c693fbe8e2..27a14141fa 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -46,6 +46,9 @@ trait HasCompileSocket {
class CompileSocket extends CompileOutputCommon {
protected lazy val compileClient: StandardCompileClient = CompileClient
def verbose = compileClient.verbose
+
+  /* Fixes the port on which to start the server; 0 yields some free port */
+ var fixPort = 0
/** The prefix of the port identification file, which is followed
* by the port number.
@@ -64,7 +67,7 @@ class CompileSocket extends CompileOutputCommon {
/** The class name of the scala compile server */
protected val serverClass = "scala.tools.nsc.CompileServer"
- protected def serverClassArgs = if (verbose) List("-v") else Nil // debug
+ protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil)
/** A temporary directory to use */
val tmpDir = {
@@ -104,9 +107,14 @@ class CompileSocket extends CompileOutputCommon {
def portFile(port: Int) = portsDir / File(port.toString)
/** Poll for a server port number; return -1 if none exists yet */
- private def pollPort(): Int = portsDir.list.toList match {
+ private def pollPort(): Int = if (fixPort > 0) {
+ if (portsDir.list.toList.exists(_.name == fixPort.toString)) fixPort else -1
+ } else portsDir.list.toList match {
case Nil => -1
- case x :: xs => try x.name.toInt finally xs foreach (_.delete())
+ case x :: xs => try x.name.toInt catch {
+ case e: Exception => x.delete()
+ throw e
+ }
}
/** Get the port number to which a scala compile server is connected;
@@ -152,7 +160,8 @@ class CompileSocket extends CompileOutputCommon {
* create a new daemon if necessary. Returns None if the connection
* cannot be established.
*/
- def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = {
+ def getOrCreateSocket(vmArgs: String, create: Boolean = true, fixedPort: Int = 0): Option[Socket] = {
+ fixPort = fixedPort
val maxMillis = 10L * 1000 // try for 10 seconds
val retryDelay = 50L
val maxAttempts = (maxMillis / retryDelay).toInt
@@ -186,14 +195,17 @@ class CompileSocket extends CompileOutputCommon {
try { Some(x.toInt) }
catch { case _: NumberFormatException => None }
- def getSocket(serverAdr: String): Socket = (
- for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
+ def getSocket(serverAdr: String): Option[Socket] = (
+ for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
getSocket(name, port)
) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr)
- def getSocket(hostName: String, port: Int): Socket =
- Socket(hostName, port).opt getOrElse fatal("Unable to establish connection to server %s:%d; exiting".format(hostName, port))
-
+ def getSocket(hostName: String, port: Int): Option[Socket] = {
+ val sock = Socket(hostName, port).opt
+ if (sock.isEmpty) warn("Unable to establish connection to server %s:%d".format(hostName, port))
+ sock
+ }
+
def getPassword(port: Int): String = {
val ff = portFile(port)
val f = ff.bufferedReader()
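
On the client side, getOrCreateSocket now threads the fixed port through, and getSocket returns an Option instead of failing fatally. A script-style sketch, with a hypothetical port value:

    // Ask for a connection to a compile daemon on port 32834, starting one if needed.
    val daemon = scala.tools.nsc.CompileSocket.getOrCreateSocket("", create = true, fixedPort = 32834)
    daemon match {
      case Some(socket) => println(s"connected: $socket")   // compile requests would go over this socket
      case None         => println("could not reach or start a compile daemon")
    }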
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index ad75d02bff..1289d55c37 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -5,10 +5,11 @@
package scala.tools.nsc
-import scala.tools.util.PathResolver
+import java.net.URL
+import scala.tools.util.PathResolverFactory
class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
- def classpathURLs = new PathResolver(this).asURLs
+ def classpathURLs: Seq[URL] = PathResolverFactory.create(this).resultAsURLs
val howtorun =
ChoiceSetting(
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 452081cff1..733664c30a 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -8,18 +8,17 @@ package tools
package nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
+import java.net.URL
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
-import java.util.UUID._
import scala.compat.Platform.currentTime
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString }
+import util.{ ClassFileLookup, ClassPath, MergedClassPath, StatisticsInfo, returning }
import scala.reflect.ClassTag
-import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
-import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import scala.reflect.io.VirtualFile
-import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
+import scala.reflect.internal.pickling.PickleBuffer
+import symtab.{ Flags, SymbolTable, SymbolTrackers }
import symtab.classfile.Pickler
import plugins.Plugins
import ast._
@@ -28,13 +27,15 @@ import typechecker._
import transform.patmat.PatternMatching
import transform._
import backend.icode.{ ICodes, GenICode, ICodeCheckers }
-import backend.{ ScalaPrimitives, Platform, JavaPlatform }
+import backend.{ ScalaPrimitives, JavaPlatform }
import backend.jvm.GenBCode
import backend.jvm.GenASM
import backend.opt.{ Inliners, InlineExceptionHandlers, ConstantOptimization, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
import scala.language.postfixOps
import scala.tools.nsc.ast.{TreeGen => AstTreeGen}
+import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.settings.ClassPathRepresentationType
class Global(var currentSettings: Settings, var reporter: Reporter)
extends SymbolTable
@@ -58,7 +59,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
class GlobalMirror extends Roots(NoSymbol) {
val universe: self.type = self
- def rootLoader: LazyType = new loaders.PackageLoader(classPath)
+ def rootLoader: LazyType = {
+ settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat => new loaders.PackageLoaderUsingFlatClassPath(FlatClassPath.RootPackage, flatClassPath)
+ case ClassPathRepresentationType.Recursive => new loaders.PackageLoader(recursiveClassPath)
+ }
+ }
override def toString = "compiler mirror"
}
implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[GlobalMirror])
@@ -104,7 +110,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
type PlatformClassPath = ClassPath[AbstractFile]
type OptClassPath = Option[PlatformClassPath]
- def classPath: PlatformClassPath = platform.classPath
+ def classPath: ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat => flatClassPath
+ case ClassPathRepresentationType.Recursive => recursiveClassPath
+ }
+
+ private def recursiveClassPath: ClassPath[AbstractFile] = platform.classPath
+
+ private def flatClassPath: FlatClassPath = platform.flatClassPath
// sub-components --------------------------------------------------
@@ -319,7 +332,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
None
}
- val charset = ( if (settings.encoding.isSetByUser) Some(settings.encoding.value) else None ) flatMap loadCharset getOrElse {
+ val charset = settings.encoding.valueSetByUser flatMap loadCharset getOrElse {
settings.encoding.value = defaultEncoding // A mandatory charset
Charset.forName(defaultEncoding)
}
@@ -334,16 +347,16 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- ( if (settings.sourceReader.isSetByUser) Some(settings.sourceReader.value) else None ) flatMap loadReader getOrElse {
+ settings.sourceReader.valueSetByUser flatMap loadReader getOrElse {
new SourceReader(charset.newDecoder(), reporter)
}
}
- if (settings.verbose || settings.Ylogcp) {
+ if (settings.verbose || settings.Ylogcp)
reporter.echo(
- s"[search path for source files: ${classPath.sourcepaths.mkString(",")}]\n"+
- s"[search path for class files: ${classPath.asClasspathString}")
- }
+ s"[search path for source files: ${classPath.asSourcePathString}]\n" +
+ s"[search path for class files: ${classPath.asClassPathString}]"
+ )
// The current division between scala.reflect.* and scala.tools.nsc.* is pretty
// clunky. It is often difficult to have a setting influence something without having
@@ -842,6 +855,156 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
} reverse
}
+ // ------------ REPL utilities ---------------------------------
+
+ /** Extend classpath of `platform` and rescan updated packages. */
+ def extendCompilerClassPath(urls: URL*): Unit = {
+ if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat)
+ throw new UnsupportedOperationException("Flat classpath doesn't support extending the compiler classpath")
+
+ val newClassPath = platform.classPath.mergeUrlsIntoClassPath(urls: _*)
+ platform.currentClassPath = Some(newClassPath)
+ // Reload all specified jars into this compiler instance
+ invalidateClassPathEntries(urls.map(_.getPath): _*)
+ }
+
+ // ------------ Invalidations ---------------------------------
+
+ /** Is given package class a system package class that cannot be invalidated?
+ */
+ private def isSystemPackageClass(pkg: Symbol) =
+ pkg == RootClass || (pkg.hasTransOwner(definitions.ScalaPackageClass) && !pkg.hasTransOwner(this.rootMirror.staticPackage("scala.tools").moduleClass.asClass))
+
+ /** Invalidates packages that contain classes defined in a classpath entry, and
+ * rescans that entry.
+ *
+ * First, the classpath entry referred to by one of the `paths` is rescanned,
+ * so that any new files or changes in subpackages are picked up.
+   * Second, any packages for which one of the following conditions is met are invalidated:
+ * - the classpath entry contained during the last compilation run now contains classfiles
+ * that represent a member in the package;
+ * - the classpath entry now contains classfiles that represent a member in the package;
+ * - the set of subpackages has changed.
+ *
+ * The invalidated packages are reset in their entirety; all member classes and member packages
+ * are re-accessed using the new classpath.
+ *
+ * System packages that the compiler needs to access as part of standard definitions
+ * are not invalidated. A system package is:
+ * Any package rooted in "scala", with the exception of packages rooted in "scala.tools".
+ *
+ * @param paths Fully-qualified names that refer to directories or jar files that are
+ * entries on the classpath.
+ */
+ def invalidateClassPathEntries(paths: String*): Unit = {
+ if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat)
+ throw new UnsupportedOperationException("Flat classpath doesn't support the classpath invalidation")
+
+ implicit object ClassPathOrdering extends Ordering[PlatformClassPath] {
+ def compare(a:PlatformClassPath, b:PlatformClassPath) = a.asClassPathString compare b.asClassPathString
+ }
+ val invalidated, failed = new mutable.ListBuffer[ClassSymbol]
+ classPath match {
+ case cp: MergedClassPath[_] =>
+ def assoc(path: String): List[(PlatformClassPath, PlatformClassPath)] = {
+ val dir = AbstractFile.getDirectory(path)
+ val canonical = dir.canonicalPath
+ def matchesCanonical(e: ClassPath[_]) = e.origin match {
+ case Some(opath) =>
+ AbstractFile.getDirectory(opath).canonicalPath == canonical
+ case None =>
+ false
+ }
+ cp.entries find matchesCanonical match {
+ case Some(oldEntry) =>
+ List(oldEntry -> cp.context.newClassPath(dir))
+ case None =>
+ error(s"Error adding entry to classpath. During invalidation, no entry named $path in classpath $classPath")
+ List()
+ }
+ }
+ val subst = immutable.TreeMap(paths flatMap assoc: _*)
+ if (subst.nonEmpty) {
+ platform updateClassPath subst
+ informProgress(s"classpath updated on entries [${subst.keys mkString ","}]")
+ def mkClassPath(elems: Iterable[PlatformClassPath]): PlatformClassPath =
+ if (elems.size == 1) elems.head
+ else new MergedClassPath(elems, recursiveClassPath.context)
+ val oldEntries = mkClassPath(subst.keys)
+ val newEntries = mkClassPath(subst.values)
+ mergeNewEntries(newEntries, RootClass, Some(recursiveClassPath), Some(oldEntries), invalidated, failed)
+ }
+ }
+ def show(msg: String, syms: scala.collection.Traversable[Symbol]) =
+ if (syms.nonEmpty)
+ informProgress(s"$msg: ${syms map (_.fullName) mkString ","}")
+ show("invalidated packages", invalidated)
+ show("could not invalidate system packages", failed)
+ }
+
+ /** Merges new classpath entries into the symbol table
+ *
+ * @param newEntries The new classpath entries
+ * @param root The root symbol to be resynced (a package class)
+ * @param allEntries Optionally, the corresponding package in the complete current classpath
+ * @param oldEntries Optionally, the corresponding package in the old classpath entries
+ * @param invalidated A listbuffer collecting the invalidated package classes
+ * @param failed A listbuffer collecting system package classes which could not be invalidated
+ *
+ * The merging strategy is determined by the absence or presence of classes and packages.
+ *
+ * If either oldEntries or newEntries contains classes, root is invalidated provided that a corresponding package
+ * exists in allEntries. Otherwise it is removed.
+ * Otherwise, the action is determined by the following matrix, with columns:
+ *
+ * old sym action
+ * + + recurse into all child packages of newEntries
+ * - + invalidate root
+ * - - create and enter root
+ *
+ * Here, old means classpath, and sym means symboltable. + is presence of an entry in its column, - is absence.
+ */
+ private def mergeNewEntries(newEntries: PlatformClassPath, root: ClassSymbol,
+ allEntries: OptClassPath, oldEntries: OptClassPath,
+ invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
+ ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
+
+ val getName: ClassPath[AbstractFile] => String = (_.name)
+ def hasClasses(cp: OptClassPath) = cp.isDefined && cp.get.classes.nonEmpty
+ def invalidateOrRemove(root: ClassSymbol) = {
+ allEntries match {
+ case Some(cp) => root setInfo new loaders.PackageLoader(cp)
+ case None => root.owner.info.decls unlink root.sourceModule
+ }
+ invalidated += root
+ }
+ def subPackage(cp: PlatformClassPath, name: String): OptClassPath =
+ cp.packages find (cp1 => getName(cp1) == name)
+
+ val classesFound = hasClasses(oldEntries) || newEntries.classes.nonEmpty
+ if (classesFound && !isSystemPackageClass(root)) {
+ invalidateOrRemove(root)
+ } else {
+ if (classesFound) {
+ if (root.isRoot) invalidateOrRemove(EmptyPackageClass)
+ else failed += root
+ }
+ if (!oldEntries.isDefined) invalidateOrRemove(root)
+ else
+ for (pstr <- newEntries.packages.map(getName)) {
+ val pname = newTermName(pstr)
+ val pkg = (root.info decl pname) orElse {
+ // package does not exist in symbol table, create symbol to track it
+ assert(!subPackage(oldEntries.get, pstr).isDefined)
+ loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get))
+ }
+ mergeNewEntries(subPackage(newEntries, pstr).get, pkg.moduleClass.asClass,
+ subPackage(allEntries.get, pstr), subPackage(oldEntries.get, pstr),
+ invalidated, failed)
+ }
+ }
+ }
+
// ----------- Runs ---------------------------------------
private var curRun: Run = null
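
The two entry points above, extendCompilerClassPath and invalidateClassPathEntries, are what a REPL-style workflow would call after adding a jar at runtime. A script-style sketch against an existing `global` instance (jar path hypothetical; the recursive classpath representation is assumed, since the flat one throws UnsupportedOperationException for both calls):

    // Add a jar to the running compiler and rescan the packages it touches.
    val extraJar = new java.net.URL("file:///tmp/extra.jar")  // hypothetical jar path
    global.extendCompilerClassPath(extraJar)                  // merges the URL and invalidates affected packages
    // Invalidation can also be requested directly for an entry already on the classpath:
    global.invalidateClassPathEntries("/tmp/extra.jar")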
@@ -1232,13 +1395,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** does this run compile given class, module, or case factory? */
// NOTE: Early initialized members temporarily typechecked before the enclosing class, see typedPrimaryConstrBody!
- // Here we work around that wrinkle by claiming that a top-level, early-initialized member is compiled in
+  // Here we work around that wrinkle by claiming that an early-initialized member is compiled in
// *every* run. This approximation works because this method is exclusively called with `this` == `currentRun`.
def compiles(sym: Symbol): Boolean =
if (sym == NoSymbol) false
else if (symSource.isDefinedAt(sym)) true
- else if (sym.isTopLevel && sym.isEarlyInitialized) true
- else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClass)
+ else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClassOrDummy)
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
@@ -1447,10 +1609,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- /** Reset package class to state at typer (not sure what this
- * is needed for?)
+ /** Reset package class to state at typer (not sure what this is needed for?)
*/
- private def resetPackageClass(pclazz: Symbol) {
+ private def resetPackageClass(pclazz: Symbol): Unit = if (typerPhase != NoPhase) {
enteringPhase(firstPhase) {
pclazz.setInfo(enteringPhase(typerPhase)(pclazz.info))
}
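
Which representation classPath returns above is driven by -YclasspathImpl. A sketch of selecting the flat implementation programmatically; the assignment-style wiring and the -usejavacp shortcut are assumptions, while the setting and constant names come from this diff:

    import scala.tools.nsc.{ Global, Settings }
    import scala.tools.nsc.reporters.ConsoleReporter
    import scala.tools.nsc.settings.ClassPathRepresentationType

    val settings = new Settings()
    settings.usejavacp.value = true                                  // assumption: pick up the JVM classpath
    settings.YclasspathImpl.value = ClassPathRepresentationType.Flat // same effect as -YclasspathImpl:flat
    val compiler = new Global(settings, new ConsoleReporter(settings))
    // compiler.classPath is now backed by FlatClassPath rather than the recursive MergedClassPath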
diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala
index 95264aeda6..7c14f4943f 100644
--- a/src/compiler/scala/tools/nsc/ObjectRunner.scala
+++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala
@@ -18,14 +18,14 @@ trait CommonRunner {
* @throws NoSuchMethodException
* @throws InvocationTargetException
*/
- def run(urls: List[URL], objectName: String, arguments: Seq[String]) {
+ def run(urls: Seq[URL], objectName: String, arguments: Seq[String]) {
(ScalaClassLoader fromURLs urls).run(objectName, arguments)
}
/** Catches exceptions enumerated by run (in the case of InvocationTargetException,
* unwrapping it) and returns it any thrown in Left(x).
*/
- def runAndCatch(urls: List[URL], objectName: String, arguments: Seq[String]): Either[Throwable, Boolean] = {
+ def runAndCatch(urls: Seq[URL], objectName: String, arguments: Seq[String]): Either[Throwable, Boolean] = {
try { run(urls, objectName, arguments) ; Right(true) }
catch { case e: Throwable => Left(unwrap(e)) }
}
diff --git a/src/compiler/scala/tools/nsc/Parsing.scala b/src/compiler/scala/tools/nsc/Parsing.scala
index 4dd3c3f378..9e5999ce4f 100644
--- a/src/compiler/scala/tools/nsc/Parsing.scala
+++ b/src/compiler/scala/tools/nsc/Parsing.scala
@@ -7,7 +7,6 @@ package scala
package tools.nsc
import scala.reflect.internal.Positions
-import scala.tools.nsc.reporters.Reporter
/** Similar to Reporting: gather global functionality specific to parsing.
*/
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index cfb4cd23a1..1eb6c9da2c 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -199,7 +199,7 @@ trait PhaseAssembly {
// Add all phases in the set to the graph
val graph = phasesSetToDepGraph(phasesSet)
- val dot = if (settings.genPhaseGraph.isSetByUser) Some(settings.genPhaseGraph.value) else None
+ val dot = settings.genPhaseGraph.valueSetByUser
// Output the phase dependency graph at this stage
def dump(stage: Int) = dot foreach (n => graphToDotFile(graph, s"$n-$stage.dot"))
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index bec686ec05..9f160e2485 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -14,7 +14,9 @@ object Properties extends scala.util.PropertiesTrait {
// settings based on jar properties, falling back to System prefixed by "scala."
def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ")
- def shellInterruptedString = scalaPropOrElse("shell.interrupted", ":quit\n")
+ // message to display at EOF (which by default ends with
+ // a newline so as not to break the user's terminal)
+ def shellInterruptedString = scalaPropOrElse("shell.interrupted", f":quit$lineSeparator")
// derived values
def isEmacsShell = propOrEmpty("env.emacs") != ""
diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala
index c9782de7c8..4d7e9e753f 100644
--- a/src/compiler/scala/tools/nsc/Reporting.scala
+++ b/src/compiler/scala/tools/nsc/Reporting.scala
@@ -7,7 +7,6 @@ package scala
package tools
package nsc
-import reporters.{ Reporter, ConsoleReporter }
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.countElementsAsString
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 7d5c6f6fff..6d24b31531 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -8,7 +8,10 @@ package tools.nsc
import io.{ AbstractFile, Directory, File, Path }
import java.io.IOException
+import scala.tools.nsc.classpath.DirectoryFlatClassPath
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.util.ClassPath.DefaultJavaContext
import util.Exceptional.unwrap
/** An object that runs Scala code in script files.
@@ -112,8 +115,10 @@ class ScriptRunner extends HasCompileSocket {
}
def hasClassToRun(d: Directory): Boolean = {
- import util.ClassPath.{ DefaultJavaContext => ctx }
- val cp = ctx.newClassPath(AbstractFile.getDirectory(d))
+ val cp = settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Recursive => DefaultJavaContext.newClassPath(AbstractFile.getDirectory(d))
+ case ClassPathRepresentationType.Flat => DirectoryFlatClassPath(d.jfile)
+ }
cp.findClass(mainClass).isDefined
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index d3f495f280..f1517e56a0 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -346,12 +346,11 @@ trait MarkupParsers {
// parse more XML ?
if (charComingAfter(xSpaceOpt()) == '<') {
- xSpaceOpt()
- while (ch == '<') {
+ do {
+ xSpaceOpt()
nextch()
ts append element
- xSpaceOpt()
- }
+ } while (charComingAfter(xSpaceOpt()) == '<')
handle.makeXMLseq(r2p(start, start, curOffset), ts)
}
else {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 15d6a4d1b4..4663810003 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1559,7 +1559,7 @@ self =>
}
/** {{{
- * PrefixExpr ::= [`-' | `+' | `~' | `!' | `&'] SimpleExpr
+ * PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr
* }}}
*/
def prefixExpr(): Tree = {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 9ebc94b5fc..92833d647b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -453,18 +453,15 @@ trait Scanners extends ScannersCommon {
getOperatorRest()
}
case '0' =>
- def fetchZero() = {
- putChar(ch)
+ def fetchLeadingZero(): Unit = {
nextChar()
- if (ch == 'x' || ch == 'X') {
- nextChar()
- base = 16
- } else {
- base = 8
+ ch match {
+ case 'x' | 'X' => base = 16 ; nextChar()
+ case _ => base = 8 // single decimal zero, perhaps
}
- getNumber()
}
- fetchZero()
+ fetchLeadingZero()
+ getNumber()
case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
base = 10
getNumber()
@@ -902,62 +899,61 @@ trait Scanners extends ScannersCommon {
*/
def charVal: Char = if (strVal.length > 0) strVal.charAt(0) else 0
- /** Convert current strVal, base to long value
+ /** Convert current strVal, base to long value.
* This is tricky because of max negative value.
+ *
+ * Conversions in base 10 and 16 are supported. As a permanent migration
+ * path, attempts to write base 8 literals except `0` emit a verbose error.
*/
def intVal(negated: Boolean): Long = {
- if (token == CHARLIT && !negated) {
- charVal.toLong
- } else {
- var value: Long = 0
- val divider = if (base == 10) 1 else 2
- val limit: Long =
- if (token == LONGLIT) Long.MaxValue else Int.MaxValue
- var i = 0
+ def malformed: Long = {
+ if (base == 8) syntaxError("Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)")
+ else syntaxError("malformed integer number")
+ 0
+ }
+ def tooBig: Long = {
+ syntaxError("integer number too large")
+ 0
+ }
+ def intConvert: Long = {
val len = strVal.length
- while (i < len) {
- val d = digit2int(strVal charAt i, base)
- if (d < 0) {
- syntaxError("malformed integer number")
- return 0
- }
- if (value < 0 ||
- limit / (base / divider) < value ||
- limit - (d / divider) < value * (base / divider) &&
- !(negated && limit == value * base - 1 + d)) {
- syntaxError("integer number too large")
- return 0
- }
- value = value * base + d
- i += 1
+ if (len == 0) {
+ if (base != 8) syntaxError("missing integer number") // e.g., 0x;
+ 0
+ } else {
+ val divider = if (base == 10) 1 else 2
+ val limit: Long = if (token == LONGLIT) Long.MaxValue else Int.MaxValue
+ @tailrec def convert(value: Long, i: Int): Long =
+ if (i >= len) value
+ else {
+ val d = digit2int(strVal charAt i, base)
+ if (d < 0)
+ malformed
+ else if (value < 0 ||
+ limit / (base / divider) < value ||
+ limit - (d / divider) < value * (base / divider) &&
+ !(negated && limit == value * base - 1 + d))
+ tooBig
+ else
+ convert(value * base + d, i + 1)
+ }
+ val result = convert(0, 0)
+ if (base == 8) malformed else if (negated) -result else result
}
- if (negated) -value else value
}
+ if (token == CHARLIT && !negated) charVal.toLong else intConvert
}
def intVal: Long = intVal(negated = false)
/** Convert current strVal, base to double value
- */
+ */
def floatVal(negated: Boolean): Double = {
-
- val limit: Double =
- if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
+ val limit: Double = if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
try {
val value: Double = java.lang.Double.valueOf(strVal).doubleValue()
- def isDeprecatedForm = {
- val idx = strVal indexOf '.'
- (idx == strVal.length - 1) || (
- (idx >= 0)
- && (idx + 1 < strVal.length)
- && (!Character.isDigit(strVal charAt (idx + 1)))
- )
- }
if (value > limit)
syntaxError("floating point number too large")
- if (isDeprecatedForm)
- syntaxError("floating point number is missing digit after dot")
-
if (negated) -value else value
} catch {
case _: NumberFormatException =>
@@ -968,86 +964,44 @@ trait Scanners extends ScannersCommon {
def floatVal: Double = floatVal(negated = false)
- def checkNoLetter(): Unit = {
+ def checkNoLetter(): Unit = {
if (isIdentifierPart(ch) && ch >= ' ')
syntaxError("Invalid literal number")
}
- /** Read a number into strVal and set base */
- protected def getNumber(): Unit = {
- val base1 = if (base < 10) 10 else base
- // Read 8,9's even if format is octal, produce a malformed number error afterwards.
- // At this point, we have already read the first digit, so to tell an innocent 0 apart
- // from an octal literal 0123... (which we want to disallow), we check whether there
- // are any additional digits coming after the first one we have already read.
- var notSingleZero = false
- while (digit2int(ch, base1) >= 0) {
- putChar(ch)
- nextChar()
- notSingleZero = true
- }
- token = INTLIT
-
- /* When we know for certain it's a number after using a touch of lookahead */
- def restOfNumber() = {
- putChar(ch)
- nextChar()
+ /** Read a number into strVal.
+ *
+ * The `base` can be 8, 10 or 16, where base 8 flags a leading zero.
+ * For ints, base 8 is legal only for the case of exactly one zero.
+ */
+ protected def getNumber(): Unit = {
+ // consume digits of a radix
+ def consumeDigits(radix: Int): Unit =
+ while (digit2int(ch, radix) >= 0) {
+ putChar(ch)
+ nextChar()
+ }
+ // adding decimal point is always OK because `Double valueOf "0."` is OK
+ def restOfNonIntegralNumber(): Unit = {
+ putChar('.')
+ if (ch == '.') nextChar()
getFraction()
}
- def restOfUncertainToken() = {
- def isEfd = ch match { case 'e' | 'E' | 'f' | 'F' | 'd' | 'D' => true ; case _ => false }
- def isL = ch match { case 'l' | 'L' => true ; case _ => false }
-
- if (base <= 10 && isEfd)
- getFraction()
- else {
- // Checking for base == 8 is not enough, because base = 8 is set
- // as soon as a 0 is read in `case '0'` of method fetchToken.
- if (base == 8 && notSingleZero) syntaxError("Non-zero integral values may not have a leading zero.")
- setStrVal()
- if (isL) {
- nextChar()
- token = LONGLIT
- }
- else checkNoLetter()
+ // after int: 5e7f, 42L, 42.toDouble but not 42b. Repair 0d.
+ def restOfNumber(): Unit = {
+ ch match {
+ case 'e' | 'E' | 'f' | 'F' |
+ 'd' | 'D' => if (cbuf.isEmpty) putChar('0'); restOfNonIntegralNumber()
+ case 'l' | 'L' => token = LONGLIT ; setStrVal() ; nextChar()
+ case _ => token = INTLIT ; setStrVal() ; checkNoLetter()
}
}
- if (base > 10 || ch != '.')
- restOfUncertainToken()
- else {
- val lookahead = lookaheadReader
- val c = lookahead.getc()
-
- /* Prohibit 1. */
- if (!isDigit(c))
- return setStrVal()
-
- val isDefinitelyNumber = (c: @switch) match {
- /** Another digit is a giveaway. */
- case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
- true
+ // consume leading digits, provisionally an Int
+ consumeDigits(if (base == 16) 16 else 10)
- /* Backquoted idents like 22.`foo`. */
- case '`' =>
- return setStrVal() /** Note the early return */
-
- /* These letters may be part of a literal, or a method invocation on an Int.
- */
- case 'd' | 'D' | 'f' | 'F' =>
- !isIdentifierPart(lookahead.getc())
-
- /* A little more special handling for e.g. 5e7 */
- case 'e' | 'E' =>
- val ch = lookahead.getc()
- !isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')
-
- case x =>
- !isIdentifierStart(x)
- }
- if (isDefinitelyNumber) restOfNumber()
- else restOfUncertainToken()
- }
+ val detectedFloat: Boolean = base != 16 && ch == '.' && isDigit(lookaheadReader.getc)
+ if (detectedFloat) restOfNonIntegralNumber() else restOfNumber()
}
/** Parse character literal if current character is followed by \',
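
The net effect of the scanner rework on literal syntax, shown as a few illustrative declarations (the rejected octal form is commented out):

    val zero = 0       // a lone zero still scans with base 8 but remains legal
    val hex  = 0x1F    // hexadecimal literals are unchanged
    val big  = 42L     // integer suffixes are handled in restOfNumber()
    val flt  = 5e7f    // exponent/suffix forms are routed through restOfNonIntegralNumber()
    // val oct = 012   // now an error: "Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)"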
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 1abc0c860c..8cd915bf22 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -184,7 +184,8 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
)
val uri1 = attrMap(z) match {
- case Apply(_, List(uri @ Literal(Constant(_)))) => mkAssign(uri)
+ case Apply(Select(New(Select(Select(Select(Ident(nme.ROOTPKG), nme.scala_), nme.xml), tpnme.Text)), nme.CONSTRUCTOR), List(uri @ Literal(Constant(_)))) =>
+ mkAssign(uri)
case Select(_, nme.Nil) => mkAssign(const(null)) // allow for xmlns="" -- bug #1626
case x => mkAssign(x)
}
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 7236bf70d5..6bd123c51f 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -7,7 +7,10 @@ package scala.tools.nsc
package backend
import io.AbstractFile
-import util.{ClassPath,MergedClassPath,DeltaClassPath}
+import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.util.{ ClassPath, DeltaClassPath, MergedClassPath }
+import scala.tools.util.FlatClassPathResolver
import scala.tools.util.PathResolver
trait JavaPlatform extends Platform {
@@ -16,13 +19,23 @@ trait JavaPlatform extends Platform {
import global._
import definitions._
- private var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
+ private[nsc] var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
def classPath: ClassPath[AbstractFile] = {
+ assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Recursive,
+ "To use recursive classpath representation you must enable it with -YclasspathImpl:recursive compiler option.")
+
if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
currentClassPath.get
}
+ private[nsc] lazy val flatClassPath: FlatClassPath = {
+ assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Flat,
+ "To use flat classpath representation you must enable it with -YclasspathImpl:flat compiler option.")
+
+ new FlatClassPathResolver(settings).result
+ }
+
/** Update classpath with a substituted subentry */
def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]) =
currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst))
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
index 439cc1efb8..c3bc213be1 100644
--- a/src/compiler/scala/tools/nsc/backend/Platform.scala
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -8,6 +8,7 @@ package backend
import util.ClassPath
import io.AbstractFile
+import scala.tools.nsc.classpath.FlatClassPath
/** The platform dependent pieces of Global.
*/
@@ -15,9 +16,12 @@ trait Platform {
val symbolTable: symtab.SymbolTable
import symbolTable._
- /** The compiler classpath. */
+ /** The old, recursive implementation of compiler classpath. */
def classPath: ClassPath[AbstractFile]
+ /** The new implementation of compiler classpath. */
+ private[nsc] def flatClassPath: FlatClassPath
+
/** Update classpath with a substitution that maps entries to entries */
def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]])
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index f9551697d2..ad1975ef23 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -300,14 +300,16 @@ trait BasicBlocks {
if (!closed)
instructionList = instructionList map (x => map.getOrElse(x, x))
else
- instrs.zipWithIndex collect {
- case (oldInstr, i) if map contains oldInstr =>
- // SI-6288 clone important here because `replaceInstruction` assigns
- // a position to `newInstr`. Without this, a single instruction can
- // be added twice, and the position last position assigned clobbers
- // all previous positions in other usages.
- val newInstr = map(oldInstr).clone()
- code.touched |= replaceInstruction(i, newInstr)
+ instrs.iterator.zipWithIndex foreach {
+ case (oldInstr, i) =>
+ if (map contains oldInstr) {
+ // SI-6288 clone important here because `replaceInstruction` assigns
+ // a position to `newInstr`. Without this, a single instruction can
+ // be added twice, and the last position assigned clobbers
+ // all previous positions in other usages.
+ val newInstr = map(oldInstr).clone()
+ code.touched |= replaceInstruction(i, newInstr)
+ }
}
////////////////////// Emit //////////////////////
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index bc35a9e7de..10f0c6ee00 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -113,7 +113,8 @@ abstract class ICodes extends AnyRef
global.loaders.lookupMemberAtTyperPhaseIfPossible(sym, name)
lazy val symbolTable: global.type = global
lazy val loaders: global.loaders.type = global.loaders
- def classPath: util.ClassPath[AbstractFile] = ICodes.this.global.platform.classPath
+
+ def classFileLookup: util.ClassFileLookup[AbstractFile] = global.classPath
}
/** A phase which works on icode. */
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
index 2af2037fec..7269910af6 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc.backend.jvm
-import scala.tools.asm.tree.{ClassNode, MethodNode}
+import scala.tools.asm.tree.{AbstractInsnNode, ClassNode, MethodNode}
import java.io.PrintWriter
import scala.tools.asm.util.{TraceClassVisitor, TraceMethodVisitor, Textifier}
import scala.tools.asm.ClassReader
@@ -58,4 +58,9 @@ object AsmUtils {
new ClassReader(bytes).accept(node, 0)
node
}
+
+ def instructionString(instruction: AbstractInsnNode): String = instruction.getOpcode match {
+ case -1 => instruction.toString
+ case op => scala.tools.asm.util.Printer.OPCODES(op)
+ }
}
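A quick usage sketch for the helper added above (the demo object is made up):

import scala.tools.asm.Opcodes
import scala.tools.asm.tree.{InsnNode, LabelNode}
import scala.tools.nsc.backend.jvm.AsmUtils

object InstructionStringDemo extends App {
  println(AsmUtils.instructionString(new InsnNode(Opcodes.IRETURN))) // prints "IRETURN"
  println(AsmUtils.instructionString(new LabelNode()))               // opcode -1: falls back to toString
}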
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
index 54d4a30553..328ec8a033 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc.backend.jvm
import scala.tools.nsc.Global
-import PartialFunction._
/**
* This trait contains code shared between GenBCode and GenASM that depends on types defined in
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
index 0e2f938602..3b7cbd6392 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
@@ -195,32 +195,13 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
case _ => NoSymbol
}
- /**
- * Drop redundant interfaces (which are implemented by some other parent) from the immediate
- * parents. In other words, no two interfaces in the result are related by subtyping.
- */
- def dropRedundantInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
- var rest = lstIfaces
- var leaves = List.empty[Symbol]
- while (!rest.isEmpty) {
- val candidate = rest.head
- val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
- if (!nonLeaf) {
- leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
- }
- rest = rest.tail
- }
-
- leaves
- }
-
val superInterfaces0: List[Symbol] = classSym.mixinClasses
val superInterfaces = existingSymbols(superInterfaces0 ++ classSym.annotations.map(newParentForAnnotation)).distinct
assert(!superInterfaces.contains(NoSymbol), s"found NoSymbol among: ${superInterfaces.mkString(", ")}")
assert(superInterfaces.forall(s => s.isInterface || s.isTrait), s"found non-interface among: ${superInterfaces.mkString(", ")}")
- dropRedundantInterfaces(superInterfaces)
+ erasure.minimizeInterfaces(superInterfaces.map(_.info)).map(_.typeSymbol)
}
private def buildNestedInfo(innerClassSym: Symbol): Option[NestedInfo] = {
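The behaviour of the removed helper (and of the shared erasure.minimizeInterfaces that replaces it) can be shown on plain data; this standalone sketch mirrors the dropped loop and is illustrative only, not the compiler's implementation:

object MinimizeInterfacesSketch extends App {
  // isSubtype(a, b) means "a is a subtype of b"; keep only the most specific entries.
  def minimize[A](ifaces: List[A])(isSubtype: (A, A) => Boolean): List[A] =
    ifaces.foldLeft(List.empty[A]) { (leaves, candidate) =>
      if (leaves.exists(leaf => isSubtype(leaf, candidate))) leaves          // candidate already implied by a leaf
      else candidate :: leaves.filterNot(leaf => isSubtype(candidate, leaf)) // candidate supersedes weaker leaves
    }

  // Toy hierarchy: C <: B <: A; D is unrelated.
  val supers = Map("B" -> Set("A"), "C" -> Set("A", "B")).withDefaultValue(Set.empty[String])
  def isSub(a: String, b: String) = a == b || supers(a)(b)

  println(minimize(List("A", "B", "C", "D"))(isSub)) // List(D, C) -- A and B are implied by C
}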
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
index 4b9383c67c..03306f30aa 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
@@ -14,7 +14,7 @@ object BackendStats {
val bcodeInitTimer = newSubTimer("bcode initialization", bcodeTimer)
val bcodeGenStat = newSubTimer("code generation", bcodeTimer)
- val bcodeDceTimer = newSubTimer("dead code elimination", bcodeTimer)
+ val methodOptTimer = newSubTimer("intra-method optimizations", bcodeTimer)
val bcodeWriteTimer = newSubTimer("classfile writing", bcodeTimer)
def timed[T](timer: Statistics.Timer)(body: => T): T = {
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index d0a12c32e5..e56a20c2e7 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -1214,30 +1214,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self =>
case _ => NoSymbol
}
- /* Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
- * This is important on Android because there is otherwise an interface explosion.
- */
- def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
- var rest = lstIfaces
- var leaves = List.empty[Symbol]
- while(!rest.isEmpty) {
- val candidate = rest.head
- val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
- if(!nonLeaf) {
- leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
- }
- rest = rest.tail
- }
-
- leaves
- }
-
val ps = c.symbol.info.parents
val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses
val superInterfaces = existingSymbols(superInterfaces0 ++ c.symbol.annotations.map(newParentForAttr)).distinct
if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY
- else mkArray(minimizeInterfaces(superInterfaces) map javaName)
+ else mkArray(erasure.minimizeInterfaces(superInterfaces.map(_.info)).map(t => javaName(t.typeSymbol)))
}
var clasz: IClass = _ // this var must be assigned only by genClass()
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
index ba94a9c44c..a45f586666 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
@@ -9,12 +9,12 @@ package tools.nsc
package backend
package jvm
-import scala.collection.{ mutable, immutable }
-import scala.annotation.switch
+import scala.collection.mutable
import scala.reflect.internal.util.Statistics
import scala.tools.asm
import scala.tools.asm.tree.ClassNode
+import scala.tools.nsc.backend.jvm.opt.LocalOpt
/*
* Prepare in-memory representations of classfiles using the ASM Tree API, and serialize them to disk.
@@ -215,13 +215,10 @@ abstract class GenBCode extends BCodeSyncAndTry {
* - converting the plain ClassNode to byte array and placing it on queue-3
*/
class Worker2 {
- def localOptimizations(classNode: ClassNode): Unit = {
- def dce(): Boolean = BackendStats.timed(BackendStats.bcodeDceTimer) {
- if (settings.YoptUnreachableCode) opt.LocalOpt.removeUnreachableCode(classNode)
- else false
- }
+ lazy val localOpt = new LocalOpt(settings)
- dce()
+ def localOptimizations(classNode: ClassNode): Unit = {
+ BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode))
}
def run() {
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
new file mode 100644
index 0000000000..6b4047c0a7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
@@ -0,0 +1,184 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.annotation.{tailrec, switch}
+import scala.collection.mutable
+import scala.reflect.internal.util.Collections._
+import scala.tools.asm.Opcodes
+import scala.tools.asm.tree._
+import scala.collection.convert.decorateAsScala._
+
+object BytecodeUtils {
+
+ object Goto {
+ def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = {
+ if (instruction.getOpcode == Opcodes.GOTO) Some(instruction.asInstanceOf[JumpInsnNode])
+ else None
+ }
+ }
+
+ object JumpNonJsr {
+ def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = {
+ if (isJumpNonJsr(instruction)) Some(instruction.asInstanceOf[JumpInsnNode])
+ else None
+ }
+ }
+
+ object ConditionalJump {
+ def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = {
+ if (isConditionalJump(instruction)) Some(instruction.asInstanceOf[JumpInsnNode])
+ else None
+ }
+ }
+
+ object VarInstruction {
+ def unapply(instruction: AbstractInsnNode): Option[VarInsnNode] = {
+ if (isVarInstruction(instruction)) Some(instruction.asInstanceOf[VarInsnNode])
+ else None
+ }
+
+ }
+
+ def isJumpNonJsr(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ // JSR is deprecated in classfile version 50, disallowed in 51. Historically, it was used to implement finally.
+ op == Opcodes.GOTO || isConditionalJump(instruction)
+ }
+
+ def isConditionalJump(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ (op >= Opcodes.IFEQ && op <= Opcodes.IF_ACMPNE) || op == Opcodes.IFNULL || op == Opcodes.IFNONNULL
+ }
+
+ def isReturn(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ op >= Opcodes.IRETURN && op <= Opcodes.RETURN
+ }
+
+ def isVarInstruction(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ (op >= Opcodes.ILOAD && op <= Opcodes.ALOAD) || (op >= Opcodes.ISTORE && op <= Opcodes.ASTORE)
+ }
+
+ def isExecutable(instruction: AbstractInsnNode): Boolean = instruction.getOpcode >= 0
+
+ def nextExecutableInstruction(instruction: AbstractInsnNode, alsoKeep: AbstractInsnNode => Boolean = Set()): Option[AbstractInsnNode] = {
+ var result = instruction
+ do { result = result.getNext }
+ while (result != null && !isExecutable(result) && !alsoKeep(result))
+ Option(result)
+ }
+
+ def sameTargetExecutableInstruction(a: JumpInsnNode, b: JumpInsnNode): Boolean = {
+ // Compare the next executable instruction instead of the labels. Identifies a and b as the same target:
+ // LabelNode(a)
+ // LabelNode(b)
+ // Instr
+ nextExecutableInstruction(a.label) == nextExecutableInstruction(b.label)
+ }
+
+ def removeJumpAndAdjustStack(method: MethodNode, jump: JumpInsnNode) {
+ val instructions = method.instructions
+ val op = jump.getOpcode
+ if ((op >= Opcodes.IFEQ && op <= Opcodes.IFLE) || op == Opcodes.IFNULL || op == Opcodes.IFNONNULL) { // all single-operand conditional jumps
+ instructions.insert(jump, getPop(1))
+ } else if ((op >= Opcodes.IF_ICMPEQ && op <= Opcodes.IF_ICMPLE) || op == Opcodes.IF_ACMPEQ || op == Opcodes.IF_ACMPNE) {
+ instructions.insert(jump, getPop(1))
+ instructions.insert(jump, getPop(1))
+ } else {
+ // we can't remove JSR: it does not only jump, it also pushes a return address onto the stack
+ assert(jump.getOpcode == Opcodes.GOTO)
+ }
+ instructions.remove(jump)
+ }
+
+ def finalJumpTarget(source: JumpInsnNode): LabelNode = {
+ @tailrec def followGoto(label: LabelNode, seenLabels: Set[LabelNode]): LabelNode = nextExecutableInstruction(label) match {
+ case Some(Goto(dest)) =>
+ if (seenLabels(dest.label)) dest.label
+ else followGoto(dest.label, seenLabels + dest.label)
+
+ case _ => label
+ }
+ followGoto(source.label, Set(source.label))
+ }
+
+ def negateJumpOpcode(jumpOpcode: Int): Int = (jumpOpcode: @switch) match {
+ case Opcodes.IFEQ => Opcodes.IFNE
+ case Opcodes.IFNE => Opcodes.IFEQ
+
+ case Opcodes.IFLT => Opcodes.IFGE
+ case Opcodes.IFGE => Opcodes.IFLT
+
+ case Opcodes.IFGT => Opcodes.IFLE
+ case Opcodes.IFLE => Opcodes.IFGT
+
+ case Opcodes.IF_ICMPEQ => Opcodes.IF_ICMPNE
+ case Opcodes.IF_ICMPNE => Opcodes.IF_ICMPEQ
+
+ case Opcodes.IF_ICMPLT => Opcodes.IF_ICMPGE
+ case Opcodes.IF_ICMPGE => Opcodes.IF_ICMPLT
+
+ case Opcodes.IF_ICMPGT => Opcodes.IF_ICMPLE
+ case Opcodes.IF_ICMPLE => Opcodes.IF_ICMPGT
+
+ case Opcodes.IF_ACMPEQ => Opcodes.IF_ACMPNE
+ case Opcodes.IF_ACMPNE => Opcodes.IF_ACMPEQ
+
+ case Opcodes.IFNULL => Opcodes.IFNONNULL
+ case Opcodes.IFNONNULL => Opcodes.IFNULL
+ }
+
+ def getPop(size: Int): InsnNode = {
+ val op = if (size == 1) Opcodes.POP else Opcodes.POP2
+ new InsnNode(op)
+ }
+
+ def labelReferences(method: MethodNode): Map[LabelNode, Set[AnyRef]] = {
+ val res = mutable.Map.empty[LabelNode, Set[AnyRef]]
+ def add(l: LabelNode, ref: AnyRef) = if (res contains l) res(l) = res(l) + ref else res(l) = Set(ref)
+
+ method.instructions.iterator().asScala foreach {
+ case jump: JumpInsnNode => add(jump.label, jump)
+ case line: LineNumberNode => add(line.start, line)
+ case switch: LookupSwitchInsnNode => switch.labels.asScala.foreach(add(_, switch)); add(switch.dflt, switch)
+ case switch: TableSwitchInsnNode => switch.labels.asScala.foreach(add(_, switch)); add(switch.dflt, switch)
+ case _ =>
+ }
+ if (method.localVariables != null) {
+ method.localVariables.iterator().asScala.foreach(l => { add(l.start, l); add(l.end, l) })
+ }
+ if (method.tryCatchBlocks != null) {
+ method.tryCatchBlocks.iterator().asScala.foreach(l => { add(l.start, l); add(l.handler, l); add(l.end, l) })
+ }
+
+ res.toMap
+ }
+
+ def substituteLabel(reference: AnyRef, from: LabelNode, to: LabelNode): Unit = {
+ def substList(list: java.util.List[LabelNode]) = {
+ foreachWithIndex(list.asScala.toList) { case (l, i) =>
+ if (l == from) list.set(i, to)
+ }
+ }
+ reference match {
+ case jump: JumpInsnNode => jump.label = to
+ case line: LineNumberNode => line.start = to
+ case switch: LookupSwitchInsnNode => substList(switch.labels); if (switch.dflt == from) switch.dflt = to
+ case switch: TableSwitchInsnNode => substList(switch.labels); if (switch.dflt == from) switch.dflt = to
+ case local: LocalVariableNode =>
+ if (local.start == from) local.start = to
+ if (local.end == from) local.end = to
+ case handler: TryCatchBlockNode =>
+ if (handler.start == from) handler.start = to
+ if (handler.handler == from) handler.handler = to
+ if (handler.end == from) handler.end = to
+ }
+ }
+}
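A sketch exercising a few of the helpers above on a hand-built ASM MethodNode; the method name, descriptor and demo object are made up:

import scala.tools.asm.Opcodes
import scala.tools.asm.tree.{InsnNode, JumpInsnNode, LabelNode, MethodNode}
import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._

object BytecodeUtilsDemo extends App {
  // static int f(): IFEQ branching over ICONST_1/IRETURN to ICONST_0/IRETURN.
  val m = new MethodNode(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "f", "()I", null, null)
  val target = new LabelNode()
  m.instructions.add(new InsnNode(Opcodes.ICONST_0))
  m.instructions.add(new JumpInsnNode(Opcodes.IFEQ, target))
  m.instructions.add(new InsnNode(Opcodes.ICONST_1))
  m.instructions.add(new InsnNode(Opcodes.IRETURN))
  m.instructions.add(target)
  m.instructions.add(new InsnNode(Opcodes.ICONST_0))
  m.instructions.add(new InsnNode(Opcodes.IRETURN))

  val jump = m.instructions.get(1)
  assert(isConditionalJump(jump) && isJumpNonJsr(jump))
  assert(negateJumpOpcode(Opcodes.IFEQ) == Opcodes.IFNE)
  assert(labelReferences(m)(target) == Set(jump)) // only the conditional jump refers to `target`
}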
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
index 3acd2d6154..273112b93c 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
@@ -7,125 +7,206 @@ package scala.tools.nsc
package backend.jvm
package opt
+import scala.annotation.switch
import scala.tools.asm.{Opcodes, MethodWriter, ClassWriter}
import scala.tools.asm.tree.analysis.{Analyzer, BasicValue, BasicInterpreter}
import scala.tools.asm.tree._
import scala.collection.convert.decorateAsScala._
-import scala.collection.{ mutable => m }
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._
+import scala.tools.nsc.settings.ScalaSettings
/**
- * Intra-Method optimizations.
+ * Optimizations within a single method.
+ *
+ * unreachable code
+ * - removes instructions of basic blocks to which no branch instruction points
+ * + enables eliminating some exception handlers and local variable descriptors
+ * > eliminating them is required for correctness, as explained in `removeUnreachableCode`
+ *
+ * empty exception handlers
+ * - removes exception handlers whose try block is empty
+ * + eliminating a handler whose try block is empty and reachable turns the catch block
+ * unreachable. In this case "unreachable code" is invoked recursively until reaching a fixpoint.
+ * > for try blocks that are unreachable, "unreachable code" also removes the instructions of the
+ * catch block, and the recursive invocation is not necessary.
+ *
+ * simplify jumps
+ * - various simplifications, see doc comments of individual optimizations
+ * + changing or eliminating jumps may render some code unreachable, therefore "simplify jumps" is
+ * executed in a loop with "unreachable code"
+ *
+ * empty local variable descriptors
+ * - removes entries from the local variable table where the variable is not actually used
+ * + enables eliminating labels that the entry points to (if they are not otherwise referenced)
+ *
+ * empty line numbers
+ * - eliminates line number nodes that describe no executable instructions
+ * + enables eliminating the label of the line number node (if it's not otherwise referenced)
+ *
+ * stale labels
+ * - eliminates labels that are not referenced, merges sequences of label definitions.
*/
-object LocalOpt {
+class LocalOpt(settings: ScalaSettings) {
/**
- * Remove unreachable instructions from all (non-abstract) methods.
+ * Remove unreachable instructions from all (non-abstract) methods and apply various other
+ * cleanups to the bytecode.
*
* @param clazz The class whose methods are optimized
* @return `true` if unreachable code was elminated in some method, `false` otherwise.
*/
- def removeUnreachableCode(clazz: ClassNode): Boolean = {
- clazz.methods.asScala.foldLeft(false) {
- case (changed, method) => removeUnreachableCode(method, clazz.name) || changed
+ def methodOptimizations(clazz: ClassNode): Boolean = {
+ settings.Yopt.value.nonEmpty && clazz.methods.asScala.foldLeft(false) {
+ case (changed, method) => methodOptimizations(method, clazz.name) || changed
}
}
/**
* Remove unreachable code from a method.
+ *
* We rely on dead code elimination provided by the ASM framework, as described in the ASM User
* Guide (http://asm.ow2.org/index.html), Section 8.2.1. It runs a data flow analysis, which only
* computes Frame information for reachable instructions. Instructions for which no Frame data is
* available after the analyis are unreachable.
*
- * TODO doc: it also removes empty handlers, unused local vars
+ * Also simplifies branching instructions, removes unused local variable descriptors, empty
+ * exception handlers, unnecessary label declarations and empty line number nodes.
*
- * Returns `true` if dead code in `method` has been eliminated.
+ * Returns `true` if the bytecode of `method` was changed.
*/
- private def removeUnreachableCode(method: MethodNode, ownerClassName: String): Boolean = {
+ private def methodOptimizations(method: MethodNode, ownerClassName: String): Boolean = {
if (method.instructions.size == 0) return false // fast path for abstract methods
- val codeRemoved = removeUnreachableCodeImpl(method, ownerClassName)
-
// unreachable-code also removes unused local variable nodes and empty exception handlers.
- // This is required for correctness: such nodes are not allowed to refer to instruction offsets
- // that don't exist (because they have been eliminated).
- val localsRemoved = removeUnusedLocalVariableNodes(method)
- val handlersRemoved = removeEmptyExceptionHandlers(method)
-
- // When eliminating a handler, the catch block becomes unreachable. The recursive invocation
- // removes these blocks.
- // Note that invoking removeUnreachableCode*Impl* a second time is not enough: removing the dead
- // catch block can render other handlers empty, which also have to be removed in turn.
- if (handlersRemoved) removeUnreachableCode(method, ownerClassName)
-
- // assert that we can leave local variable annotations as-is
+ // This is required for correctness, for example:
+ //
+ // def f = { return 0; try { 1 } catch { case _ => 2 } }
+ //
+ // The result after removeUnreachableCodeImpl:
+ //
+ // TRYCATCHBLOCK L0 L1 L2 java/lang/Exception
+ // L4
+ // ICONST_0
+ // IRETURN
+ // L0
+ // L1
+ // L2
+ //
+ // If we don't eliminate the handler, the ClassWriter emits:
+ //
+ // TRYCATCHBLOCK L0 L0 L0 java/lang/Exception
+ // L1
+ // ICONST_0
+ // IRETURN
+ // L0
+ //
+ // This triggers "ClassFormatError: Illegal exception table range in class file C". Similar
+ // for local variables in dead blocks. Maybe that's a bug in the ASM framework.
+
+ var recurse = true
+ var codeHandlersOrJumpsChanged = false
+ while (recurse) {
+ // unreachable-code, empty-handlers and simplify-jumps run until reaching a fixpoint (see doc on class LocalOpt)
+ val (codeRemoved, handlersRemoved, liveHandlerRemoved) = if (settings.YoptUnreachableCode) {
+ val (codeRemoved, liveLabels) = removeUnreachableCodeImpl(method, ownerClassName)
+ val removedHandlers = removeEmptyExceptionHandlers(method)
+ (codeRemoved, removedHandlers.nonEmpty, removedHandlers.exists(h => liveLabels(h.start)))
+ } else {
+ (false, false, false)
+ }
+
+ val jumpsChanged = if (settings.YoptSimplifyJumps) simplifyJumps(method) else false
+
+ codeHandlersOrJumpsChanged ||= (codeRemoved || handlersRemoved || jumpsChanged)
+
+ // The doc comment of class LocalOpt explains why we recurse if jumpsChanged || liveHandlerRemoved
+ recurse = settings.YoptRecurseUnreachableJumps && (jumpsChanged || liveHandlerRemoved)
+ }
+
+ // (*) Removing stale local variable descriptors is required for correctness of unreachable-code
+ val localsRemoved =
+ if (settings.YoptCompactLocals) compactLocalVariables(method)
+ else if (settings.YoptUnreachableCode) removeUnusedLocalVariableNodes(method)() // (*)
+ else false
+
+ val lineNumbersRemoved = if (settings.YoptEmptyLineNumbers) removeEmptyLineNumbers(method) else false
+
+ val labelsRemoved = if (settings.YoptEmptyLabels) removeEmptyLabelNodes(method) else false
+
+ // assert that local variable annotations are empty (we don't emit them) - otherwise we'd have
+ // to eliminate those covering an empty range, similar to removeUnusedLocalVariableNodes.
def nullOrEmpty[T](l: java.util.List[T]) = l == null || l.isEmpty
assert(nullOrEmpty(method.visibleLocalVariableAnnotations), method.visibleLocalVariableAnnotations)
assert(nullOrEmpty(method.invisibleLocalVariableAnnotations), method.invisibleLocalVariableAnnotations)
- codeRemoved || localsRemoved || handlersRemoved
+ codeHandlersOrJumpsChanged || localsRemoved || lineNumbersRemoved || labelsRemoved
}
- private def removeUnreachableCodeImpl(method: MethodNode, ownerClassName: String): Boolean = {
- val initialSize = method.instructions.size
- if (initialSize == 0) return false
-
+ /**
+ * Removes unreachable basic blocks.
+ *
+ * TODO: rewrite, don't use computeMaxLocalsMaxStack (runs a ClassWriter) / Analyzer. Too slow.
+ */
+ def removeUnreachableCodeImpl(method: MethodNode, ownerClassName: String): (Boolean, Set[LabelNode]) = {
// The data flow analysis requires the maxLocals / maxStack fields of the method to be computed.
computeMaxLocalsMaxStack(method)
val a = new Analyzer[BasicValue](new BasicInterpreter)
a.analyze(ownerClassName, method)
val frames = a.getFrames
+ val initialSize = method.instructions.size
var i = 0
+ var liveLabels = Set.empty[LabelNode]
val itr = method.instructions.iterator()
while (itr.hasNext) {
- val ins = itr.next()
- // Don't remove label nodes: they might be referenced for example in a LocalVariableNode
- if (frames(i) == null && !ins.isInstanceOf[LabelNode]) {
- // Instruction iterators allow removing during iteration.
- // Removing is O(1): instructions are doubly linked list elements.
- itr.remove()
+ itr.next() match {
+ case l: LabelNode =>
+ if (frames(i) != null) liveLabels += l
+
+ case ins =>
+ // label nodes are not removed: they might be referenced for example in a LocalVariableNode
+ if (frames(i) == null || ins.getOpcode == Opcodes.NOP) {
+ // Instruction iterators allow removing during iteration.
+ // Removing is O(1): instructions are doubly linked list elements.
+ itr.remove()
+ }
}
i += 1
}
-
- method.instructions.size != initialSize
- }
-
- /**
- * Remove exception handlers that cover empty code blocks from all methods of `clazz`.
- * Returns `true` if any exception handler was eliminated.
- */
- def removeEmptyExceptionHandlers(clazz: ClassNode): Boolean = {
- clazz.methods.asScala.foldLeft(false) {
- case (changed, method) => removeEmptyExceptionHandlers(method) || changed
- }
+ (method.instructions.size != initialSize, liveLabels)
}
/**
* Remove exception handlers that cover empty code blocks. A block is considered empty if it
* consist only of labels, frames, line numbers, nops and gotos.
*
+ * There are no executable instructions that we can assume don't throw (e.g. ILOAD). The JVM spec
+ * basically says that a VirtualMachineError may be thrown at any time:
+ * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.3
+ *
* Note that no instructions are eliminated.
*
- * @return `true` if some exception handler was eliminated.
+ * @return the set of removed handlers
*/
- def removeEmptyExceptionHandlers(method: MethodNode): Boolean = {
+ def removeEmptyExceptionHandlers(method: MethodNode): Set[TryCatchBlockNode] = {
/** True if there exists code between start and end. */
def containsExecutableCode(start: AbstractInsnNode, end: LabelNode): Boolean = {
- start != end && (start.getOpcode match {
+ start != end && ((start.getOpcode : @switch) match {
// FrameNode, LabelNode and LineNumberNode have opcode == -1.
- case -1 | Opcodes.NOP | Opcodes.GOTO => containsExecutableCode(start.getNext, end)
+ case -1 | Opcodes.GOTO => containsExecutableCode(start.getNext, end)
case _ => true
})
}
- val initialNumberHandlers = method.tryCatchBlocks.size
+ var removedHandlers = Set.empty[TryCatchBlockNode]
val handlersIter = method.tryCatchBlocks.iterator()
while(handlersIter.hasNext) {
val handler = handlersIter.next()
- if (!containsExecutableCode(handler.start, handler.end)) handlersIter.remove()
+ if (!containsExecutableCode(handler.start, handler.end)) {
+ removedHandlers += handler
+ handlersIter.remove()
+ }
}
- method.tryCatchBlocks.size != initialNumberHandlers
+ removedHandlers
}
/**
@@ -135,35 +216,107 @@ object LocalOpt {
* Note that each entry in the local variable table has a start, end and index. Two entries with
* the same index, but distinct start / end ranges are different variables, they may have not the
* same type or name.
- *
- * TODO: also re-allocate locals to occupy fewer slots after eliminating unused ones
*/
- def removeUnusedLocalVariableNodes(method: MethodNode): Boolean = {
+ def removeUnusedLocalVariableNodes(method: MethodNode)(firstLocalIndex: Int = parametersSize(method), renumber: Int => Int = identity): Boolean = {
def variableIsUsed(start: AbstractInsnNode, end: LabelNode, varIndex: Int): Boolean = {
start != end && (start match {
- case v: VarInsnNode => v.`var` == varIndex
+ case v: VarInsnNode if v.`var` == varIndex => true
case _ => variableIsUsed(start.getNext, end, varIndex)
})
}
val initialNumVars = method.localVariables.size
val localsIter = method.localVariables.iterator()
- // The parameters and `this` (for instance methods) have the lowest indices in the local variables
- // table. Note that double / long fields occupy two slots, so we sum up the sizes. Since getSize
- // returns 0 for void, we have to add `max 1`.
- val paramsSize = scala.tools.asm.Type.getArgumentTypes(method.desc).map(_.getSize max 1).sum
- val thisSize = if ((method.access & Opcodes.ACC_STATIC) == 0) 1 else 0
- val endParamIndex = paramsSize + thisSize
while (localsIter.hasNext) {
val local = localsIter.next()
- // parameters and `this` have the lowest indices, starting at 0
- val used = local.index < endParamIndex || variableIsUsed(local.start, local.end, local.index)
- if (!used)
- localsIter.remove()
+ val index = local.index
+ // parameters and `this` (the lowest indices, starting at 0) are never removed or renumbered
+ if (index >= firstLocalIndex) {
+ if (!variableIsUsed(local.start, local.end, index)) localsIter.remove()
+ else if (renumber(index) != index) local.index = renumber(index)
+ }
}
- method.localVariables.size == initialNumVars
+ method.localVariables.size != initialNumVars
}
+ /**
+ * The number of local variable slots used for parameters and for the `this` reference.
+ */
+ private def parametersSize(method: MethodNode): Int = {
+ // Double / long fields occupy two slots, so we sum up the sizes. Since getSize returns 0 for
+ // void, we have to add `max 1`.
+ val paramsSize = scala.tools.asm.Type.getArgumentTypes(method.desc).iterator.map(_.getSize max 1).sum
+ val thisSize = if ((method.access & Opcodes.ACC_STATIC) == 0) 1 else 0
+ paramsSize + thisSize
+ }
+
+ /**
+ * Compact the local variable slots used in the method's implementation. This prevents having
+ * unused slots for example after eliminating unreachable code.
+ *
+ * This transformation reduces the size of the frame for invoking the method. For example, if the
+ * method has an ISTORE instruction to the local variable 3, the maxLocals of the method is at
+ * least 4, even if some local variable slots below 3 are not used by any instruction.
+ *
+ * This could be improved by doing proper register allocation.
+ */
+ def compactLocalVariables(method: MethodNode): Boolean = {
+ // This array is built up to map local variable indices from old to new.
+ val renumber = collection.mutable.ArrayBuffer.empty[Int]
+
+ // Add the index of the local variable used by `varIns` to the `renumber` array.
+ def addVar(varIns: VarInsnNode): Unit = {
+ val index = varIns.`var`
+ val isWide = (varIns.getOpcode: @switch) match {
+ case Opcodes.LLOAD | Opcodes.DLOAD | Opcodes.LSTORE | Opcodes.DSTORE => true
+ case _ => false
+ }
+
+ // Ensure the length of `renumber`. Unused variable indices are mapped to -1.
+ val minLength = if (isWide) index + 2 else index + 1
+ for (i <- renumber.length until minLength) renumber += -1
+
+ renumber(index) = index
+ if (isWide) renumber(index + 1) = index
+ }
+
+ // first phase: collect all used local variables. if the variable at index x is used, set
+ // renumber(x) = x, otherwise renumber(x) = -1. if the variable is wide (long or double), set
+ // renumber(x+1) = x.
+
+ val firstLocalIndex = parametersSize(method)
+ for (i <- 0 until firstLocalIndex) renumber += i // parameters and `this` are always used.
+ method.instructions.iterator().asScala foreach {
+ case VarInstruction(varIns) => addVar(varIns)
+ case _ =>
+ }
+
+ // assign the next free slot to each used local variable.
+ // for example, rewrite (0, 1, -1, 3, -1, 5) to (0, 1, -1, 2, -1, 3).
+
+ var nextIndex = firstLocalIndex
+ for (i <- firstLocalIndex until renumber.length if renumber(i) != -1) {
+ renumber(i) = nextIndex
+ nextIndex += 1
+ }
+
+ // Update the local variable descriptors according to the renumber table, and eliminate stale entries
+ val removedLocalVariableDescriptors = removeUnusedLocalVariableNodes(method)(firstLocalIndex, renumber)
+
+ if (nextIndex == renumber.length) removedLocalVariableDescriptors
+ else {
+ // update variable instructions according to the renumber table
+ method.maxLocals = nextIndex
+ method.instructions.iterator().asScala.foreach {
+ case VarInstruction(varIns) =>
+ val oldIndex = varIns.`var`
+ if (oldIndex >= firstLocalIndex && renumber(oldIndex) != oldIndex)
+ varIns.`var` = renumber(varIns.`var`)
+ case _ =>
+ }
+ true
+ }
+ }
/**
* In order to run an Analyzer, the maxLocals / maxStack fields need to be available. The ASM
@@ -187,4 +340,223 @@ object LocalOpt {
method.maxLocals = mw.getMaxLocals
method.maxStack = mw.getMaxStack
}
+
+ /**
+ * Removes LineNumberNodes that don't describe any executable instructions.
+ *
+ * This method expects (and asserts) that the `start` label of each LineNumberNode is the
+ * lexically preceding label declaration.
+ */
+ def removeEmptyLineNumbers(method: MethodNode): Boolean = {
+ def isEmpty(node: AbstractInsnNode): Boolean = node.getNext match {
+ case null => true
+ case l: LineNumberNode => true
+ case n if n.getOpcode >= 0 => false
+ case n => isEmpty(n)
+ }
+
+ val initialSize = method.instructions.size
+ val iterator = method.instructions.iterator()
+ var previousLabel: LabelNode = null
+ while (iterator.hasNext) {
+ iterator.next match {
+ case label: LabelNode => previousLabel = label
+ case line: LineNumberNode if isEmpty(line) =>
+ assert(line.start == previousLabel)
+ iterator.remove()
+ case _ =>
+ }
+ }
+ method.instructions.size != initialSize
+ }
+
+ /**
+ * Removes unreferenced label declarations, also squashes sequences of label definitions.
+ *
+ * [ops]; Label(a); Label(b); [ops];
+ * => subs([ops], b, a); Label(a); subs([ops], b, a);
+ */
+ def removeEmptyLabelNodes(method: MethodNode): Boolean = {
+ val references = labelReferences(method)
+
+ val initialSize = method.instructions.size
+ val iterator = method.instructions.iterator()
+ var prev: LabelNode = null
+ while (iterator.hasNext) {
+ iterator.next match {
+ case label: LabelNode =>
+ if (!references.contains(label)) iterator.remove()
+ else if (prev != null) {
+ references(label).foreach(substituteLabel(_, label, prev))
+ iterator.remove()
+ } else prev = label
+
+ case instruction =>
+ if (instruction.getOpcode >= 0) prev = null
+ }
+ }
+ method.instructions.size != initialSize
+ }
+
+ /**
+ * Apply various simplifications to branching instructions.
+ */
+ def simplifyJumps(method: MethodNode): Boolean = {
+ var changed = false
+
+ val allHandlers = method.tryCatchBlocks.asScala.toSet
+
+ // A set of all exception handlers that guard the current instruction, required for simplifyGotoReturn
+ var activeHandlers = Set.empty[TryCatchBlockNode]
+
+ // Instructions that need to be removed. simplifyBranchOverGoto returns an instruction to be
+ // removed. It cannot remove it itself because the instruction may be the successor of the current
+ // instruction of the iterator, which is not supported in ASM.
+ var instructionsToRemove = Set.empty[AbstractInsnNode]
+
+ val iterator = method.instructions.iterator()
+ while (iterator.hasNext) {
+ val instruction = iterator.next()
+
+ instruction match {
+ case l: LabelNode =>
+ activeHandlers ++= allHandlers.filter(_.start == l)
+ activeHandlers = activeHandlers.filter(_.end != l)
+ case _ =>
+ }
+
+ if (instructionsToRemove(instruction)) {
+ iterator.remove()
+ instructionsToRemove -= instruction
+ } else if (isJumpNonJsr(instruction)) { // fast path - all of the below only treat jumps
+ var jumpRemoved = simplifyThenElseSameTarget(method, instruction)
+
+ if (!jumpRemoved) {
+ changed = collapseJumpChains(instruction) || changed
+ jumpRemoved = removeJumpToSuccessor(method, instruction)
+
+ if (!jumpRemoved) {
+ val staleGoto = simplifyBranchOverGoto(method, instruction)
+ instructionsToRemove ++= staleGoto
+ changed ||= staleGoto.nonEmpty
+ changed = simplifyGotoReturn(method, instruction, inTryBlock = activeHandlers.nonEmpty) || changed
+ }
+ }
+ changed ||= jumpRemoved
+ }
+ }
+ assert(instructionsToRemove.isEmpty, "some optimization required removing a previously traversed instruction. add `instructionsToRemove.foreach(method.instructions.remove)`")
+ changed
+ }
+
+ /**
+ * Removes a conditional jump if it is followed by a GOTO to the same destination.
+ *
+ * CondJump l; [nops]; GOTO l; [...]
+ * POP*; [nops]; GOTO l; [...]
+ *
+ * Introduces 1 or 2 POP instructions, depending on the number of values consumed by the CondJump.
+ */
+ private def simplifyThenElseSameTarget(method: MethodNode, instruction: AbstractInsnNode): Boolean = instruction match {
+ case ConditionalJump(jump) =>
+ nextExecutableInstruction(instruction) match {
+ case Some(Goto(elseJump)) if sameTargetExecutableInstruction(jump, elseJump) =>
+ removeJumpAndAdjustStack(method, jump)
+ true
+
+ case _ => false
+ }
+ case _ => false
+ }
+
+ /**
+ * Replace jumps to a sequence of GOTO instructions by a jump to the final destination.
+ *
+ * Jump l; [any ops]; l: GOTO m; [any ops]; m: GOTO n; [any ops]; n: NotGOTO; [...]
+ * => Jump n; [rest unchanged]
+ *
+ * If there's a loop of GOTOs, the initial jump is replaced by one of the labels in the loop.
+ */
+ private def collapseJumpChains(instruction: AbstractInsnNode): Boolean = instruction match {
+ case JumpNonJsr(jump) =>
+ val target = finalJumpTarget(jump)
+ if (jump.label == target) false else {
+ jump.label = target
+ true
+ }
+
+ case _ => false
+ }
+
+ /**
+ * Eliminates unnecessary jump instructions
+ *
+ * Jump l; [nops]; l: [...]
+ * => POP*; [nops]; l: [...]
+ *
+ * Introduces 0, 1 or 2 POP instructions, depending on the number of values consumed by the Jump.
+ */
+ private def removeJumpToSuccessor(method: MethodNode, instruction: AbstractInsnNode) = instruction match {
+ case JumpNonJsr(jump) if nextExecutableInstruction(jump, alsoKeep = Set(jump.label)) == Some(jump.label) =>
+ removeJumpAndAdjustStack(method, jump)
+ true
+ case _ => false
+ }
+
+ /**
+ * If the "else" part of a conditional branch is a simple GOTO, negates the conditional branch
+ * and eliminates the GOTO.
+ *
+ * CondJump l; [nops, no labels]; GOTO m; [nops]; l: [...]
+ * => NegatedCondJump m; [nops, no labels]; [nops]; l: [...]
+ *
+ * Note that no label definitions are allowed in the first [nops] section. Otherwise, there could
+ * be some other jump to the GOTO, and eliminating it would change behavior.
+ *
+ * For technical reasons, we cannot remove the GOTO here (*). Instead, this method returns an Option
+ * containing the GOTO that needs to be eliminated.
+ *
+ * (*) The ASM instruction iterator (used in the caller [[simplifyJumps]]) has an undefined
+ * behavior if the successor of the current instruction is removed, which may be the case here
+ */
+ private def simplifyBranchOverGoto(method: MethodNode, instruction: AbstractInsnNode): Option[JumpInsnNode] = instruction match {
+ case ConditionalJump(jump) =>
+ // don't skip over labels, see doc comment
+ nextExecutableInstruction(jump, alsoKeep = _.isInstanceOf[LabelNode]) match {
+ case Some(Goto(goto)) =>
+ if (nextExecutableInstruction(goto, alsoKeep = Set(jump.label)) == Some(jump.label)) {
+ val newJump = new JumpInsnNode(negateJumpOpcode(jump.getOpcode), goto.label)
+ method.instructions.set(jump, newJump)
+ Some(goto)
+ } else None
+
+ case _ => None
+ }
+ case _ => None
+ }
+
+ /**
+ * Inlines xRETURN and ATHROW
+ *
+ * GOTO l; [any ops]; l: xRETURN/ATHROW
+ * => xRETURN/ATHROW; [any ops]; l: xRETURN/ATHROW
+ *
+ * Inlining is only done if the GOTO instruction is not part of a try block, otherwise the
+ * rewrite might change the behavior. For xRETURN, the reason is that return instructions may throw
+ * an IllegalMonitorStateException, as described here:
+ * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.return
+ */
+ private def simplifyGotoReturn(method: MethodNode, instruction: AbstractInsnNode, inTryBlock: Boolean): Boolean = !inTryBlock && (instruction match {
+ case Goto(jump) =>
+ nextExecutableInstruction(jump.label) match {
+ case Some(target) =>
+ if (isReturn(target) || target.getOpcode == Opcodes.ATHROW) {
+ method.instructions.set(jump, target.clone(null))
+ true
+ } else false
+
+ case _ => false
+ }
+ case _ => false
+ })
}
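A sketch of driving the optimizer outside of GenBCode, assuming scala-compiler (with its shaded scala.tools.asm) on the classpath; the -Yopt choice name and the input path are assumptions:

import java.nio.file.{Files, Paths}
import scala.tools.asm.ClassReader
import scala.tools.asm.tree.ClassNode
import scala.tools.nsc.Settings
import scala.tools.nsc.backend.jvm.opt.LocalOpt

object LocalOptDemo extends App {
  val settings = new Settings()
  settings.processArgumentString("-Yopt:unreachable-code") // enables the YoptUnreachableCode branch above
  val classNode = new ClassNode()
  new ClassReader(Files.readAllBytes(Paths.get("C.class"))).accept(classNode, 0) // placeholder classfile
  val changed = new LocalOpt(settings).methodOptimizations(classNode)
  println(s"bytecode changed: $changed")
}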
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
index 1fadcb8920..c6e699373b 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
@@ -7,7 +7,6 @@ package scala
package tools.nsc
package backend.opt
-import scala.tools.nsc.backend.icode.analysis.LubException
import scala.annotation.tailrec
/**
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 351eb23c4c..aa18b26d93 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -26,7 +26,7 @@ import scala.reflect.internal.util.NoSourceFile
* where `p` is defined in a library L, and is accessed from a library C (for Client),
* where C was compiled against L', an optimized version of L where the inliner made `p` public at the bytecode level.
* The only such members are fields, either synthetic or isParamAccessor, and thus having a dollar sign in their name
- * (the accesibility of methods and constructors isn't touched by the inliner).
+ * (the accessibility of methods and constructors isn't touched by the inliner).
*
* Thus we add one more goal to our list:
* (c) Compile C (either optimized or not) against any of L or L',
diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala
new file mode 100644
index 0000000000..3f06264e3c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala
@@ -0,0 +1,125 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.net.URL
+import scala.annotation.tailrec
+import scala.collection.mutable.ArrayBuffer
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
+import scala.tools.nsc.util.ClassRepresentation
+
+/**
+ * A classpath unifying multiple class- and sourcepath entries.
+ * The flat classpath can obtain entries for classes and sources independently,
+ * so it tries to perform operations efficiently - iterating only the collections
+ * that are actually needed at a given moment and only as far as necessary.
+ * @param aggregates classpath instances containing entries which this class processes
+ */
+case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatClassPath {
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ @tailrec
+ def find(aggregates: Seq[FlatClassPath]): Option[AbstractFile] =
+ if (aggregates.nonEmpty) {
+ val classFile = aggregates.head.findClassFile(className)
+ if (classFile.isDefined) classFile
+ else find(aggregates.tail)
+ } else None
+
+ find(aggregates)
+ }
+
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+
+ @tailrec
+ def findEntry[T <: ClassRepClassPathEntry](aggregates: Seq[FlatClassPath], getEntries: FlatClassPath => Seq[T]): Option[T] =
+ if (aggregates.nonEmpty) {
+ val entry = getEntries(aggregates.head)
+ .find(_.name == simpleClassName)
+ if (entry.isDefined) entry
+ else findEntry(aggregates.tail, getEntries)
+ } else None
+
+ val classEntry = findEntry(aggregates, classesGetter(pkg))
+ val sourceEntry = findEntry(aggregates, sourcesGetter(pkg))
+
+ mergeClassesAndSources(classEntry.toList, sourceEntry.toList).headOption
+ }
+
+ override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs)
+
+ override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct
+
+ override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*)
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct
+ aggregatedPackages
+ }
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] =
+ getDistinctEntries(classesGetter(inPackage))
+
+ override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] =
+ getDistinctEntries(sourcesGetter(inPackage))
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ val (packages, classesAndSources) = aggregates.map(_.list(inPackage)).unzip
+ val distinctPackages = packages.flatten.distinct
+ val distinctClassesAndSources = mergeClassesAndSources(classesAndSources: _*)
+ FlatClassPathEntries(distinctPackages, distinctClassesAndSources)
+ }
+
+ /**
+ * Returns only one entry for each name. If there's both a source and a class entry, it
+ * creates an entry containing both of them. If there is more than one class or source entry
+ * for the same class, the first entry of each type found on the classpath is used.
+ */
+ private def mergeClassesAndSources(entries: Seq[ClassRepClassPathEntry]*): Seq[ClassRepClassPathEntry] = {
+ // based on the implementation from MergedClassPath
+ var count = 0
+ val indices = collection.mutable.HashMap[String, Int]()
+ val mergedEntries = new ArrayBuffer[ClassRepClassPathEntry](1024)
+
+ for {
+ partOfEntries <- entries
+ entry <- partOfEntries
+ } {
+ val name = entry.name
+ if (indices contains name) {
+ val index = indices(name)
+ val existing = mergedEntries(index)
+
+ if (existing.binary.isEmpty && entry.binary.isDefined)
+ mergedEntries(index) = ClassAndSourceFilesEntry(entry.binary.get, existing.source.get)
+ if (existing.source.isEmpty && entry.source.isDefined)
+ mergedEntries(index) = ClassAndSourceFilesEntry(existing.binary.get, entry.source.get)
+ }
+ else {
+ indices(name) = count
+ mergedEntries += entry
+ count += 1
+ }
+ }
+ mergedEntries.toIndexedSeq
+ }
+
+ private def getDistinctEntries[EntryType <: ClassRepClassPathEntry](getEntries: FlatClassPath => Seq[EntryType]): Seq[EntryType] = {
+ val seenNames = collection.mutable.HashSet[String]()
+ val entriesBuffer = new ArrayBuffer[EntryType](1024)
+ for {
+ cp <- aggregates
+ entry <- getEntries(cp) if !seenNames.contains(entry.name)
+ } {
+ entriesBuffer += entry
+ seenNames += entry.name
+ }
+ entriesBuffer.toIndexedSeq
+ }
+
+ private def classesGetter(pkg: String) = (cp: FlatClassPath) => cp.classes(pkg)
+ private def sourcesGetter(pkg: String) = (cp: FlatClassPath) => cp.sources(pkg)
+}
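An illustrative composition of the new flat classpath pieces using only their public lookup API; the directory paths and the demo object are placeholders:

import java.io.File
import scala.tools.nsc.classpath.{AggregateFlatClassPath, DirectoryFlatClassPath, DirectoryFlatSourcePath}

object AggregateFlatClassPathDemo extends App {
  val cp = AggregateFlatClassPath(Seq(
    DirectoryFlatClassPath(new File("out/classes")),  // .class files
    DirectoryFlatSourcePath(new File("src/library"))  // .scala / .java sources
  ))
  println(cp.asClassPathString)
  // findClass merges class and source entries found across the aggregated classpaths.
  println(cp.findClass("scala.Option").map(_.name))
}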
diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
new file mode 100644
index 0000000000..9bf4e3f779
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
+
+/**
+ * A trait that contains factory methods for classpath elements of type T.
+ *
+ * The logic has been abstracted from ClassPath#ClassPathContext so it's possible
+ * to have a common trait that supports both the recursive and the flat classpath representations.
+ *
+ * Therefore, we expect that T will be either ClassPath[U] or FlatClassPath.
+ */
+trait ClassPathFactory[T] {
+
+ /**
+ * Create a new classpath based on the abstract file.
+ */
+ def newClassPath(file: AbstractFile): T
+
+ /**
+ * Creators for sub classpaths which preserve this context.
+ */
+ def sourcesInPath(path: String): List[T]
+
+ def expandPath(path: String, expandStar: Boolean = true): List[String] = ClassPath.expandPath(path, expandStar)
+
+ def expandDir(extdir: String): List[String] = ClassPath.expandDir(extdir)
+
+ def contentsOfDirsInPath(path: String): List[T] =
+ for {
+ dir <- expandPath(path, expandStar = false)
+ name <- expandDir(dir)
+ entry <- Option(AbstractFile.getDirectory(name))
+ } yield newClassPath(entry)
+
+ def classesInExpandedPath(path: String): IndexedSeq[T] =
+ classesInPathImpl(path, expand = true).toIndexedSeq
+
+ def classesInPath(path: String) = classesInPathImpl(path, expand = false)
+
+ def classesInManifest(useManifestClassPath: Boolean) =
+ if (useManifestClassPath) ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url))
+ else Nil
+
+ // Internal
+ protected def classesInPathImpl(path: String, expand: Boolean) =
+ for {
+ file <- expandPath(path, expand)
+ dir <- Option(AbstractFile.getDirectory(file))
+ } yield newClassPath(dir)
+}
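A simplified, hypothetical specialization of the trait for the flat representation; the real factory is FlatClassPathFactory (added elsewhere in this series), and this sketch only handles plain directories:

import scala.reflect.io.AbstractFile
import scala.tools.nsc.classpath.{ClassPathFactory, DirectoryFlatClassPath, DirectoryFlatSourcePath, FlatClassPath}

class DirOnlyFlatClassPathFactory extends ClassPathFactory[FlatClassPath] {
  // Assumes the AbstractFile is backed by a real directory on disk (file.file may be null for virtual files).
  override def newClassPath(file: AbstractFile): FlatClassPath =
    DirectoryFlatClassPath(file.file)

  override def sourcesInPath(path: String): List[FlatClassPath] =
    for {
      entry <- expandPath(path, expandStar = false)
      dir   <- Option(AbstractFile.getDirectory(entry))
    } yield DirectoryFlatSourcePath(dir.file)
}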
diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala
new file mode 100644
index 0000000000..81d2f7320f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.File
+import java.io.FileFilter
+import java.net.URL
+import scala.reflect.io.AbstractFile
+import scala.reflect.io.PlainFile
+import scala.tools.nsc.util.ClassRepresentation
+import FileUtils._
+
+/**
+ * A trait for looking up classpath entries of a given type in directories.
+ * It provides common logic for the classes handling class and source files.
+ * It makes use of the fact that, with nested directories, it's easy to find a file
+ * once we know the name of its package.
+ */
+trait DirectoryFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath {
+ val dir: File
+ assert(dir != null, "Directory file in DirectoryFileLookup cannot be null")
+
+ override def asURLs: Seq[URL] = Seq(dir.toURI.toURL)
+ override def asClassPathStrings: Seq[String] = Seq(dir.getPath)
+
+ import FlatClassPath.RootPackage
+ private def getDirectory(forPackage: String): Option[File] = {
+ if (forPackage == RootPackage) {
+ Some(dir)
+ } else {
+ val packageDirName = FileUtils.dirPath(forPackage)
+ val packageDir = new File(dir, packageDirName)
+ if (packageDir.exists && packageDir.isDirectory) {
+ Some(packageDir)
+ } else None
+ }
+ }
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ val dirForPackage = getDirectory(inPackage)
+ val nestedDirs: Array[File] = dirForPackage match {
+ case None => Array.empty
+ case Some(directory) => directory.listFiles(DirectoryFileLookup.packageDirectoryFileFilter)
+ }
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ val entries = nestedDirs map { file =>
+ PackageEntryImpl(prefix + file.getName)
+ }
+ entries
+ }
+
+ protected def files(inPackage: String): Seq[FileEntryType] = {
+ val dirForPackage = getDirectory(inPackage)
+ val files: Array[File] = dirForPackage match {
+ case None => Array.empty
+ case Some(directory) => directory.listFiles(fileFilter)
+ }
+ val entries = files map { file =>
+ val wrappedFile = new scala.reflect.io.File(file)
+ createFileEntry(new PlainFile(wrappedFile))
+ }
+ entries
+ }
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ val dirForPackage = getDirectory(inPackage)
+ val files: Array[File] = dirForPackage match {
+ case None => Array.empty
+ case Some(directory) => directory.listFiles()
+ }
+ val packagePrefix = PackageNameUtils.packagePrefix(inPackage)
+ val packageBuf = collection.mutable.ArrayBuffer.empty[PackageEntry]
+ val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType]
+ for (file <- files) {
+ if (file.isPackage) {
+ val pkgEntry = PackageEntryImpl(packagePrefix + file.getName)
+ packageBuf += pkgEntry
+ } else if (fileFilter.accept(file)) {
+ val wrappedFile = new scala.reflect.io.File(file)
+ val abstractFile = new PlainFile(wrappedFile)
+ fileBuf += createFileEntry(abstractFile)
+ }
+ }
+ FlatClassPathEntries(packageBuf, fileBuf)
+ }
+
+ protected def createFileEntry(file: AbstractFile): FileEntryType
+ protected def fileFilter: FileFilter
+}
+
+object DirectoryFileLookup {
+
+ private[classpath] object packageDirectoryFileFilter extends FileFilter {
+ override def accept(pathname: File): Boolean = pathname.isPackage
+ }
+}
+
+case class DirectoryFlatClassPath(dir: File)
+ extends DirectoryFileLookup[ClassFileEntryImpl]
+ with NoSourcePaths {
+
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = findClassFile(className) map ClassFileEntryImpl
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className)
+ val classFile = new File(s"$dir/$relativePath.class")
+ if (classFile.exists) {
+ val wrappedClassFile = new scala.reflect.io.File(classFile)
+ val abstractClassFile = new PlainFile(wrappedClassFile)
+ Some(abstractClassFile)
+ } else None
+ }
+
+ override protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file)
+ override protected def fileFilter: FileFilter = DirectoryFlatClassPath.classFileFilter
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+}
+
+object DirectoryFlatClassPath {
+
+ private val classFileFilter = new FileFilter {
+ override def accept(pathname: File): Boolean = pathname.isClass
+ }
+}
+
+case class DirectoryFlatSourcePath(dir: File)
+ extends DirectoryFileLookup[SourceFileEntryImpl]
+ with NoClassPaths {
+
+ override def asSourcePathString: String = asClassPathString
+
+ override protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file)
+ override protected def fileFilter: FileFilter = DirectoryFlatSourcePath.sourceFileFilter
+
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
+ findSourceFile(className) map SourceFileEntryImpl
+ }
+
+ private def findSourceFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className)
+ val sourceFile = Stream("scala", "java")
+ .map(ext => new File(s"$dir/$relativePath.$ext"))
+ .collectFirst { case file if file.exists() => file }
+
+ sourceFile.map { file =>
+ val wrappedSourceFile = new scala.reflect.io.File(file)
+ val abstractSourceFile = new PlainFile(wrappedSourceFile)
+ abstractSourceFile
+ }
+ }
+
+ override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage)
+}
+
+object DirectoryFlatSourcePath {
+
+ private val sourceFileFilter = new FileFilter {
+ override def accept(pathname: File): Boolean = endsScalaOrJava(pathname.getName)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
new file mode 100644
index 0000000000..ee2528e15c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.{ File => JFile }
+import java.net.URL
+import scala.reflect.internal.FatalError
+import scala.reflect.io.AbstractFile
+
+/**
+ * Common methods related to Java files and abstract files used in the context of classpath
+ */
+object FileUtils {
+ implicit class AbstractFileOps(val file: AbstractFile) extends AnyVal {
+ def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name)
+
+ def isClass: Boolean = !file.isDirectory && file.hasExtension("class")
+
+ def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java"))
+
+ // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip?
+ def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip")
+
+ /**
+ * Safe method returning a sequence containing one URL representing this file, when underlying file exists,
+ * and returning given default value in other case
+ */
+ def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL)
+ }
+
+ implicit class FileOps(val file: JFile) extends AnyVal {
+ def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName)
+
+ def isClass: Boolean = file.isFile && file.getName.endsWith(".class")
+ }
+
+ def stripSourceExtension(fileName: String): String = {
+ if (endsScala(fileName)) stripClassExtension(fileName)
+ else if (endsJava(fileName)) stripJavaExtension(fileName)
+ else throw new FatalError("Unexpected source file ending: " + fileName)
+ }
+
+ def dirPath(forPackage: String) = forPackage.replace('.', '/')
+
+ def endsClass(fileName: String): Boolean =
+ fileName.length > 6 && fileName.substring(fileName.length - 6) == ".class"
+
+ def endsScalaOrJava(fileName: String): Boolean =
+ endsScala(fileName) || endsJava(fileName)
+
+ def endsJava(fileName: String): Boolean =
+ fileName.length > 5 && fileName.substring(fileName.length - 5) == ".java"
+
+ def endsScala(fileName: String): Boolean =
+ fileName.length > 6 && fileName.substring(fileName.length - 6) == ".scala"
+
+ def stripClassExtension(fileName: String): String =
+ fileName.substring(0, fileName.length - 6) // equivalent of fileName.length - ".class".length
+
+ def stripJavaExtension(fileName: String): String =
+ fileName.substring(0, fileName.length - 5)
+
+ // It should probably match a pattern like [a-z_]{1}[a-z0-9_]*, but it cannot be changed
+ // because then some partest tests don't pass.
+ private def mayBeValidPackage(dirName: String): Boolean =
+ (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.')
+}
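A few quick checks showing what the string helpers evaluate to (the demo object is illustrative):

import scala.tools.nsc.classpath.FileUtils

object FileUtilsDemo extends App {
  println(FileUtils.dirPath("scala.collection.immutable")) // scala/collection/immutable
  println(FileUtils.endsClass("List.class"))               // true
  println(FileUtils.stripSourceExtension("Predef.scala"))  // Predef
  println(FileUtils.endsScalaOrJava("Box.java"))           // true
}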
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala
new file mode 100644
index 0000000000..26b5429e23
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, ClassRepresentation }
+
+/**
+ * A base trait for the particular flat classpath representation implementations.
+ *
+ * We call this variant of a classpath representation flat because it's possible to
+ * query the whole classpath using just a single instance extending this trait.
+ *
+ * This is an alternative design to scala.tools.nsc.util.ClassPath.
+ */
+trait FlatClassPath extends ClassFileLookup[AbstractFile] {
+ /** Empty string represents root package */
+ private[nsc] def packages(inPackage: String): Seq[PackageEntry]
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry]
+ private[nsc] def sources(inPackage: String): Seq[SourceFileEntry]
+
+  /** Allows getting entries for packages and classes merged with sources, possibly in one pass. */
+ private[nsc] def list(inPackage: String): FlatClassPathEntries
+
+  // A default implementation which should be overridden if we can provide a more efficient
+  // solution for a given type of FlatClassPath.
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+
+ val foundClassFromClassFiles = classes(pkg)
+ .find(_.name == simpleClassName)
+
+ def findClassInSources = sources(pkg)
+ .find(_.name == simpleClassName)
+
+ foundClassFromClassFiles orElse findClassInSources
+ }
+
+ override def asClassPathString: String = ClassPath.join(asClassPathStrings: _*)
+ def asClassPathStrings: Seq[String]
+}
+
+object FlatClassPath {
+ val RootPackage = ""
+}
+
+case class FlatClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepClassPathEntry])
+
+object FlatClassPathEntries {
+ import scala.language.implicitConversions
+  // to have a working unzip method
+ implicit def entry2Tuple(entry: FlatClassPathEntries) = (entry.packages, entry.classesAndSources)
+}
+
+sealed trait ClassRepClassPathEntry extends ClassRepresentation[AbstractFile]
+
+trait ClassFileEntry extends ClassRepClassPathEntry {
+ def file: AbstractFile
+}
+
+trait SourceFileEntry extends ClassRepClassPathEntry {
+ def file: AbstractFile
+}
+
+trait PackageEntry {
+ def name: String
+}
+
+private[nsc] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry {
+ override def name = FileUtils.stripClassExtension(file.name) // class name
+
+ override def binary: Option[AbstractFile] = Some(file)
+ override def source: Option[AbstractFile] = None
+}
+
+private[nsc] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry {
+ override def name = FileUtils.stripSourceExtension(file.name)
+
+ override def binary: Option[AbstractFile] = None
+ override def source: Option[AbstractFile] = Some(file)
+}
+
+private[nsc] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepClassPathEntry {
+ override def name = FileUtils.stripClassExtension(classFile.name)
+
+ override def binary: Option[AbstractFile] = Some(classFile)
+ override def source: Option[AbstractFile] = Some(srcFile)
+}
+
+private[nsc] case class PackageEntryImpl(name: String) extends PackageEntry
+
+private[nsc] trait NoSourcePaths {
+ def asSourcePathString: String = ""
+ private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = Seq.empty
+}
+
+private[nsc] trait NoClassPaths {
+ def findClassFile(className: String): Option[AbstractFile] = None
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = Seq.empty
+}
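
The implicit conversion in FlatClassPathEntries exists so that a collection of per-classpath-element results can be unzipped into packages and classes/sources, e.g. when merging the results of several classpath elements. A minimal sketch, assuming a hypothetical demo object placed in the same package so the private[nsc] entry constructors are accessible:

package scala.tools.nsc.classpath

object FlatClassPathEntriesDemo {
  def main(args: Array[String]): Unit = {
    val resultsPerClassPathElement = Seq(
      FlatClassPathEntries(Seq(PackageEntryImpl("scala.tools")), Seq.empty),
      FlatClassPathEntries(Seq(PackageEntryImpl("scala.reflect")), Seq.empty)
    )
    // entry2Tuple lets unzip view each FlatClassPathEntries as a (packages, classesAndSources) pair
    val (packages, classesAndSources) = resultsPerClassPathElement.unzip
    println(packages.flatten.map(_.name))      // List(scala.tools, scala.reflect)
    println(classesAndSources.flatten.isEmpty) // true
  }
}
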
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala
new file mode 100644
index 0000000000..7f67381d4d
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.tools.nsc.Settings
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
+import FileUtils.AbstractFileOps
+
+/**
+ * Provides factory methods for the flat classpath. When creating classpath instances for a given path,
+ * it uses the proper type of classpath depending on the types of the particular files containing sources or classes.
+ */
+class FlatClassPathFactory(settings: Settings) extends ClassPathFactory[FlatClassPath] {
+
+ override def newClassPath(file: AbstractFile): FlatClassPath =
+ if (file.isJarOrZip)
+ ZipAndJarFlatClassPathFactory.create(file, settings)
+ else if (file.isDirectory)
+ new DirectoryFlatClassPath(file.file)
+ else
+ sys.error(s"Unsupported classpath element: $file")
+
+ override def sourcesInPath(path: String): List[FlatClassPath] =
+ for {
+ file <- expandPath(path, expandStar = false)
+ dir <- Option(AbstractFile getDirectory file)
+ } yield createSourcePath(dir)
+
+ private def createSourcePath(file: AbstractFile): FlatClassPath =
+ if (file.isJarOrZip)
+ ZipAndJarFlatSourcePathFactory.create(file, settings)
+ else if (file.isDirectory)
+ new DirectoryFlatSourcePath(file.file)
+ else
+ sys.error(s"Unsupported sourcepath element: $file")
+}
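
A minimal sketch of using this factory to build a flat classpath for a single directory (hypothetical demo object; any existing directory works, the working directory is used only as an example):

import scala.reflect.io.{ AbstractFile, Path }
import scala.tools.nsc.Settings
import scala.tools.nsc.classpath.FlatClassPathFactory

object FlatClassPathFactoryDemo {
  def main(args: Array[String]): Unit = {
    val settings = new Settings(msg => sys.error(msg))
    val factory  = new FlatClassPathFactory(settings)
    // AbstractFile.getDirectory also accepts zip/jar files and exposes them as directories
    val dir = AbstractFile.getDirectory(Path("."))
    val cp  = factory.newClassPath(dir)
    println(cp.asClassPathString) // the single underlying path of this classpath element
  }
}
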
diff --git a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
new file mode 100644
index 0000000000..c907d565d2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.tools.nsc.classpath.FlatClassPath.RootPackage
+
+/**
+ * Common methods related to package names represented as String
+ */
+object PackageNameUtils {
+
+ /**
+ * @param fullClassName full class name with package
+ * @return (package, simple class name)
+ */
+ def separatePkgAndClassNames(fullClassName: String): (String, String) = {
+ val lastDotIndex = fullClassName.lastIndexOf('.')
+ if (lastDotIndex == -1)
+ (RootPackage, fullClassName)
+ else
+ (fullClassName.substring(0, lastDotIndex), fullClassName.substring(lastDotIndex + 1))
+ }
+
+ def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "."
+}
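
A minimal sketch of the splitting and prefixing behaviour (hypothetical demo object):

import scala.tools.nsc.classpath.PackageNameUtils._
import scala.tools.nsc.classpath.FlatClassPath.RootPackage

object PackageNameUtilsDemo {
  def main(args: Array[String]): Unit = {
    val (pkg, cls) = separatePkgAndClassNames("scala.collection.immutable.List")
    assert(pkg == "scala.collection.immutable" && cls == "List")

    val (rootPkg, topLevel) = separatePkgAndClassNames("TopLevelClass")
    assert(rootPkg == RootPackage && topLevel == "TopLevelClass")

    assert(packagePrefix(RootPackage) == "")
    assert(packagePrefix("scala.tools") == "scala.tools.")
  }
}
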
diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
new file mode 100644
index 0000000000..84e21a3ccd
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
@@ -0,0 +1,180 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.File
+import java.net.URL
+import scala.annotation.tailrec
+import scala.reflect.io.{ AbstractFile, FileZipArchive, ManifestResources }
+import scala.tools.nsc.Settings
+import FileUtils._
+
+/**
+ * A trait providing an optional cache for classpath entries obtained from zip and jar files.
+ * Such a cache can be created under the assumption that entries in these files won't change (at
+ * least they will stay the same each time the classpath is loaded during the lifetime of the JVM process)
+ * - unlike class and source files in directories, which can be modified and recompiled.
+ * This allows us to e.g. significantly reduce the memory used by presentation compilers in Scala IDE
+ * when many projects share a lot of common dependencies.
+ */
+sealed trait ZipAndJarFileLookupFactory {
+
+ private val cache = collection.mutable.Map.empty[AbstractFile, FlatClassPath]
+
+ def create(zipFile: AbstractFile, settings: Settings): FlatClassPath = {
+ if (settings.YdisableFlatCpCaching) createForZipFile(zipFile)
+ else createUsingCache(zipFile, settings)
+ }
+
+ protected def createForZipFile(zipFile: AbstractFile): FlatClassPath
+
+ private def createUsingCache(zipFile: AbstractFile, settings: Settings): FlatClassPath = cache.synchronized {
+ def newClassPathInstance = {
+ if (settings.verbose || settings.Ylogcp)
+ println(s"$zipFile is not yet in the classpath cache")
+ createForZipFile(zipFile)
+ }
+ cache.getOrElseUpdate(zipFile, newClassPathInstance)
+ }
+}
+
+/**
+ * Manages creation of flat classpath for class files placed in zip and jar files.
+ * It should be the only way of creating them as it provides caching.
+ */
+object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
+
+ private case class ZipArchiveFlatClassPath(zipFile: File)
+ extends ZipArchiveFileLookup[ClassFileEntryImpl]
+ with NoSourcePaths {
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+ classes(pkg).find(_.name == simpleClassName).map(_.file)
+ }
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+
+ override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file)
+ override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass
+ }
+
+ /**
+   * This type of classpath is closely related to the support for JSR-223.
+ * Its usage can be observed e.g. when running:
+ * jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala
+   * with a specially prepared scala-library.jar. It should have all classes listed in the manifest, e.g. with an entry like:
+ * Name: scala/Function2$mcFJD$sp.class
+ */
+ private case class ManifestResourcesFlatClassPath(file: ManifestResources)
+ extends FlatClassPath
+ with NoSourcePaths {
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+ classes(pkg).find(_.name == simpleClassName).map(_.file)
+ }
+
+ override def asClassPathStrings: Seq[String] = Seq(file.path)
+
+ override def asURLs: Seq[URL] = file.toURLs()
+
+ import ManifestResourcesFlatClassPath.PackageFileInfo
+ import ManifestResourcesFlatClassPath.PackageInfo
+
+ /**
+     * A cache mapping a package name to the abstract file of its package directory and its subpackages.
+     *
+     * ManifestResources can iterate through collections of entries from e.g. a remote jar file.
+     * We can't just specify the path to a concrete directory, so we can't simply 'jump' into
+     * a given package when it's needed. On the other hand, we can iterate over entries to get
+     * AbstractFiles, iterate over the entries of these files, and so on.
+     *
+     * Instead of traversing the tree of AbstractFiles once and caching all entries, or traversing it each
+     * time we need the subpackages or classes of a given package, we traverse it once and cache only packages.
+     * Classes for a given package can then be loaded easily when they are needed.
+ */
+ private lazy val cachedPackages: collection.mutable.HashMap[String, PackageFileInfo] = {
+ val packages = collection.mutable.HashMap[String, PackageFileInfo]()
+
+ def getSubpackages(dir: AbstractFile): List[AbstractFile] =
+ (for (file <- dir if file.isPackage) yield file)(collection.breakOut)
+
+ @tailrec
+ def traverse(packagePrefix: String,
+ filesForPrefix: List[AbstractFile],
+ subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match {
+ case pkgFile :: remainingFiles =>
+ val subpackages = getSubpackages(pkgFile)
+ val fullPkgName = packagePrefix + pkgFile.name
+ packages.put(fullPkgName, PackageFileInfo(pkgFile, subpackages))
+ val newPackagePrefix = fullPkgName + "."
+ subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages))
+ traverse(packagePrefix, remainingFiles, subpackagesQueue)
+ case Nil if subpackagesQueue.nonEmpty =>
+ val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue()
+ traverse(packagePrefix, filesForPrefix, subpackagesQueue)
+ case _ =>
+ }
+
+ val subpackages = getSubpackages(file)
+ packages.put(FlatClassPath.RootPackage, PackageFileInfo(file, subpackages))
+ traverse(FlatClassPath.RootPackage, subpackages, collection.mutable.Queue())
+ packages
+ }
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = cachedPackages.get(inPackage) match {
+ case None => Seq.empty
+ case Some(PackageFileInfo(_, subpackages)) =>
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ subpackages.map(packageFile => PackageEntryImpl(prefix + packageFile.name))
+ }
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = cachedPackages.get(inPackage) match {
+ case None => Seq.empty
+ case Some(PackageFileInfo(pkg, _)) =>
+ (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file))(collection.breakOut)
+ }
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(packages(inPackage), classes(inPackage))
+ }
+
+ private object ManifestResourcesFlatClassPath {
+ case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile])
+ case class PackageInfo(packageName: String, subpackages: List[AbstractFile])
+ }
+
+ override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath =
+ if (zipFile.file == null) createWithoutUnderlyingFile(zipFile)
+ else ZipArchiveFlatClassPath(zipFile.file)
+
+ private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match {
+ case manifestRes: ManifestResources =>
+ ManifestResourcesFlatClassPath(manifestRes)
+ case _ =>
+ val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile"
+ throw new IllegalArgumentException(errorMsg)
+ }
+}
+
+/**
+ * Manages creation of flat classpath for source files placed in zip and jar files.
+ * It should be the only way of creating them as it provides caching.
+ */
+object ZipAndJarFlatSourcePathFactory extends ZipAndJarFileLookupFactory {
+
+ private case class ZipArchiveFlatSourcePath(zipFile: File)
+ extends ZipArchiveFileLookup[SourceFileEntryImpl]
+ with NoClassPaths {
+
+ override def asSourcePathString: String = asClassPathString
+
+ override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage)
+
+ override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file)
+ override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource
+ }
+
+ override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath = ZipArchiveFlatSourcePath(zipFile.file)
+}
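
A minimal sketch of the caching contract described above (hypothetical demo object; it assumes some jar can be found on java.class.path): creating a flat classpath twice for the same zip/jar AbstractFile yields the same cached instance unless -YdisableFlatCpCaching is set.

import java.io.File
import scala.reflect.io.{ AbstractFile, Path }
import scala.tools.nsc.Settings
import scala.tools.nsc.classpath.ZipAndJarFlatClassPathFactory

object FlatCpCachingDemo {
  def main(args: Array[String]): Unit = {
    val settings = new Settings(msg => sys.error(msg))
    val someJar  = sys.props("java.class.path").split(File.pathSeparator).find(_.endsWith(".jar"))
    someJar foreach { jarPath =>
      val jar = AbstractFile.getDirectory(Path(jarPath)) // archives are exposed as directories
      val cp1 = ZipAndJarFlatClassPathFactory.create(jar, settings)
      val cp2 = ZipAndJarFlatClassPathFactory.create(jar, settings)
      println(cp1 eq cp2) // true - the second call hits the cache keyed by the AbstractFile
    }
  }
}
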
diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
new file mode 100644
index 0000000000..1d0de57779
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.File
+import java.net.URL
+import scala.collection.Seq
+import scala.reflect.io.AbstractFile
+import scala.reflect.io.FileZipArchive
+import FileUtils.AbstractFileOps
+
+/**
+ * A trait allowing lookup of classpath entries of a given type in zip and jar files.
+ * It provides common logic for classes handling class and source files.
+ * It's aware of things like the META-INF directory, which is correctly skipped.
+ */
+trait ZipArchiveFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath {
+ val zipFile: File
+
+ assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null")
+
+ override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL)
+ override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath)
+
+ private val archive = new FileZipArchive(zipFile)
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ for {
+ dirEntry <- findDirEntry(inPackage).toSeq
+ entry <- dirEntry.iterator if entry.isPackage
+ } yield PackageEntryImpl(prefix + entry.name)
+ }
+
+ protected def files(inPackage: String): Seq[FileEntryType] =
+ for {
+ dirEntry <- findDirEntry(inPackage).toSeq
+ entry <- dirEntry.iterator if isRequiredFileType(entry)
+ } yield createFileEntry(entry)
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ val foundDirEntry = findDirEntry(inPackage)
+
+ foundDirEntry map { dirEntry =>
+ val pkgBuf = collection.mutable.ArrayBuffer.empty[PackageEntry]
+ val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType]
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+
+ for (entry <- dirEntry.iterator) {
+ if (entry.isPackage)
+ pkgBuf += PackageEntryImpl(prefix + entry.name)
+ else if (isRequiredFileType(entry))
+ fileBuf += createFileEntry(entry)
+ }
+ FlatClassPathEntries(pkgBuf, fileBuf)
+ } getOrElse FlatClassPathEntries(Seq.empty, Seq.empty)
+ }
+
+ private def findDirEntry(pkg: String) = {
+ val dirName = s"${FileUtils.dirPath(pkg)}/"
+ archive.allDirs.get(dirName)
+ }
+
+ protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType
+ protected def isRequiredFileType(file: AbstractFile): Boolean
+}
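
A minimal sketch of the single-pass list defined above (hypothetical demo object placed in the same package, since list is private[nsc]; the path to a jar such as scala-library.jar is expected as the first program argument):

package scala.tools.nsc.classpath

import scala.reflect.io.{ AbstractFile, Path }
import scala.tools.nsc.Settings

object ZipArchiveListDemo {
  def main(args: Array[String]): Unit = {
    val jar = AbstractFile.getDirectory(Path(args(0)))
    val cp  = ZipAndJarFlatClassPathFactory.create(jar, new Settings(msg => sys.error(msg)))
    // one traversal of the archive's directory entry yields both subpackages and class files
    val FlatClassPathEntries(packages, classFiles) = cp.list("scala.collection")
    println(packages.map(_.name).take(3))   // e.g. List(scala.collection.concurrent, ...)
    println(classFiles.map(_.name).take(3)) // e.g. List(Seq, List, ...) - order depends on the archive
  }
}
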
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 6e3d013e52..4b1805479d 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -7,7 +7,7 @@
package scala.tools.nsc
package plugins
-import scala.reflect.io.{ File, Path }
+import scala.reflect.io.Path
import scala.tools.nsc.util.ClassPath
import scala.tools.util.PathResolver.Defaults
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 8c2b510bfd..fffbb4333f 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -22,13 +22,15 @@ class FscSettings(error: String => Unit) extends Settings(error) {
val reset = BooleanSetting("-reset", "Reset compile server caches")
val shutdown = BooleanSetting("-shutdown", "Shutdown compile server")
val server = StringSetting ("-server", "hostname:portnumber", "Specify compile server socket", "")
+ val port = IntSetting ("-port", "Search and start compile server in given port only",
+ 0, Some((0, Int.MaxValue)), (_: String) => None)
val preferIPv4 = BooleanSetting("-ipv4", "Use IPv4 rather than IPv6 for the server socket")
val idleMins = IntSetting ("-max-idle", "Set idle timeout in minutes for fsc (use 0 for no timeout)",
30, Some((0, Int.MaxValue)), (_: String) => None)
// For improved help output, separating fsc options from the others.
def fscSpecific = Set[Settings#Setting](
- currentDir, reset, shutdown, server, preferIPv4, idleMins
+ currentDir, reset, shutdown, server, port, preferIPv4, idleMins
)
val isFscSpecific: String => Boolean = fscSpecific map (_.name)
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index 23611bb629..b4987e1240 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -229,7 +229,8 @@ class MutableSettings(val errorFn: String => Unit)
def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default))
def PhasesSetting(name: String, descr: String, default: String = "") = add(new PhasesSetting(name, descr, default))
def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default))
- def ScalaVersionSetting(name: String, arg: String, descr: String, default: ScalaVersion) = add(new ScalaVersionSetting(name, arg, descr, default))
+ def ScalaVersionSetting(name: String, arg: String, descr: String, initial: ScalaVersion, default: Option[ScalaVersion] = None) =
+ add(new ScalaVersionSetting(name, arg, descr, initial, default))
def PathSetting(name: String, descr: String, default: String): PathSetting = {
val prepend = StringSetting(name + "/p", "", "", "").internalOnly()
val append = StringSetting(name + "/a", "", "", "").internalOnly()
@@ -506,28 +507,35 @@ class MutableSettings(val errorFn: String => Unit)
withHelpSyntax(name + " <" + arg + ">")
}
- /** A setting represented by a Scala version, (`default` unless set) */
+ /** A setting represented by a Scala version.
+ * The `initial` value is used if the setting is not specified.
+ * The `default` value is used if the option is specified without argument (e.g., `-Xmigration`).
+ */
class ScalaVersionSetting private[nsc](
name: String,
val arg: String,
descr: String,
- default: ScalaVersion)
+ initial: ScalaVersion,
+ default: Option[ScalaVersion])
extends Setting(name, descr) {
type T = ScalaVersion
- protected var v: T = NoScalaVersion
+ protected var v: T = initial
+    // This method is invoked if there are no colon-separated args. In this case the default value is
+    // used. No arguments are consumed.
override def tryToSet(args: List[String]) = {
- value = default
+ default match {
+ case Some(d) => value = d
+ case None => errorFn(s"$name requires an argument, the syntax is $helpSyntax")
+ }
Some(args)
}
override def tryToSetColon(args: List[String]) = args match {
- case Nil => value = default; Some(Nil)
- case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs)
+ case x :: xs => value = ScalaVersion(x, errorFn); Some(xs)
+ case nil => Some(nil)
}
- override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
-
def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
withHelpSyntax(s"${name}:<${arg}>")
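
A minimal sketch of the initial/default distinction documented above, using -Xmigration (whose declaration is updated accordingly later in this patch); the demo object and its helper are hypothetical:

import scala.tools.nsc.Settings
import scala.tools.nsc.settings.{ AnyScalaVersion, NoScalaVersion, ScalaVersion }

object ScalaVersionSettingDemo {
  private def migrationAfter(args: List[String]): ScalaVersion = {
    val settings = new Settings(msg => sys.error(msg))
    settings.processArguments(args, processAll = true)
    settings.Xmigration.value
  }

  def main(args: Array[String]): Unit = {
    println(migrationAfter(Nil) == NoScalaVersion)                            // not given -> initial value
    println(migrationAfter(List("-Xmigration")) == AnyScalaVersion)           // bare flag -> default value
    println(migrationAfter(List("-Xmigration:2.10")) == ScalaVersion("2.10")) // explicit argument
  }
}
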
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 850534f2cc..18e639b81c 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -92,7 +92,8 @@ trait ScalaSettings extends AbsScalaSettings
* The previous "-source" option is intended to be used mainly
* though this helper.
*/
- lazy val isScala211: Boolean = (source.value >= ScalaVersion("2.11.0"))
+ def isScala211: Boolean = source.value >= ScalaVersion("2.11.0")
+ def isScala212: Boolean = source.value >= ScalaVersion("2.12.0")
/**
* -X "Advanced" settings
@@ -111,7 +112,7 @@ trait ScalaSettings extends AbsScalaSettings
val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
- val Xmigration = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
+ val Xmigration = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", initial = NoScalaVersion, default = Some(AnyScalaVersion))
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
@@ -133,7 +134,7 @@ trait ScalaSettings extends AbsScalaSettings
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
val strictInference = BooleanSetting ("-Xstrict-inference", "Don't infer known-unsound types")
- val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", ScalaVersion("2.11")) withPostSetHook ( _ => isScala211)
+ val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", initial = ScalaVersion("2.11"))
val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
@@ -201,6 +202,9 @@ trait ScalaSettings extends AbsScalaSettings
val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.")
val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").withDeprecationMessage(removalIn212)
val inferByName = BooleanSetting ("-Yinfer-by-name", "Allow inference of by-name types. This is a temporary option to ease transition. See SI-7899.").withDeprecationMessage(removalIn212)
+ val YclasspathImpl = ChoiceSetting ("-YclasspathImpl", "implementation", "Choose classpath scanning method.", List(ClassPathRepresentationType.Recursive, ClassPathRepresentationType.Flat), ClassPathRepresentationType.Recursive)
+ val YdisableFlatCpCaching = BooleanSetting ("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.")
+
val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
val YdisableUnreachablePrevention = BooleanSetting("-Ydisable-unreachable-prevention", "Disable the prevention of unreachable blocks in code generation.")
val YnoLoadImplClass = BooleanSetting ("-Yno-load-impl-class", "Do not load $class.class files.")
@@ -210,21 +214,26 @@ trait ScalaSettings extends AbsScalaSettings
val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "inline")
object YoptChoices extends MultiChoiceEnumeration {
- val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code")
+    val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers protecting no instructions, and debug information of eliminated variables.")
+    val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.")
+ val recurseUnreachableJumps = Choice("recurse-unreachable-jumps", "Recursively apply unreachable-code and simplify-jumps (if enabled) until reaching a fixpoint.")
+ val emptyLineNumbers = Choice("empty-line-numbers", "Eliminate unnecessary line number information.")
+ val emptyLabels = Choice("empty-labels", "Eliminate and collapse redundant labels in the bytecode.")
+ val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.")
- val lNone = Choice("l:none", "Don't enable any optimizations")
+ val lNone = Choice("l:none", "Don't enable any optimizations.")
private val defaultChoices = List(unreachableCode)
- val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString(","), expandsTo = defaultChoices)
+ val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString(","), expandsTo = defaultChoices)
- private val methodChoices = List(lDefault)
- val lMethod = Choice("l:method", "Intra-method optimizations: "+ methodChoices.mkString(","), expandsTo = methodChoices)
+ private val methodChoices = List(unreachableCode, simplifyJumps, recurseUnreachableJumps, emptyLineNumbers, emptyLabels, compactLocals)
+ val lMethod = Choice("l:method", "Enable intra-method optimizations: "+ methodChoices.mkString(","), expandsTo = methodChoices)
private val projectChoices = List(lMethod)
- val lProject = Choice("l:project", "Cross-method optimizations within the current project: "+ projectChoices.mkString(","), expandsTo = projectChoices)
+ val lProject = Choice("l:project", "Enable cross-method optimizations within the current project: "+ projectChoices.mkString(","), expandsTo = projectChoices)
private val classpathChoices = List(lProject)
- val lClasspath = Choice("l:classpath", "Cross-method optmizations across the entire classpath: "+ classpathChoices.mkString(","), expandsTo = classpathChoices)
+ val lClasspath = Choice("l:classpath", "Enable cross-method optimizations across the entire classpath: "+ classpathChoices.mkString(","), expandsTo = classpathChoices)
}
val Yopt = MultiChoiceSetting(
@@ -233,7 +242,12 @@ trait ScalaSettings extends AbsScalaSettings
descr = "Enable optimizations",
domain = YoptChoices)
- def YoptUnreachableCode: Boolean = !Yopt.isSetByUser || Yopt.contains(YoptChoices.unreachableCode)
+ def YoptUnreachableCode = !Yopt.isSetByUser || Yopt.contains(YoptChoices.unreachableCode)
+ def YoptSimplifyJumps = Yopt.contains(YoptChoices.simplifyJumps)
+ def YoptRecurseUnreachableJumps = Yopt.contains(YoptChoices.recurseUnreachableJumps)
+ def YoptEmptyLineNumbers = Yopt.contains(YoptChoices.emptyLineNumbers)
+ def YoptEmptyLabels = Yopt.contains(YoptChoices.emptyLabels)
+ def YoptCompactLocals = Yopt.contains(YoptChoices.compactLocals)
private def removalIn212 = "This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug."
@@ -318,3 +332,8 @@ trait ScalaSettings extends AbsScalaSettings
val Discard = "discard"
}
}
+
+object ClassPathRepresentationType {
+ val Flat = "flat"
+ val Recursive = "recursive"
+}
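
A minimal sketch of how the expanding choices above behave (hypothetical demo object): enabling -Yopt:l:method turns on each individual intra-method optimization accessor.

import scala.tools.nsc.Settings

object YoptChoicesDemo {
  def main(args: Array[String]): Unit = {
    val settings = new Settings(msg => sys.error(msg))
    settings.processArguments(List("-Yopt:l:method"), processAll = true)
    // l:method expands to the individual intra-method optimizations
    println(settings.YoptUnreachableCode) // true
    println(settings.YoptSimplifyJumps)   // true
    println(settings.YoptCompactLocals)   // true
  }
}
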
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
index 4f45043c5e..43bdad5882 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -34,7 +34,7 @@ case object NoScalaVersion extends ScalaVersion {
* to segregate builds
*/
case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
- def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+ def unparse = s"${major}.${minor}.${rev}${build.unparse}"
def compare(that: ScalaVersion): Int = that match {
case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 82c2a4d6ed..9af3efbece 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -6,13 +6,15 @@
package scala.tools.nsc
package symtab
+import classfile.ClassfileParser
import java.io.IOException
import scala.compat.Platform.currentTime
-import scala.tools.nsc.util.{ ClassPath }
-import classfile.ClassfileParser
import scala.reflect.internal.MissingRequirementError
import scala.reflect.internal.util.Statistics
import scala.reflect.io.{ AbstractFile, NoAbstractFile }
+import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.util.{ ClassPath, ClassRepresentation }
/** This class ...
*
@@ -86,8 +88,7 @@ abstract class SymbolLoaders {
// require yjp.jar at runtime. See SI-2089.
if (settings.termConflict.isDefault)
throw new TypeError(
- root+" contains object and package with same name: "+
- name+"\none of them needs to be removed from classpath"
+ s"$root contains object and package with same name: $name\none of them needs to be removed from classpath"
)
else if (settings.termConflict.value == "package") {
warning(
@@ -154,7 +155,7 @@ abstract class SymbolLoaders {
/** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
*/
- def initializeFromClassPath(owner: Symbol, classRep: ClassPath[AbstractFile]#ClassRep) {
+ def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation[AbstractFile]) {
((classRep.binary, classRep.source) : @unchecked) match {
case (Some(bin), Some(src))
if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
@@ -250,7 +251,7 @@ abstract class SymbolLoaders {
* Load contents of a package
*/
class PackageLoader(classpath: ClassPath[AbstractFile]) extends SymbolLoader with FlagAgnosticCompleter {
- protected def description = "package loader "+ classpath.name
+ protected def description = s"package loader ${classpath.name}"
protected def doComplete(root: Symbol) {
assert(root.isPackageClass, root)
@@ -276,6 +277,39 @@ abstract class SymbolLoaders {
}
}
+ /**
+ * Loads contents of a package
+ */
+ class PackageLoaderUsingFlatClassPath(packageName: String, classPath: FlatClassPath) extends SymbolLoader with FlagAgnosticCompleter {
+ protected def description = {
+ val shownPackageName = if (packageName == FlatClassPath.RootPackage) "<root package>" else packageName
+ s"package loader $shownPackageName"
+ }
+
+ protected def doComplete(root: Symbol) {
+ assert(root.isPackageClass, root)
+ root.setInfo(new PackageClassInfoType(newScope, root))
+
+ val classPathEntries = classPath.list(packageName)
+
+ if (!root.isRoot)
+ for (entry <- classPathEntries.classesAndSources) initializeFromClassPath(root, entry)
+ if (!root.isEmptyPackageClass) {
+ for (pkg <- classPathEntries.packages) {
+ val fullName = pkg.name
+
+ val name =
+ if (packageName == FlatClassPath.RootPackage) fullName
+ else fullName.substring(packageName.length + 1)
+ val packageLoader = new PackageLoaderUsingFlatClassPath(fullName, classPath)
+ enterPackage(root, name, packageLoader)
+ }
+
+ openPackageModule(root)
+ }
+ }
+ }
+
class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
private object classfileParser extends {
val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable
@@ -293,8 +327,13 @@ abstract class SymbolLoaders {
*
*/
private type SymbolLoadersRefined = SymbolLoaders { val symbolTable: classfileParser.symbolTable.type }
+
val loaders = SymbolLoaders.this.asInstanceOf[SymbolLoadersRefined]
- val classPath = platform.classPath
+
+ override def classFileLookup: util.ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Recursive => platform.classPath
+ case ClassPathRepresentationType.Flat => platform.flatClassPath
+ }
}
protected def description = "class file "+ classfile.toString
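
A minimal sketch (hypothetical demo object) of reading the new -YclasspathImpl choice; the ClassfileLoader above uses the same pattern to select between the recursive and the flat classpath representation.

import scala.tools.nsc.Settings
import scala.tools.nsc.settings.ClassPathRepresentationType

object ClasspathImplDemo {
  def main(args: Array[String]): Unit = {
    val settings = new Settings(msg => sys.error(msg))
    settings.processArguments(List("-YclasspathImpl:flat"), processAll = true)
    settings.YclasspathImpl.value match {
      case ClassPathRepresentationType.Flat      => println("symbol loading will use platform.flatClassPath")
      case ClassPathRepresentationType.Recursive => println("symbol loading will use platform.classPath")
    }
  }
}
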
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 14be8374b9..1abbdb50b0 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -16,8 +16,7 @@ import scala.annotation.switch
import scala.reflect.internal.{ JavaAccFlags }
import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
import scala.tools.nsc.io.AbstractFile
-
-import util.ClassPath
+import scala.tools.nsc.util.ClassFileLookup
/** This abstract class implements a class file parser.
*
@@ -43,8 +42,8 @@ abstract class ClassfileParser {
*/
protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol
- /** The compiler classpath. */
- def classPath: ClassPath[AbstractFile]
+  /** The class file lookup used by the compiler. */
+ def classFileLookup: ClassFileLookup[AbstractFile]
import definitions._
import scala.reflect.internal.ClassfileConstants._
@@ -352,7 +351,7 @@ abstract class ClassfileParser {
}
private def loadClassSymbol(name: Name): Symbol = {
- val file = classPath findClassFile ("" +name) getOrElse {
+ val file = classFileLookup findClassFile name.toString getOrElse {
// SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
// therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
// that are not in their correct place (see bug for details)
@@ -1047,8 +1046,8 @@ abstract class ClassfileParser {
for (entry <- innerClasses.entries) {
// create a new class member for immediate inner classes
if (entry.outerName == currentClass) {
- val file = classPath.findClassFile(entry.externalName.toString) getOrElse {
- throw new AssertionError(entry.externalName)
+ val file = classFileLookup.findClassFile(entry.externalName.toString) getOrElse {
+ throw new AssertionError(s"Class file for ${entry.externalName} not found")
}
enterClassAndModule(entry, file)
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index cbe427775a..bd1fa4e707 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -130,7 +130,7 @@ abstract class ICodeReader extends ClassfileParser {
log("ICodeReader reading " + cls)
val name = cls.javaClassName
- classPath.findClassFile(name) match {
+ classFileLookup.findClassFile(name) match {
case Some(classFile) => parse(classFile, cls)
case _ => MissingRequirementError.notFound("Could not find bytecode for " + cls)
}
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 2b7c6cca2c..f786ffb8f3 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -8,6 +8,7 @@ package transform
import symtab._
import Flags._
+import scala.tools.nsc.util.ClassPath
abstract class AddInterfaces extends InfoTransform { self: Erasure =>
import global._ // the global environment
@@ -67,25 +68,30 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
val implName = tpnme.implClassName(iface.name)
val implFlags = (iface.flags & ~(INTERFACE | lateINTERFACE)) | IMPLCLASS
- val impl0 = (
+ val impl0 = {
if (!inClass) NoSymbol
- else iface.owner.info.decl(implName) match {
- case NoSymbol => NoSymbol
- case implSym =>
- // Unlink a pre-existing symbol only if the implementation class is
- // visible on the compilation classpath. In general this is true under
- // -optimise and not otherwise, but the classpath can use arbitrary
- // logic so the classpath must be queried.
- if (classPath.context.isValidName(implName + ".class")) {
- iface.owner.info.decls unlink implSym
- NoSymbol
- }
- else {
- log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.")
- implSym
- }
+ else {
+ val typeInfo = iface.owner.info
+ typeInfo.decl(implName) match {
+ case NoSymbol => NoSymbol
+ case implSym =>
+ // Unlink a pre-existing symbol only if the implementation class is
+ // visible on the compilation classpath. In general this is true under
+ // -optimise and not otherwise, but the classpath can use arbitrary
+ // logic so the classpath must be queried.
+ // TODO this is not taken into account by flat classpath yet
+ classPath match {
+ case cp: ClassPath[_] if !cp.context.isValidName(implName + ".class") =>
+ log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.")
+ implSym
+ case _ =>
+ typeInfo.decls unlink implSym
+ NoSymbol
+ }
+ }
}
- )
+ }
+
val impl = impl0 orElse {
val impl = iface.owner.newImplClass(implName, iface.pos, implFlags)
if (iface.thisSym != iface) {
@@ -345,6 +351,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
while (owner != sym && owner != impl) owner = owner.owner;
if (owner == impl) This(impl) setPos tree.pos
else tree
+ //TODO what about this commented out code?
/* !!!
case Super(qual, mix) =>
val mix1 = mix
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 1664fe0e0d..c29826551b 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -520,7 +520,9 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
* And, finally, be advised - Scala's Symbol literal (scala.Symbol) and the Symbol class of the compiler
* have little in common.
*/
- case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply =>
+ case Apply(fn @ Select(qual, _), (arg @ Literal(Constant(symname: String))) :: Nil)
+ if treeInfo.isQualifierSafeToElide(qual) && fn.symbol == Symbol_apply && !currentClass.isTrait =>
+
def transformApply = {
// add the symbol name to a map if it's not there already
val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
index 835d338ab3..f7b1021ea2 100644
--- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -30,7 +30,6 @@ import scala.collection.mutable.LinkedHashMap
abstract class Delambdafy extends Transform with TypingTransformers with ast.TreeDSL with TypeAdaptingTransformer {
import global._
import definitions._
- import CODE._
val analyzer: global.analyzer.type = global.analyzer
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 3d8b2f02f3..b6af19250e 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -98,7 +98,7 @@ abstract class Erasure extends AddInterfaces
val len = sig.length
val copy: Array[Char] = sig.toCharArray
var changed = false
- while (i < sig.length) {
+ while (i < len) {
val ch = copy(i)
if (ch == '.' && last != '>') {
copy(i) = '$'
@@ -185,6 +185,25 @@ abstract class Erasure extends AddInterfaces
private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType]
+ /* Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
+ * This is important on Android because there is otherwise an interface explosion.
+ */
+ def minimizeInterfaces(lstIfaces: List[Type]): List[Type] = {
+ var rest = lstIfaces
+ var leaves = List.empty[Type]
+ while(!rest.isEmpty) {
+ val candidate = rest.head
+ val nonLeaf = leaves exists { t => t.typeSymbol isSubClass candidate.typeSymbol }
+ if(!nonLeaf) {
+ leaves = candidate :: (leaves filterNot { t => candidate.typeSymbol isSubClass t.typeSymbol })
+ }
+ rest = rest.tail
+ }
+
+ leaves.reverse
+ }
+
+
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
@@ -192,16 +211,24 @@ abstract class Erasure extends AddInterfaces
val isTraitSignature = sym0.enclClass.isTrait
def superSig(parents: List[Type]) = {
- val ps = (
- if (isTraitSignature) {
+ def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait
+
+ // a signature should always start with a class
+ def ensureClassAsFirstParent(tps: List[Type]) = tps match {
+ case Nil => ObjectTpe :: Nil
+ case head :: tail if isInterfaceOrTrait(head.typeSymbol) => ObjectTpe :: tps
+ case _ => tps
+ }
+
+ val minParents = minimizeInterfaces(parents)
+ val validParents =
+ if (isTraitSignature)
// java is unthrilled about seeing interfaces inherit from classes
- val ok = parents filter (p => p.typeSymbol.isTrait || p.typeSymbol.isInterface)
- // traits should always list Object.
- if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectTpe :: ok
- else ok
- }
- else parents
- )
+ minParents filter (p => isInterfaceOrTrait(p.typeSymbol))
+ else minParents
+
+ val ps = ensureClassAsFirstParent(validParents)
+
(ps map boxedSig).mkString
}
def boxedSig(tp: Type) = jsig(tp, primitiveOK = false)
@@ -410,7 +437,6 @@ abstract class Erasure extends AddInterfaces
def fulldef(sym: Symbol) =
if (sym == NoSymbol) sym.toString
else s"$sym: ${sym.tpe} in ${sym.owner}"
- var noclash = true
val clashErrors = mutable.Buffer[(Position, String)]()
def clashError(what: String) = {
val pos = if (member.owner == root) member.pos else root.pos
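
A small illustration (hypothetical classes, not from the patch) of the parent minimization added to superSig above: since B already extends A, A is redundant among C's immediate parents and is dropped from the super interfaces recorded in C's generic signature.

trait A[T]
trait B[T] extends A[T]
// C names both traits, but only B (plus the required Object superclass)
// needs to appear in C's generic signature.
class C extends B[String] with A[String]
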
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 228c9da624..116047a2ad 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -208,7 +208,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
def makeExtensionMethodSymbol = {
val extensionName = extensionNames(origMeth).head.toTermName
val extensionMeth = (
- companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+ companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED & ~LOCAL | FINAL)
setAnnotations origMeth.annotations
)
origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now.
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index bbd11efa7e..c1c025ad48 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package transform
-import symtab.Flags._
import scala.reflect.internal.SymbolPairs
/** A class that yields a kind of iterator (`Cursor`),
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 908aa69310..9c81e31ad9 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -861,11 +861,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("%s expands to %s in %s".format(sym, specMember.name.decode, pp(env)))
info(specMember) = NormalizedMember(sym)
newOverload(sym, specMember, env)
- // if this is a class, we insert the normalized member in scope,
- // if this is a method, there's no attached scope for it (EmptyScope)
- val decls = owner.info.decls
- if (decls != EmptyScope)
- decls.enter(specMember)
specMember
}
}
@@ -1504,20 +1499,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val residualTargs = symbol.info.typeParams zip baseTargs collect {
case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ
}
- // See SI-5583. Don't know why it happens now if it didn't before.
- if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) {
- devWarning("Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
- baseTree
- }
- else {
- ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
- "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env))
- )
+ ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
+ "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env))
+ )
- val tree1 = gen.mkTypeApply(specTree, residualTargs)
- debuglog("rewrote " + tree + " to " + tree1)
- localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
- }
+ val tree1 = gen.mkTypeApply(specTree, residualTargs)
+ debuglog("rewrote " + tree + " to " + tree1)
+ localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
}
curTree = tree
diff --git a/src/compiler/scala/tools/nsc/transform/Statics.scala b/src/compiler/scala/tools/nsc/transform/Statics.scala
index e2508b8d08..4673be6de7 100644
--- a/src/compiler/scala/tools/nsc/transform/Statics.scala
+++ b/src/compiler/scala/tools/nsc/transform/Statics.scala
@@ -1,9 +1,6 @@
package scala.tools.nsc
package transform
-import symtab._
-import Flags._
-
import collection.mutable.Buffer
abstract class Statics extends Transform with ast.TreeDSL {
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index ef534f70fd..16ea3ea90f 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -129,6 +129,13 @@ abstract class TailCalls extends Transform {
}
override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}"
+ final def noTailContext() = clonedTailContext(false)
+ final def yesTailContext() = clonedTailContext(true)
+ protected def clonedTailContext(tailPos: Boolean): TailContext = this match {
+ case _ if this.tailPos == tailPos => this
+ case clone: ClonedTailContext => clone.that.clonedTailContext(tailPos)
+ case _ => new ClonedTailContext(this, tailPos)
+ }
}
object EmptyTailContext extends TailContext {
@@ -174,7 +181,7 @@ abstract class TailCalls extends Transform {
}
def containsRecursiveCall(t: Tree) = t exists isRecursiveCall
}
- class ClonedTailContext(that: TailContext, override val tailPos: Boolean) extends TailContext {
+ class ClonedTailContext(val that: TailContext, override val tailPos: Boolean) extends TailContext {
def method = that.method
def tparams = that.tparams
def methodPos = that.methodPos
@@ -183,9 +190,6 @@ abstract class TailCalls extends Transform {
}
private var ctx: TailContext = EmptyTailContext
- private def noTailContext() = new ClonedTailContext(ctx, tailPos = false)
- private def yesTailContext() = new ClonedTailContext(ctx, tailPos = true)
-
override def transformUnit(unit: CompilationUnit): Unit = {
try {
@@ -206,16 +210,16 @@ abstract class TailCalls extends Transform {
finally this.ctx = saved
}
- def yesTailTransform(tree: Tree): Tree = transform(tree, yesTailContext())
- def noTailTransform(tree: Tree): Tree = transform(tree, noTailContext())
+ def yesTailTransform(tree: Tree): Tree = transform(tree, ctx.yesTailContext())
+ def noTailTransform(tree: Tree): Tree = transform(tree, ctx.noTailContext())
def noTailTransforms(trees: List[Tree]) = {
- val nctx = noTailContext()
- trees map (t => transform(t, nctx))
+ val nctx = ctx.noTailContext()
+ trees mapConserve (t => transform(t, nctx))
}
override def transform(tree: Tree): Tree = {
/* A possibly polymorphic apply to be considered for tail call transformation. */
- def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree]) = {
+ def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree], mustTransformArgs: Boolean = true) = {
val receiver: Tree = fun match {
case Select(qual, _) => qual
case _ => EmptyTree
@@ -223,7 +227,7 @@ abstract class TailCalls extends Transform {
def receiverIsSame = ctx.enclosingType.widen =:= receiver.tpe.widen
def receiverIsSuper = ctx.enclosingType.widen <:< receiver.tpe.widen
def isRecursiveCall = (ctx.method eq fun.symbol) && ctx.tailPos
- def transformArgs = noTailTransforms(args)
+ def transformArgs = if (mustTransformArgs) noTailTransforms(args) else args
def matchesTypeArgs = ctx.tparams sameElements (targs map (_.tpe.typeSymbol))
/* Records failure reason in Context for reporting.
@@ -265,6 +269,10 @@ abstract class TailCalls extends Transform {
!(sym.hasAccessorFlag || sym.isConstructor)
}
+ // intentionally shadowing imports from definitions for performance
+ val runDefinitions = currentRun.runDefinitions
+ import runDefinitions.{Boolean_or, Boolean_and}
+
tree match {
case ValDef(_, _, _, _) =>
if (tree.symbol.isLazy && tree.symbol.hasAnnotation(TailrecClass))
@@ -312,8 +320,13 @@ abstract class TailCalls extends Transform {
// the assumption is once we encounter a case, the remainder of the block will consist of cases
// the prologue may be empty, usually it is the valdef that stores the scrut
val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ val transformedPrologue = noTailTransforms(prologue)
+ val transformedCases = transformTrees(cases)
+ val transformedStats =
+ if ((prologue eq transformedPrologue) && (cases eq transformedCases)) stats // allow reuse of `tree` if the subtransform was an identity
+ else transformedPrologue ++ transformedCases
treeCopy.Block(tree,
- noTailTransforms(prologue) ++ transformTrees(cases),
+ transformedStats,
transform(expr)
)
@@ -380,7 +393,7 @@ abstract class TailCalls extends Transform {
if (res ne arg)
treeCopy.Apply(tree, fun, res :: Nil)
else
- rewriteApply(fun, fun, Nil, args)
+ rewriteApply(fun, fun, Nil, args, mustTransformArgs = false)
case Apply(fun, args) =>
rewriteApply(fun, fun, Nil, args)
@@ -421,6 +434,10 @@ abstract class TailCalls extends Transform {
def traverseNoTail(tree: Tree) = traverse(tree, maybeTailNew = false)
def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail
+ // intentionally shadowing imports from definitions for performance
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions.{Boolean_or, Boolean_and}
+
override def traverse(tree: Tree) = tree match {
// we're looking for label(x){x} in tail position, since that means `a` is in tail position in a call `label(a)`
case LabelDef(_, List(arg), body@Ident(_)) if arg.symbol == body.symbol =>
diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
index f83b6f857e..3b23306386 100644
--- a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
@@ -1,7 +1,6 @@
package scala.tools.nsc
package transform
-import scala.reflect.internal._
import scala.tools.nsc.ast.TreeDSL
import scala.tools.nsc.Global
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index d862805a07..22661d6ccf 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -544,10 +544,17 @@ trait MatchTranslation {
// wrong when isSeq, and resultInMonad should always be correct since it comes
// directly from the extractor's result type
val binder = freshSym(pos, pureType(resultInMonad))
+ val potentiallyMutableBinders: Set[Symbol] =
+ if (extractorApply.tpe.typeSymbol.isNonBottomSubClass(OptionClass) && !aligner.isSeq)
+ Set.empty
+ else
+          // Ensures we capture unstable bound variables eagerly. These can arise under name-based patmat or by indexing into mutable Seqs. See run/t9003.scala.
+ subPatBinders.toSet
ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
subPatBinders,
subPatRefs(binder),
+ potentiallyMutableBinders,
aligner.isBool,
checkedLength,
patBinderOrCasted,
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index 3abec521df..3fd9ce76f8 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -192,13 +192,14 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol)(
val subPatBinders: List[Symbol],
val subPatRefs: List[Tree],
+ val potentiallyMutableBinders: Set[Symbol],
extractorReturnsBoolean: Boolean,
val checkedLength: Option[Int],
val prevBinder: Symbol,
val ignoredSubPatBinders: Set[Symbol]
) extends FunTreeMaker with PreserveSubPatBinders {
- def extraStoredBinders: Set[Symbol] = Set()
+ def extraStoredBinders: Set[Symbol] = potentiallyMutableBinders
debug.patmat(s"""
|ExtractorTreeMaker($extractor, $extraCond, $nextBinder) {
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
index 79f5e3bee8..8924394b72 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
@@ -73,9 +73,7 @@ trait ScalacPatternExpanders {
* Unfortunately the MethodType does not carry the information of whether
* it was unapplySeq, so we have to funnel that information in separately.
*/
- def unapplyMethodTypes(method: Type, isSeq: Boolean): Extractor = {
- val whole = firstParamType(method)
- val result = method.finalResultType
+ def unapplyMethodTypes(whole: Type, result: Type, isSeq: Boolean): Extractor = {
val expanded = (
if (result =:= BooleanTpe) Nil
else typeOfMemberNamedGet(result) match {
@@ -124,11 +122,11 @@ trait ScalacPatternExpanders {
case _ => sel
}
val patterns = newPatterns(args)
- val isSeq = sel.symbol.name == nme.unapplySeq
val isUnapply = sel.symbol.name == nme.unapply
+
val extractor = sel.symbol.name match {
- case nme.unapply => unapplyMethodTypes(fn.tpe, isSeq = false)
- case nme.unapplySeq => unapplyMethodTypes(fn.tpe, isSeq = true)
+ case nme.unapply => unapplyMethodTypes(firstParamType(fn.tpe), sel.tpe, isSeq = false)
+ case nme.unapplySeq => unapplyMethodTypes(firstParamType(fn.tpe), sel.tpe, isSeq = true)
case _ => applyMethodTypes(fn.tpe)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 3a77cab919..fc632e0d0d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -11,12 +11,28 @@ import scala.language.postfixOps
/** On pattern matcher checkability:
*
+ * The spec says that case _: List[Int] should always issue
+ * an unchecked warning:
+ *
+ * > Types which are not of one of the forms described above are
+ * > also accepted as type patterns. However, such type patterns
+ * > will be translated to their erasure (§3.7). The Scala compiler
+ * > will issue an “unchecked” warning for these patterns to flag
+ * > the possible loss of type-safety.
+ *
+ * But the implementation goes a little further to omit warnings
+ * based on the static type of the scrutinee. As a trivial example:
+ *
+ * def foo(s: Seq[Int]) = s match { case _: List[Int] => }
+ *
+ * need not issue this warning.
+ *
* Consider a pattern match of this form: (x: X) match { case _: P => }
*
* There are four possibilities to consider:
* [P1] X will always conform to P
* [P2] x will never conform to P
- * [P3] X <: P if some runtime test is true
+ * [P3] X will conform to P if some runtime test is true
* [P4] X cannot be checked against P
*
* The first two cases correspond to those when there is enough
@@ -28,6 +44,11 @@ import scala.language.postfixOps
* which is essentially the intersection of X and |P|, where |P| is
* the erasure of P. If XR <: P, then no warning is emitted.
*
+ * We evaluate "X will conform to P" by checking `X <: P_wild`, where
+ * P_wild is the result of substituting wildcard types in place of
+ * pattern type variables. This is intentionally stricter than
+ * (X matchesPattern P), see SI-8597 for motivating test cases.
+ *
* Examples of how this info is put to use:
* sealed trait A[T] ; class B[T] extends A[T]
* def f(x: B[Int]) = x match { case _: A[Int] if true => }
@@ -100,7 +121,7 @@ trait Checkable {
private def typeArgsInTopLevelType(tp: Type): List[Type] = {
val tps = tp match {
case RefinedType(parents, _) => parents flatMap typeArgsInTopLevelType
- case TypeRef(_, ArrayClass, arg :: Nil) => typeArgsInTopLevelType(arg)
+ case TypeRef(_, ArrayClass, arg :: Nil) => if (arg.typeSymbol.isAbstractType) arg :: Nil else typeArgsInTopLevelType(arg)
case TypeRef(pre, sym, args) => typeArgsInTopLevelType(pre) ++ args
case ExistentialType(tparams, underlying) => tparams.map(_.tpe) ++ typeArgsInTopLevelType(underlying)
case _ => Nil
@@ -108,14 +129,31 @@ trait Checkable {
tps filterNot isUnwarnableTypeArg
}
+ private def scrutConformsToPatternType(scrut: Type, pattTp: Type): Boolean = {
+ def typeVarToWildcard(tp: Type) = {
+ // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala
+ if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp
+ }
+ val pattTpWild = pattTp.map(typeVarToWildcard)
+ scrut <:< pattTpWild
+ }
+
private class CheckabilityChecker(val X: Type, val P: Type) {
def Xsym = X.typeSymbol
def Psym = P.typeSymbol
- def XR = if (Xsym == AnyClass) classExistentialType(Psym) else propagateKnownTypes(X, Psym)
+ def PErased = {
+ P match {
+ case erasure.GenericArray(n, core) => existentialAbstraction(core.typeSymbol :: Nil, P)
+ case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*)
+ }
+ }
+ def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym)
+
+
// sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean]
- def P1 = X matchesPattern P
+ def P1 = scrutConformsToPatternType(X, P)
def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P)
- def P3 = isNonRefinementClassType(P) && (XR matchesPattern P)
+ def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P)
def P4 = !(P1 || P2 || P3)
def summaryString = f"""
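As a user-level illustration of the classification above (not from the patch; compile with -unchecked to see the warning):

object CheckabilityDemo {
  // No unchecked warning: the static type Seq[Int] pins the element type,
  // so List[Int] is checkable against it (the Seq[Int] example from the comment above).
  def f1(s: Seq[Int]) = s match { case _: List[Int] => true; case _ => false }

  // Unchecked warning: Int is eliminated by erasure, the runtime test only sees List[_].
  def f2(x: Any) = x match { case _: List[Int] => true; case _ => false }
}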
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index eb29ccf4e1..a7d0d32c6f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -104,7 +104,7 @@ trait Contexts { self: Analyzer =>
// there must be a scala.xml package when xml literals were parsed in this unit
if (unit.hasXml && ScalaXmlPackage == NoSymbol)
- reporter.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see http://docs.scala-lang.org/overviews/core/scala-2.11.html#scala-xml.")
+ reporter.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see https://github.com/scala/scala-xml for details.")
// scala-xml needs `scala.xml.TopScope` to be in scope globally as `$scope`
// We detect `scala-xml` by looking for `scala.xml.TopScope` and
@@ -330,7 +330,7 @@ trait Contexts { self: Analyzer =>
// if set, errors will not be reporter/thrown
def bufferErrors = reporter.isBuffering
- def reportErrors = !bufferErrors
+ def reportErrors = !(bufferErrors || reporter.isThrowing)
// whether to *report* (which is separate from buffering/throwing) ambiguity errors
def ambiguousErrors = this(AmbiguousErrors)
@@ -480,7 +480,8 @@ trait Contexts { self: Analyzer =>
// SI-8245 `isLazy` need to skip lazy getters to ensure `return` binds to the right place
c.enclMethod = if (isDefDef && !owner.isLazy) c else enclMethod
- if (tree != outer.tree) c(TypeConstructorAllowed) = false
+ if (tree != outer.tree)
+ c(TypeConstructorAllowed) = false
registerContext(c.asInstanceOf[analyzer.Context])
debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
@@ -798,7 +799,7 @@ trait Contexts { self: Analyzer =>
isAccessible(sym, pre) &&
!(imported && {
val e = scope.lookupEntry(name)
- (e ne null) && (e.owner == scope)
+ (e ne null) && (e.owner == scope) && (!settings.isScala212 || e.sym.exists)
})
private def collectImplicits(syms: Scope, pre: Type, imported: Boolean = false): List[ImplicitInfo] =
@@ -1247,6 +1248,7 @@ trait Contexts { self: Analyzer =>
def makeImmediate: ContextReporter = this
def makeBuffering: ContextReporter = this
def isBuffering: Boolean = false
+ def isThrowing: Boolean = false
/** Emit an ambiguous error according to context.ambiguousErrors
*
@@ -1384,6 +1386,7 @@ trait Contexts { self: Analyzer =>
* TODO: get rid of it, use ImmediateReporter and a check for reporter.hasErrors where necessary
*/
private[typechecker] class ThrowingReporter extends ContextReporter {
+ override def isThrowing = true
protected def handleError(pos: Position, msg: String): Unit = throw new TypeError(pos, msg)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index ee2775ee26..8979b26719 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -295,11 +295,17 @@ trait Infer extends Checkable {
&& !isByNameParamType(tp)
&& isCompatible(tp, dropByName(pt))
)
+ def isCompatibleSam(tp: Type, pt: Type): Boolean = {
+ val samFun = typer.samToFunctionType(pt)
+ (samFun ne NoType) && isCompatible(tp, samFun)
+ }
+
val tp1 = normalize(tp)
( (tp1 weak_<:< pt)
|| isCoercible(tp1, pt)
|| isCompatibleByName(tp, pt)
+ || isCompatibleSam(tp, pt)
)
}
def isCompatibleArgs(tps: List[Type], pts: List[Type]) = (tps corresponds pts)(isCompatible)
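A sketch of the user-level conversion that isCompatibleSam admits (names invented; on 2.11.x this may additionally require -Xexperimental): a function literal is accepted where a single-abstract-method type is expected.

trait Consumer { def accept(s: String): Unit }

object SamDemo extends App {
  val c: Consumer = s => println(s.toUpperCase)   // Function1-shaped literal against a SAM expected type
  c.accept("hello")
}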
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index ba183fe3e6..0aa62d771e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -212,7 +212,9 @@ trait MethodSynthesis {
List(cd, mdef)
case _ =>
// Shouldn't happen, but let's give ourselves a reasonable error when it does
- abort("No synthetics for " + meth + ": synthetics contains " + context.unit.synthetics.keys.mkString(", "))
+ context.error(cd.pos, s"Internal error: Symbol for synthetic factory method not found among ${context.unit.synthetics.keys.mkString(", ")}")
+ // Soldier on for the sake of the presentation compiler
+ List(cd)
}
case _ =>
stat :: Nil
@@ -355,8 +357,9 @@ trait MethodSynthesis {
def derivedSym: Symbol = {
// Only methods will do! Don't want to pick up any stray
// companion objects of the same name.
- val result = enclClass.info decl name suchThat (x => x.isMethod && x.isSynthetic)
- assert(result != NoSymbol, "not found: "+name+" in "+enclClass+" "+enclClass.info.decls)
+ val result = enclClass.info decl name filter (x => x.isMethod && x.isSynthetic)
+ if (result == NoSymbol || result.isOverloaded)
+ context.error(tree.pos, s"Internal error: Unable to find the synthetic factory method corresponding to implicit class $name in $enclClass / ${enclClass.info.decls}")
result
}
def derivedTree: DefDef =
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index e876d4a6af..0bb94be636 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -296,7 +296,7 @@ trait Namers extends MethodSynthesis {
}
tree.symbol match {
case NoSymbol => try dispatch() catch typeErrorHandler(tree, this.context)
- case sym => enterExistingSym(sym)
+ case sym => enterExistingSym(sym, tree)
}
}
@@ -413,6 +413,7 @@ trait Namers extends MethodSynthesis {
if (isRedefinition) {
updatePosFlags(existing, tree.pos, tree.mods.flags)
setPrivateWithin(tree, existing)
+ clearRenamedCaseAccessors(existing)
existing
}
else assignAndEnterSymbol(tree) setFlag inConstructorFlag
@@ -583,7 +584,7 @@ trait Namers extends MethodSynthesis {
// more than one hidden name, the second will not be warned.
// So it is the position of the actual hidden name.
//
- // Note: java imports have precence over definitions in the same package
+ // Note: java imports have precedence over definitions in the same package
// so don't warn for them. There is a corresponding special treatment
// in the shadowing rules in typedIdent to (SI-7232). In any case,
// we shouldn't be emitting warnings for .java source files.
@@ -736,7 +737,9 @@ trait Namers extends MethodSynthesis {
}
// Hooks which are overridden in the presentation compiler
- def enterExistingSym(sym: Symbol): Context = this.context
+ def enterExistingSym(sym: Symbol, tree: Tree): Context = {
+ this.context
+ }
def enterIfNotThere(sym: Symbol) { }
def enterSyntheticSym(tree: Tree): Symbol = {
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index af4e9e8927..d2931ff9e1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -543,7 +543,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
def checkOverrideDeprecated() {
- if (other.hasDeprecatedOverridingAnnotation) {
+ if (other.hasDeprecatedOverridingAnnotation && !member.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
val suffix = other.deprecatedOverridingMessage map (": " + _) getOrElse ""
val msg = s"overriding ${other.fullLocationString} is deprecated$suffix"
currentRun.reporting.deprecationWarning(member.pos, other, msg)
@@ -1095,7 +1095,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// better to have lubbed and lost
def warnIfLubless(): Unit = {
val common = global.lub(List(actual.tpe, receiver.tpe))
- if (ObjectTpe <:< common)
+ if (ObjectTpe <:< common && !(ObjectTpe <:< actual.tpe && ObjectTpe <:< receiver.tpe))
unrelatedTypes()
}
// warn if actual has a case parent that is not same as receiver's;
@@ -1404,7 +1404,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (symbol.isDeprecated) {
val concrOvers =
symbol.allOverriddenSymbols.filter(sym =>
- !sym.isDeprecated && !sym.isDeferred)
+ !sym.isDeprecated && !sym.isDeferred && !sym.hasDeprecatedOverridingAnnotation && !sym.enclClass.hasDeprecatedInheritanceAnnotation)
if(!concrOvers.isEmpty)
currentRun.reporting.deprecationWarning(
tree.pos,
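A sketch of the suppression that the checkOverrideDeprecated change introduces (names invented): overriding a @deprecatedOverriding member from inside an owner that is itself deprecated no longer warns.

class Base {
  @deprecatedOverriding("prefer overriding run()", "1.0")
  def step(): Unit = ()
}

@deprecated("legacy implementation", "1.0")
class LegacyImpl extends Base {
  override def step(): Unit = ()   // no "overriding ... is deprecated" warning: the enclosing class is deprecated
}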
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 57f27a05fd..ea44b9dc39 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -61,7 +61,7 @@ trait StdAttachments {
val metadata = MacroExpansionAttachment(expandee, expanded)
expandee updateAttachment metadata
expanded match {
- case expanded: Tree => expanded updateAttachment metadata
+ case expanded: Tree if !expanded.isEmpty => expanded updateAttachment metadata
case _ => // do nothing
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index bedca88974..1daff02c23 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -54,6 +54,9 @@ trait SyntheticMethods extends ast.TreeDSL {
/** Does not force the info of `caseclazz` */
final def caseAccessorName(caseclazz: Symbol, paramName: TermName) =
(renamedCaseAccessors get caseclazz).fold(paramName)(_(paramName))
+ final def clearRenamedCaseAccessors(caseclazz: Symbol): Unit = {
+ renamedCaseAccessors -= caseclazz
+ }
/** Add the synthetic methods to case classes.
*/
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index 90ec3a89b8..57dc74d2a0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -11,7 +11,6 @@ trait Tags {
self: Typer =>
private val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = enteringTyper {
def wrapper (tree: => Tree): Tree = if (allowMaterialization) (context.withMacrosEnabled[Tree](tree)) else (context.withMacrosDisabled[Tree](tree))
@@ -66,7 +65,7 @@ trait Tags {
// if someone requests a type tag, but scala-reflect.jar isn't on the library classpath, then bail
if (pre == NoType && ApiUniverseClass == NoSymbol) EmptyTree
else {
- val tagSym = if (concrete) TypeTagClass else WeakTypeTagClass
+ val tagSym = if (concrete) runDefinitions.TypeTagClass else runDefinitions.WeakTypeTagClass
val tagTp = if (pre == NoType) TypeRef(ApiUniverseClass.toTypeConstructor, tagSym, List(tp)) else singleType(pre, pre member tagSym.name)
val taggedTp = appliedType(tagTp, List(tp))
resolveTag(pos, taggedTp, allowMaterialization)
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 1dac27639c..0f90c6a478 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -607,7 +607,7 @@ trait TypeDiagnostics {
if (!c.owner.exists || c.owner.isClass || c.owner.isMethod || (c.owner.isType && !c.owner.isParameter)) c
else enclClassOrMethodOrTypeMember(c.outer)
- val tt = tparams.filter(_.name != typeNames.WILDCARD).foreach { tp =>
+ tparams.filter(_.name != typeNames.WILDCARD).foreach { tp =>
// we don't care about type params shadowing other type params in the same declaration
enclClassOrMethodOrTypeMember(context).outer.lookupSymbol(tp.name, s => s != tp.symbol && s.hasRawInfo && reallyExists(s)) match {
case LookupSucceeded(_, sym2) => context.warning(tp.pos,
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index aae2d24b32..aaa75b5ee1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -741,6 +741,26 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case _ =>
}
+ /**
+ * Convert a SAM type to the corresponding FunctionType,
+ * extrapolating BoundedWildcardTypes in the process
+ * (no type precision is lost by the extrapolation,
+ * but this facilitates dealing with the types arising from Java's use-site variance).
+ */
+ def samToFunctionType(tp: Type, sam: Symbol = NoSymbol): Type = {
+ val samSym = sam orElse samOf(tp)
+
+ def correspondingFunctionSymbol = {
+ val numVparams = samSym.info.params.length
+ if (numVparams > definitions.MaxFunctionArity) NoSymbol
+ else FunctionClass(numVparams)
+ }
+
+ if (samSym.exists && samSym.owner != correspondingFunctionSymbol) // don't treat Functions as SAMs
+ wildcardExtrapolation(normalize(tp memberInfo samSym))
+ else NoType
+ }
+
/** Perform the following adaptations of expression, pattern or type `tree` wrt to
* given mode `mode` and given prototype `pt`:
* (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
@@ -824,7 +844,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case Block(_, tree1) => tree1.symbol
case _ => tree.symbol
}
- if (!meth.isConstructor && isFunctionType(pt)) { // (4.2)
+ if (!meth.isConstructor && (isFunctionType(pt) || samOf(pt).exists)) { // (4.2)
debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt")
checkParamsConvertible(tree, tree.tpe)
val tree0 = etaExpand(context.unit, tree, this)
@@ -850,13 +870,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def adaptType(): Tree = {
// @M When not typing a type constructor (!context.inTypeConstructorAllowed)
- // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
+ // or raw type, types must be of kind *,
// and thus parameterized types must be applied to their type arguments
// @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
def properTypeRequired = (
tree.hasSymbolField
&& !context.inTypeConstructorAllowed
- && !(tree.symbol.isJavaDefined && context.unit.isJava)
+ && !context.unit.isJava
)
// @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
// (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
@@ -1516,7 +1536,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1)
val clazz = context.owner
assert(clazz != NoSymbol, templ)
- val cscope = context.outer.makeNewScope(ctor, context.outer.owner)
+ val dummy = context.outer.owner.newLocalDummy(templ.pos)
+ val cscope = context.outer.makeNewScope(ctor, dummy)
+ if (dummy.isTopLevel) currentRun.symSource(dummy) = currentUnit.source.file
val cbody2 = { // called both during completion AND typing.
val typer1 = newTyper(cscope)
// XXX: see about using the class's symbol....
@@ -1655,7 +1677,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val sameSourceFile = context.unit.source.file == psym.sourceFile
- if (!isPastTyper && psym.hasDeprecatedInheritanceAnnotation && !sameSourceFile) {
+ if (!isPastTyper && psym.hasDeprecatedInheritanceAnnotation &&
+ !sameSourceFile && !context.owner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse ""
val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix"
context.deprecationWarning(parent.pos, psym, msg)
@@ -2678,7 +2701,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* `{
* def apply$body(p1: T1, ..., pN: TN): T = body
* new S {
- * def apply(p1: T1, ..., pN: TN): T = apply$body(p1,..., pN)
+ * def apply(p1: T1', ..., pN: TN'): T' = apply$body(p1,..., pN)
* }
* }`
*
@@ -2688,6 +2711,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
*
* The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `samClassTp`,
* and `resPt` is derived from `samClassTp` -- it may be fully defined, or not...
+ * If it is not fully defined, we derive `samClassTpFullyDefined` by inferring any unknown type parameters.
+ *
+ * The types T1' ... TN' and T' are derived from the method signature of the sam method,
+ * as seen from the fully defined `samClassTpFullyDefined`.
*
* The function's body is put in a method outside of the class definition to enforce scoping.
* S's members should not be in scope in `body`.
@@ -2699,6 +2726,22 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* However T must be fully defined before we type the instantiation, as it'll end up as a parent type,
* which must be fully defined. Would be nice to have some kind of mechanism to insert type vars in a block of code,
* and have the instantiation of the first occurrence propagate to the rest of the block.
+ *
+ * TODO: by-name params
+ * scala> trait LazySink { def accept(a: => Any): Unit }
+ * defined trait LazySink
+ *
+ * scala> val f: LazySink = (a) => (a, a)
+ * f: LazySink = $anonfun$1@1fb26910
+ *
+ * scala> f(println("!"))
+ * <console>:10: error: LazySink does not take parameters
+ * f(println("!"))
+ * ^
+ *
+ * scala> f.accept(println("!"))
+ * !
+ * !
*/
def synthesizeSAMFunction(sam: Symbol, fun: Function, resPt: Type, samClassTp: Type, mode: Mode): Tree = {
// assert(fun.vparams forall (vp => isFullyDefined(vp.tpt.tpe))) -- by construction, as we take them from sam's info
@@ -2779,14 +2822,21 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
samClassTp
}
- // `final override def ${sam.name}($p1: $T1, ..., $pN: $TN): $resPt = ${sam.name}\$body'($p1, ..., $pN)`
+ // what's the signature of the method that we should actually be overriding?
+ val samMethTp = samClassTpFullyDefined memberInfo sam
+ // Before the mutation, `tp <:< vpar.tpt.tpe` should hold.
+ // TODO: error message when this is not the case, as the expansion won't type check
+ // - Ti' <:< Ti and T <: T' must hold for the samDef body to type check
+ val funArgTps = foreach2(samMethTp.paramTypes, fun.vparams)((tp, vpar) => vpar.tpt setType tp)
+
+ // `final override def ${sam.name}($p1: $T1', ..., $pN: $TN'): ${samMethTp.finalResultType} = ${sam.name}\$body'($p1, ..., $pN)`
val samDef =
DefDef(Modifiers(FINAL | OVERRIDE | SYNTHETIC),
sam.name.toTermName,
Nil,
List(fun.vparams),
- TypeTree(samBodyDef.tpt.tpe) setPos sampos.focus,
- Apply(Ident(bodyName), fun.vparams map (p => Ident(p.name)))
+ TypeTree(samMethTp.finalResultType) setPos sampos.focus,
+ Apply(Ident(bodyName), fun.vparams map gen.paramToArg)
)
val serializableParentAddendum =
@@ -2816,6 +2866,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
)
}
+ // TODO: improve error reporting -- when we're in silent mode (from `silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError`)
+ // the errors in the function don't get out...
+ if (block exists (_.isErroneous))
+ context.error(fun.pos, s"Could not derive subclass of $samClassTp\n (with SAM `def $sam$samMethTp`)\n based on: $fun.")
+
classDef.symbol addAnnotation SerialVersionUIDAnnotation
block
}
@@ -2836,7 +2891,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* as `(a => a): Int => Int` should not (yet) get the sam treatment.
*/
val sam =
- if (!settings.Xexperimental || pt.typeSymbol == FunctionSymbol) NoSymbol
+ if (pt.typeSymbol == FunctionSymbol) NoSymbol
else samOf(pt)
/* The SAM case comes first so that this works:
@@ -2846,15 +2901,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* Note that the arity of the sam must correspond to the arity of the function.
*/
val samViable = sam.exists && sameLength(sam.info.params, fun.vparams)
+ val ptNorm = if (samViable) samToFunctionType(pt, sam) else pt
val (argpts, respt) =
- if (samViable) {
- val samInfo = pt memberInfo sam
- (samInfo.paramTypes, samInfo.resultType)
- } else {
- pt baseType FunctionSymbol match {
- case TypeRef(_, FunctionSymbol, args :+ res) => (args, res)
- case _ => (fun.vparams map (_ => if (pt == ErrorType) ErrorType else NoType), WildcardType)
- }
+ ptNorm baseType FunctionSymbol match {
+ case TypeRef(_, FunctionSymbol, args :+ res) => (args, res)
+ case _ => (fun.vparams map (_ => if (pt == ErrorType) ErrorType else NoType), WildcardType)
}
if (!FunctionSymbol.exists)
@@ -5132,16 +5183,19 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
typed(tree.ref, MonoQualifierModes | mode.onlyTypePat, AnyRefTpe)
}
- if (!refTyped.isErrorTyped)
+ if (refTyped.isErrorTyped) {
+ setError(tree)
+ } else {
tree setType refTyped.tpe.resultType
-
- if (treeInfo.admitsTypeSelection(refTyped)) tree
- else UnstableTreeError(refTyped)
+ if (refTyped.isErrorTyped || treeInfo.admitsTypeSelection(refTyped)) tree
+ else UnstableTreeError(tree)
+ }
}
def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
val qual1 = typedType(tree.qualifier, mode)
- if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
+ if (qual1.isErrorTyped) setError(treeCopy.SelectFromTypeTree(tree, qual1, tree.name))
+ else if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
else typedSelect(tree, qual1, tree.name)
}
@@ -5153,7 +5207,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def typedExistentialTypeTree(tree: ExistentialTypeTree) = {
val tree1 = typerWithLocalContext(context.makeNewScope(tree, context.owner)){
- _.typedExistentialTypeTree(tree, mode)
+ typer =>
+ if (context.inTypeConstructorAllowed)
+ typer.context.withinTypeConstructorAllowed(typer.typedExistentialTypeTree(tree, mode))
+ else
+ typer.typedExistentialTypeTree(tree, mode)
}
checkExistentialsFeature(tree1.pos, tree1.tpe, "the existential type")
tree1
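A sketch of the adaptation covered by the extended (4.2) case above (names invented; may require -Xexperimental on 2.11.x): a method reference is eta-expanded and then converted to a SAM-typed expected type rather than only to a FunctionN type.

trait StringPredicate { def test(s: String): Boolean }

object EtaDemo extends App {
  def nonEmpty(s: String): Boolean = s.nonEmpty
  val p: StringPredicate = nonEmpty _   // eta-expansion, then SAM conversion
  println(p.test("scala"))
}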
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index cc2d9141ce..fc1f45e358 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -142,17 +142,30 @@ trait Unapplies extends ast.TreeDSL {
/** The unapply method corresponding to a case class
*/
def caseModuleUnapplyMeth(cdef: ClassDef): DefDef = {
- val tparams = constrTparamsInvariant(cdef)
- val method = constrParamss(cdef) match {
+ val tparams = constrTparamsInvariant(cdef)
+ val method = constrParamss(cdef) match {
case xs :: _ if xs.nonEmpty && isRepeatedParamType(xs.last.tpt) => nme.unapplySeq
case _ => nme.unapply
}
- val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree))
- val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule)
- val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName))
+ val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree))
+ val resultType = if (!settings.isScala212) TypeTree() else { // fix for SI-6541 under -Xsource:2.12
+ def repeatedToSeq(tp: Tree) = tp match {
+ case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS_NAME), tps) => AppliedTypeTree(gen.rootScalaDot(tpnme.Seq), tps)
+ case _ => tp
+ }
+ constrParamss(cdef) match {
+ case Nil | Nil :: _ =>
+ gen.rootScalaDot(tpnme.Boolean)
+ case params :: _ =>
+ val constrParamTypes = params.map(param => repeatedToSeq(param.tpt))
+ AppliedTypeTree(gen.rootScalaDot(tpnme.Option), List(treeBuilder.makeTupleType(constrParamTypes)))
+ }
+ }
+ val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule)
+ val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName))
atPos(cdef.pos.focus)(
- DefDef(caseMods, method, tparams, List(cparams), TypeTree(), body)
+ DefDef(caseMods, method, tparams, List(cparams), resultType, body)
)
}
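For illustration (hand-written, not compiler output): under -Xsource:2.12 the synthetic unapply for a case class such as `case class Person(name: String, age: Int)` now carries an explicit result type, roughly equivalent to the companion below.

class Person(val name: String, val age: Int)
object Person {
  def apply(name: String, age: Int) = new Person(name, age)
  // explicit Option[(String, Int)] result type instead of relying on inference (cf. SI-6541)
  def unapply(p: Person): Option[(String, Int)] =
    if (p == null) None else Some((p.name, p.age))
}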
diff --git a/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala b/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala
new file mode 100644
index 0000000000..4451651229
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.util
+
+import scala.tools.nsc.io.AbstractFile
+import java.net.URL
+
+/**
+ * Simple interface that allows us to abstract over how class file lookup is performed
+ * in different classpath representations.
+ */
+// TODO at the end, after the possible removal of the old classpath representation, this class shouldn't be generic
+// T should be just changed to AbstractFile
+trait ClassFileLookup[T] {
+ def findClassFile(name: String): Option[AbstractFile]
+
+ /**
+ * Returns the class with the given name, whether it is backed by a class file or a source file
+ * (as our base ClassRepresentation), so it is not as strictly tied to class files as findClassFile.
+ */
+ def findClass(name: String): Option[ClassRepresentation[T]]
+
+ /**
+ * A sequence of URLs representing this classpath.
+ */
+ def asURLs: Seq[URL]
+
+ /** The whole classpath in the form of one String.
+ */
+ def asClassPathString: String
+
+ // for compatibility purposes
+ @deprecated("Use asClassPathString instead of this one", "2.11.5")
+ def asClasspathString: String = asClassPathString
+
+ /** The whole sourcepath in the form of one String.
+ */
+ def asSourcePathString: String
+}
+
+/**
+ * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
+ */
+// TODO at the end, after the possible removal of the old classpath implementation, this class shouldn't be generic
+// T should be just changed to AbstractFile
+trait ClassRepresentation[T] {
+ def binary: Option[T]
+ def source: Option[AbstractFile]
+
+ def name: String
+}
+
+object ClassRepresentation {
+ def unapply[T](classRep: ClassRepresentation[T]): Option[(Option[T], Option[AbstractFile])] =
+ Some((classRep.binary, classRep.source))
+}
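A hypothetical helper (not part of the patch) showing how client code can depend on the new abstraction instead of a concrete ClassPath implementation:

import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.{ ClassFileLookup, ClassRepresentation }

object ClassFileLookupDemo {
  // Summarise what a lookup knows about a class, using only ClassFileLookup members.
  def describe(lookup: ClassFileLookup[AbstractFile], className: String): String =
    lookup.findClass(className) match {
      case Some(ClassRepresentation(binary, source)) =>
        s"$className -> binary: ${binary.map(_.name)}, source: ${source.map(_.name)}"
      case None =>
        s"$className not found on ${lookup.asClassPathString}"
    }
}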
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index e89f08ec6b..8d4d07759f 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -7,16 +7,18 @@
package scala.tools.nsc
package util
+import io.{ AbstractFile, Directory, File, Jar }
+import java.net.MalformedURLException
import java.net.URL
+import java.util.regex.PatternSyntaxException
import scala.collection.{ mutable, immutable }
-import io.{ File, Directory, Path, Jar, AbstractFile }
import scala.reflect.internal.util.StringOps.splitWhere
-import Jar.isJarOrZip
+import scala.tools.nsc.classpath.FileUtils
+
import File.pathSeparator
-import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
-import java.net.MalformedURLException
-import java.util.regex.PatternSyntaxException
-import scala.reflect.runtime.ReflectionUtils
+import FileUtils.endsClass
+import FileUtils.endsScalaOrJava
+import Jar.isJarOrZip
/** <p>
* This module provides star expansion of '-classpath' option arguments, behaves the same as
@@ -89,7 +91,7 @@ object ClassPath {
/** A class modeling aspects of a ClassPath which should be
* propagated to any classpaths it creates.
*/
- abstract class ClassPathContext[T] {
+ abstract class ClassPathContext[T] extends classpath.ClassPathFactory[ClassPath[T]] {
/** A filter which can be used to exclude entities from the classpath
* based on their name.
*/
@@ -99,75 +101,47 @@ object ClassPath {
*/
def validClassFile(name: String) = endsClass(name) && isValidName(name)
def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
- def validSourceFile(name: String) = endsScala(name) || endsJava(name)
+ def validSourceFile(name: String) = endsScalaOrJava(name)
/** From the representation to its identifier.
*/
def toBinaryName(rep: T): String
- /** Create a new classpath based on the abstract file.
- */
- def newClassPath(file: AbstractFile): ClassPath[T]
-
- /** Creators for sub classpaths which preserve this context.
- */
def sourcesInPath(path: String): List[ClassPath[T]] =
for (file <- expandPath(path, expandStar = false) ; dir <- Option(AbstractFile getDirectory file)) yield
new SourcePath[T](dir, this)
-
- def contentsOfDirsInPath(path: String): List[ClassPath[T]] =
- for (dir <- expandPath(path, expandStar = false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
- newClassPath(entry)
-
- def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] =
- classesInPathImpl(path, expand = true).toIndexedSeq
-
- def classesInPath(path: String) = classesInPathImpl(path, expand = false)
-
- // Internal
- private def classesInPathImpl(path: String, expand: Boolean) =
- for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield
- newClassPath(dir)
-
- def classesInManifest(used: Boolean) =
- if (used) for (url <- manifests) yield newClassPath(AbstractFile getResources url) else Nil
}
- def manifests = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF").filter(_.getProtocol() == "jar").toList
+ def manifests: List[java.net.URL] = {
+ import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
+ Thread.currentThread().getContextClassLoader()
+ .getResources("META-INF/MANIFEST.MF")
+ .filter(_.getProtocol == "jar").toList
+ }
class JavaContext extends ClassPathContext[AbstractFile] {
def toBinaryName(rep: AbstractFile) = {
val name = rep.name
assert(endsClass(name), name)
- name.substring(0, name.length - 6)
+ FileUtils.stripClassExtension(name)
}
+
def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
}
object DefaultJavaContext extends JavaContext
- private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
- private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
- private def endsJava(s: String) = s.length > 5 && s.substring(s.length - 5) == ".java"
-
/** From the source file to its identifier.
*/
- def toSourceName(f: AbstractFile): String = {
- val name = f.name
-
- if (endsScala(name)) name.substring(0, name.length - 6)
- else if (endsJava(name)) name.substring(0, name.length - 5)
- else throw new FatalError("Unexpected source file ending: " + name)
- }
+ def toSourceName(f: AbstractFile): String = FileUtils.stripSourceExtension(f.name)
}
+
import ClassPath._
/**
* Represents a package which contains classes and other packages
*/
-abstract class ClassPath[T] {
- type AnyClassRep = ClassPath[T]#ClassRep
-
+abstract class ClassPath[T] extends ClassFileLookup[T] {
/**
* The short name of the package (without prefix)
*/
@@ -179,28 +153,37 @@ abstract class ClassPath[T] {
*/
def origin: Option[String] = None
- /** A list of URLs representing this classpath.
- */
- def asURLs: List[URL]
-
- /** The whole classpath in the form of one String.
- */
- def asClasspathString: String
-
/** Info which should be propagated to any sub-classpaths.
*/
def context: ClassPathContext[T]
/** Lists of entities.
*/
- def classes: IndexedSeq[AnyClassRep]
+ def classes: IndexedSeq[ClassRepresentation[T]]
def packages: IndexedSeq[ClassPath[T]]
def sourcepaths: IndexedSeq[AbstractFile]
+ /** The entries this classpath is composed of. In class `ClassPath` it's just the singleton list containing `this`.
+ * Subclasses such as `MergedClassPath` typically return lists with more elements.
+ */
+ def entries: IndexedSeq[ClassPath[T]] = IndexedSeq(this)
+
+ /** Merge this classpath and the given `urls` into one merged classpath */
+ def mergeUrlsIntoClassPath(urls: URL*): MergedClassPath[T] = {
+ // Collect our new jars/directories and add them to the existing set of classpaths
+ val allEntries =
+ (entries ++
+ urls.map(url => context.newClassPath(io.AbstractFile.getURL(url)))
+ ).distinct
+
+ // Combine all of our classpaths (old and new) into one merged classpath
+ new MergedClassPath(allEntries, context)
+ }
+
/**
* Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
*/
- case class ClassRep(binary: Option[T], source: Option[AbstractFile]) {
+ case class ClassRep(binary: Option[T], source: Option[AbstractFile]) extends ClassRepresentation[T] {
def name: String = binary match {
case Some(x) => context.toBinaryName(x)
case _ =>
@@ -219,25 +202,27 @@ abstract class ClassPath[T] {
* Find a ClassRep given a class name of the form "package.subpackage.ClassName".
* Does not support nested classes on .NET
*/
- def findClass(name: String): Option[AnyClassRep] =
+ override def findClass(name: String): Option[ClassRepresentation[T]] =
splitWhere(name, _ == '.', doDropIndex = true) match {
case Some((pkg, rest)) =>
val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
rep map {
- case x: ClassRep => x
+ case x: ClassRepresentation[T] => x
case x => throw new FatalError("Unexpected ClassRep '%s' found searching for name '%s'".format(x, name))
}
case _ =>
classes find (_.name == name)
}
- def findClassFile(name: String): Option[AbstractFile] =
+ override def findClassFile(name: String): Option[AbstractFile] =
findClass(name) match {
- case Some(ClassRep(Some(x: AbstractFile), _)) => Some(x)
+ case Some(ClassRepresentation(Some(x: AbstractFile), _)) => Some(x)
case _ => None
}
- def sortString = join(split(asClasspathString).sorted: _*)
+ override def asSourcePathString: String = sourcepaths.mkString(pathSeparator)
+
+ def sortString = join(split(asClassPathString).sorted: _*)
override def equals(that: Any) = that match {
case x: ClassPath[_] => this.sortString == x.sortString
case _ => false
@@ -249,10 +234,12 @@ abstract class ClassPath[T] {
* A Classpath containing source files
*/
class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends ClassPath[T] {
+ import FileUtils.AbstractFileOps
+
def name = dir.name
override def origin = dir.underlyingSource map (_.path)
- def asURLs = if (dir.file == null) Nil else List(dir.toURL)
- def asClasspathString = dir.path
+ def asURLs = dir.toURLs()
+ def asClassPathString = dir.path
val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq(dir)
private def traverse() = {
@@ -275,10 +262,12 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
* A directory (or a .jar file) containing classfiles and packages
*/
class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
+ import FileUtils.AbstractFileOps
+
def name = dir.name
override def origin = dir.underlyingSource map (_.path)
- def asURLs = if (dir.file == null) List(new URL(name)) else List(dir.toURL)
- def asClasspathString = dir.path
+ def asURLs = dir.toURLs(default = Seq(new URL(name)))
+ def asClassPathString = dir.path
val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
// calculates (packages, classes) in one traversal.
@@ -322,9 +311,10 @@ extends MergedClassPath[T](original.entries map (e => subst getOrElse (e, e)), o
* A classpath unifying multiple class- and sourcepath entries.
*/
class MergedClassPath[T](
- val entries: IndexedSeq[ClassPath[T]],
+ override val entries: IndexedSeq[ClassPath[T]],
val context: ClassPathContext[T])
extends ClassPath[T] {
+
def this(entries: TraversableOnce[ClassPath[T]], context: ClassPathContext[T]) =
this(entries.toIndexedSeq, context)
@@ -333,12 +323,12 @@ extends ClassPath[T] {
lazy val sourcepaths: IndexedSeq[AbstractFile] = entries flatMap (_.sourcepaths)
override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
- override def asClasspathString: String = join(entries map (_.asClasspathString) : _*)
+ override def asClassPathString: String = join(entries map (_.asClassPathString) : _*)
- lazy val classes: IndexedSeq[AnyClassRep] = {
+ lazy val classes: IndexedSeq[ClassRepresentation[T]] = {
var count = 0
val indices = mutable.HashMap[String, Int]()
- val cls = new mutable.ArrayBuffer[AnyClassRep](1024)
+ val cls = new mutable.ArrayBuffer[ClassRepresentation[T]](1024)
for (e <- entries; c <- e.classes) {
val name = c.name
@@ -347,9 +337,9 @@ extends ClassPath[T] {
val existing = cls(idx)
if (existing.binary.isEmpty && c.binary.isDefined)
- cls(idx) = existing.copy(binary = c.binary)
+ cls(idx) = ClassRep(binary = c.binary, source = existing.source)
if (existing.source.isEmpty && c.source.isDefined)
- cls(idx) = existing.copy(source = c.source)
+ cls(idx) = ClassRep(binary = existing.binary, source = c.source)
}
else {
indices(name) = count
@@ -387,10 +377,12 @@ extends ClassPath[T] {
}
new MergedClassPath[T](newEntries, context)
}
+
def show() {
println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
- asClasspathString split ':' foreach (x => println(" " + x))
+ asClassPathString split ':' foreach (x => println(" " + x))
}
+
override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")")
}
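A hypothetical helper built on the new entries/mergeUrlsIntoClassPath members (names invented): append extra jar URLs to an existing classpath of the recursive representation.

import java.net.URL
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.{ ClassPath, MergedClassPath }

object ClassPathExtensions {
  // Returns a merged classpath containing the original entries plus the given jars.
  def withExtraJars(cp: ClassPath[AbstractFile], jars: URL*): MergedClassPath[AbstractFile] =
    cp.mergeUrlsIntoClassPath(jars: _*)
}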
diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
index 3ae21b6b98..8d8418945a 100644
--- a/src/compiler/scala/tools/reflect/ReflectMain.scala
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -1,17 +1,17 @@
package scala.tools
package reflect
+import scala.reflect.internal.util.ScalaClassLoader
import scala.tools.nsc.Driver
import scala.tools.nsc.Global
import scala.tools.nsc.Settings
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.util.PathResolver
+import scala.tools.util.PathResolverFactory
object ReflectMain extends Driver {
private def classloaderFromSettings(settings: Settings) = {
- val classpath = new PathResolver(settings).result
- ScalaClassLoader.fromURLs(classpath.asURLs, getClass.getClassLoader)
+ val classPathURLs = PathResolverFactory.create(settings).resultAsURLs
+ ScalaClassLoader.fromURLs(classPathURLs, getClass.getClassLoader)
}
override def newCompiler(): Global = new ReflectGlobal(settings, reporter, classloaderFromSettings(settings))
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
deleted file mode 100644
index 3cfc1eb2a1..0000000000
--- a/src/compiler/scala/tools/util/Javap.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-import scala.tools.nsc.util.ScalaClassLoader
-import java.io.PrintWriter
-
-trait JpResult {
- def isError: Boolean
- def value: Any
- def show(): Unit
-}
-
-trait Javap {
- def loader: ScalaClassLoader
- def printWriter: PrintWriter
- def apply(args: Seq[String]): List[JpResult]
- def tryFile(path: String): Option[Array[Byte]]
- def tryClass(path: String): Array[Byte]
-}
-
-object NoJavap extends Javap {
- def loader: ScalaClassLoader = getClass.getClassLoader
- def printWriter: PrintWriter = new PrintWriter(System.err, true)
- def apply(args: Seq[String]): List[JpResult] = Nil
- def tryFile(path: String): Option[Array[Byte]] = None
- def tryClass(path: String): Array[Byte] = Array()
-}
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 5526660509..8e5b1e0a5c 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -7,14 +7,17 @@ package scala
package tools
package util
+import java.net.URL
import scala.tools.reflect.WrappedProperties.AccessControl
-import scala.tools.nsc.{ Settings }
-import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
+import scala.tools.nsc.Settings
+import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, JavaClassPath }
import scala.reflect.io.{ File, Directory, Path, AbstractFile }
import scala.reflect.runtime.ReflectionUtils
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
import scala.language.postfixOps
+import scala.tools.nsc.classpath.{ AggregateFlatClassPath, ClassPathFactory, FlatClassPath, FlatClassPathFactory }
+import scala.tools.nsc.settings.ClassPathRepresentationType
// Loosely based on the draft specification at:
// https://wiki.scala-lang.org/display/SIW/Classpath
@@ -48,9 +51,8 @@ object PathResolver {
/** Values found solely by inspecting environment or property variables.
*/
object Environment {
- private def searchForBootClasspath = (
+ private def searchForBootClasspath =
systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
- )
/** Environment variables which java pays attention to so it
* seems we do as well.
@@ -104,7 +106,7 @@ object PathResolver {
else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path
else ""
- // XXX It must be time for someone to figure out what all these things
+ // TODO It must be time for someone to figure out what all these things
// are intended to do. This is disabled here because it was causing all
// the scala jars to end up on the classpath twice: one on the boot
// classpath as set up by the runner (or regular classpath under -nobootcp)
@@ -170,39 +172,48 @@ object PathResolver {
!ReflectionUtils.scalacShouldntLoadClassfile(name)
}
- // called from scalap
+ @deprecated("This method is no longer used be scalap and will be deleted", "2.11.5")
def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
val s = new Settings()
s.classpath.value = path
- new PathResolver(s, context) result
+ new PathResolver(s, context).result
}
/** With no arguments, show the interesting values in Environment and Defaults.
* If there are arguments, show those in Calculated as if those options had been
* given to a scala runner.
*/
- def main(args: Array[String]): Unit = {
+ def main(args: Array[String]): Unit =
if (args.isEmpty) {
println(Environment)
println(Defaults)
- }
- else {
+ } else {
val settings = new Settings()
val rest = settings.processArguments(args.toList, processAll = false)._2
- val pr = new PathResolver(settings)
- println(" COMMAND: 'scala %s'".format(args.mkString(" ")))
+ val pr = PathResolverFactory.create(settings)
+ println("COMMAND: 'scala %s'".format(args.mkString(" ")))
println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
- pr.result.show()
+
+ pr.result match {
+ case cp: JavaClassPath =>
+ cp.show()
+ case cp: AggregateFlatClassPath =>
+ println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}")
+ }
}
- }
}
-class PathResolver(settings: Settings, context: JavaContext) {
- import PathResolver.{ Defaults, Environment, AsLines, MkLines, ppcp }
+trait PathResolverResult {
+ def result: ClassFileLookup[AbstractFile]
- def this(settings: Settings) = this(settings,
- if (settings.YnoLoadImplClass) PathResolver.NoImplClassJavaContext
- else DefaultJavaContext)
+ def resultAsURLs: Seq[URL] = result.asURLs
+}
+
+abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFile], ResultClassPathType <: BaseClassPathType]
+(settings: Settings, classPathFactory: ClassPathFactory[BaseClassPathType])
+ extends PathResolverResult {
+
+ import PathResolver.{ AsLines, Defaults, ppcp }
private def cmdLineOrElse(name: String, alt: String) = {
(commandLineFor(name) match {
@@ -232,6 +243,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
def scalaBootClassPath = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath)
def scalaExtDirs = cmdLineOrElse("extdirs", Defaults.scalaExtDirs)
+
/** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as:
* [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect
* [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg)
@@ -250,16 +262,14 @@ class PathResolver(settings: Settings, context: JavaContext) {
* - Otherwise, if CLASSPATH is set, it is that
* - If neither of those, then "." is used.
*/
- def userClassPath = (
- if (!settings.classpath.isDefault)
- settings.classpath.value
+ def userClassPath =
+ if (!settings.classpath.isDefault) settings.classpath.value
else sys.env.getOrElse("CLASSPATH", ".")
- )
- import context._
+ import classPathFactory._
// Assemble the elements!
- def basis = List[Traversable[ClassPath[AbstractFile]]](
+ def basis = List[Traversable[BaseClassPathType]](
classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
@@ -278,7 +288,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
| javaBootClassPath = ${ppcp(javaBootClassPath)}
| javaExtDirs = ${ppcp(javaExtDirs)}
| javaUserClassPath = ${ppcp(javaUserClassPath)}
- | useJavaClassPath = $useJavaClassPath
+ | useJavaClassPath = $useJavaClassPath
| scalaBootClassPath = ${ppcp(scalaBootClassPath)}
| scalaExtDirs = ${ppcp(scalaExtDirs)}
| userClassPath = ${ppcp(userClassPath)}
@@ -288,8 +298,10 @@ class PathResolver(settings: Settings, context: JavaContext) {
def containers = Calculated.containers
- lazy val result = {
- val cp = new JavaClassPath(containers.toIndexedSeq, context)
+ import PathResolver.MkLines
+
+ def result: ResultClassPathType = {
+ val cp = computeResult()
if (settings.Ylogcp) {
Console print f"Classpath built from ${settings.toConciseString} %n"
Console print s"Defaults: ${PathResolver.Defaults}"
@@ -301,5 +313,37 @@ class PathResolver(settings: Settings, context: JavaContext) {
cp
}
- def asURLs = result.asURLs
+ @deprecated("Use resultAsURLs instead of this one", "2.11.5")
+ def asURLs: List[URL] = resultAsURLs.toList
+
+ protected def computeResult(): ResultClassPathType
+}
+
+class PathResolver(settings: Settings, context: JavaContext)
+ extends PathResolverBase[ClassPath[AbstractFile], JavaClassPath](settings, context) {
+
+ def this(settings: Settings) =
+ this(settings,
+ if (settings.YnoLoadImplClass) PathResolver.NoImplClassJavaContext
+ else DefaultJavaContext)
+
+ override protected def computeResult(): JavaClassPath =
+ new JavaClassPath(containers.toIndexedSeq, context)
+}
+
+class FlatClassPathResolver(settings: Settings, flatClassPathFactory: ClassPathFactory[FlatClassPath])
+ extends PathResolverBase[FlatClassPath, AggregateFlatClassPath](settings, flatClassPathFactory) {
+
+ def this(settings: Settings) = this(settings, new FlatClassPathFactory(settings))
+
+ override protected def computeResult(): AggregateFlatClassPath = AggregateFlatClassPath(containers.toIndexedSeq)
+}
+
+object PathResolverFactory {
+
+ def create(settings: Settings): PathResolverResult =
+ settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat => new FlatClassPathResolver(settings)
+ case ClassPathRepresentationType.Recursive => new PathResolver(settings)
+ }
}
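A hypothetical driver (not part of the patch) showing how the factory hides the choice between the recursive and flat classpath representations behind PathResolverResult:

import scala.tools.nsc.Settings
import scala.tools.util.PathResolverFactory

object ClassPathProbe extends App {
  val settings = new Settings()
  settings.processArguments(args.toList, processAll = false)
  val resolved = PathResolverFactory.create(settings)   // FlatClassPathResolver or PathResolver, per -YclasspathImpl
  resolved.resultAsURLs foreach println
}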
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index 1d39a59cf4..7858bf0658 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -28,12 +28,12 @@ trait CompileOutputCommon {
* @author Martin Odersky
* @version 1.0
*/
-abstract class SocketServer extends CompileOutputCommon {
+abstract class SocketServer(fixPort: Int = 0) extends CompileOutputCommon {
def shutdown: Boolean
def session(): Unit
def timeout(): Unit = () // called after a timeout is detected for subclasses to cleanup
// a hook for subclasses
- protected def createServerSocket(): ServerSocket = new ServerSocket(0)
+ protected def createServerSocket(): ServerSocket = new ServerSocket(fixPort)
var in: BufferedReader = _
var out: PrintWriter = _