author     Prashant Sharma <prashant.s@imaginea.com>  2014-11-11 21:36:48 -0800
committer  Patrick Wendell <pwendell@gmail.com>       2014-11-11 21:36:48 -0800
commit     daaca14c16dc2c1abc98f15ab8c6f7c14761b627 (patch)
tree       de60da38655f8a7d4b0712872b08a7aedf73460f /repl/scala-2.10
parent     2ddb1415e2bea94004947506ded090c2e8ff8dad (diff)
Support cross building for Scala 2.11
Let's give this another go using a version of Hive that shades its JLine dependency.

Author: Prashant Sharma <prashant.s@imaginea.com>
Author: Patrick Wendell <pwendell@gmail.com>

Closes #3159 from pwendell/scala-2.11-prashant and squashes the following commits:

e93aa3e [Patrick Wendell] Restoring -Phive-thriftserver profile and cleaning up build script.
f65d17d [Patrick Wendell] Fixing build issue due to merge conflict
a8c41eb [Patrick Wendell] Reverting dev/run-tests back to master state.
7a6eb18 [Patrick Wendell] Merge remote-tracking branch 'apache/master' into scala-2.11-prashant
583aa07 [Prashant Sharma] REVERT ME: removed hive thirftserver
3680e58 [Prashant Sharma] Revert "REVERT ME: Temporarily removing some Cli tests."
935fb47 [Prashant Sharma] Revert "Fixed by disabling a few tests temporarily."
925e90f [Prashant Sharma] Fixed by disabling a few tests temporarily.
2fffed3 [Prashant Sharma] Exclude groovy from sbt build, and also provide a way for such instances in future.
8bd4e40 [Prashant Sharma] Switched to gmaven plus, it fixes random failures observer with its predecessor gmaven.
5272ce5 [Prashant Sharma] SPARK_SCALA_VERSION related bugs.
2121071 [Patrick Wendell] Migrating version detection to PySpark
b1ed44d [Patrick Wendell] REVERT ME: Temporarily removing some Cli tests.
1743a73 [Patrick Wendell] Removing decimal test that doesn't work with Scala 2.11
f5cad4e [Patrick Wendell] Add Scala 2.11 docs
210d7e1 [Patrick Wendell] Revert "Testing new Hive version with shaded jline"
48518ce [Patrick Wendell] Remove association of Hive and Thriftserver profiles.
e9d0a06 [Patrick Wendell] Revert "Enable thritfserver for Scala 2.10 only"
67ec364 [Patrick Wendell] Guard building of thriftserver around Scala 2.10 check
8502c23 [Patrick Wendell] Enable thritfserver for Scala 2.10 only
e22b104 [Patrick Wendell] Small fix in pom file
ec402ab [Patrick Wendell] Various fixes
0be5a9d [Patrick Wendell] Testing new Hive version with shaded jline
4eaec65 [Prashant Sharma] Changed scripts to ignore target.
5167bea [Prashant Sharma] small correction
a4fcac6 [Prashant Sharma] Run against scala 2.11 on jenkins.
80285f4 [Prashant Sharma] MAven equivalent of setting spark.executor.extraClasspath during tests.
034b369 [Prashant Sharma] Setting test jars on executor classpath during tests from sbt.
d4874cb [Prashant Sharma] Fixed Python Runner suite. null check should be first case in scala 2.11.
6f50f13 [Prashant Sharma] Fixed build after rebasing with master. We should use ${scala.binary.version} instead of just 2.10
e56ca9d [Prashant Sharma] Print an error if build for 2.10 and 2.11 is spotted.
937c0b8 [Prashant Sharma] SCALA_VERSION -> SPARK_SCALA_VERSION
cb059b0 [Prashant Sharma] Code review
0476e5e [Prashant Sharma] Scala 2.11 support with repl and all build changes.
Diffstat (limited to 'repl/scala-2.10')
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/Main.scala                 |   33
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala     |   37
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkExprTyper.scala       |  114
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkHelper.scala          |   22
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala           | 1091
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala       |  147
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkIMain.scala           | 1445
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkImports.scala         |  238
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala |  377
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala     |   90
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala  |  232
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala  |   32
-rw-r--r--  repl/scala-2.10/src/test/scala/org/apache/spark/repl/ReplSuite.scala            |  318
13 files changed, 4176 insertions(+), 0 deletions(-)
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/Main.scala
new file mode 100644
index 0000000000..14b448d076
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/Main.scala
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.repl
+
+import scala.collection.mutable.Set
+
+object Main {
+ private var _interp: SparkILoop = _
+
+ def interp = _interp
+
+ def interp_=(i: SparkILoop) { _interp = i }
+
+ def main(args: Array[String]) {
+ _interp = new SparkILoop
+ _interp.process(args)
+ }
+}
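Main parks the interpreter in the mutable _interp holder so that code elsewhere in this patch (notably SparkILoopInit.initializeSpark, which evaluates org.apache.spark.repl.Main.interp.createSparkContext() inside the session) can reach back to the running loop. A minimal sketch of driving this entry point from another program, assuming -usejavacp suffices for the classpath (spark-shell normally arranges jars via ADD_JARS/spark.jars):

```scala
import org.apache.spark.repl.Main

object EmbedRepl {
  def main(args: Array[String]): Unit = {
    // Main.main assigns _interp before processing arguments, so session
    // code can later reach the loop through Main.interp.
    Main.main(Array("-usejavacp")) // assumption: reuse this JVM's classpath
  }
}
```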
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala
new file mode 100644
index 0000000000..05816941b5
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.repl
+
+import scala.tools.nsc.{Settings, CompilerCommand}
+import scala.Predef._
+
+/**
+ * Command class enabling Spark-specific command line options (provided by
+ * <i>org.apache.spark.repl.SparkRunnerSettings</i>).
+ */
+class SparkCommandLine(args: List[String], override val settings: Settings)
+ extends CompilerCommand(args, settings) {
+
+ def this(args: List[String], error: String => Unit) {
+ this(args, new SparkRunnerSettings(error))
+ }
+
+ def this(args: List[String]) {
+ this(args, str => Console.println("Error: " + str))
+ }
+}
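SparkCommandLine only swaps SparkRunnerSettings (which contributes the -i <file> option consumed by SparkILoop.loadFiles) into the stock CompilerCommand parser. A small usage sketch; the argument list is illustrative:

```scala
import org.apache.spark.repl.SparkCommandLine

object ParseArgsSketch {
  def main(args: Array[String]): Unit = {
    // "-i" comes from SparkRunnerSettings; "-deprecation" is a standard
    // flag inherited from scala.tools.nsc.Settings.
    val command = new SparkCommandLine(List("-deprecation", "-i", "init.scala"))
    if (command.ok) println("parsed settings: " + command.settings)
    else println(command.usageMsg)
  }
}
```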
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkExprTyper.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkExprTyper.scala
new file mode 100644
index 0000000000..f8432c8af6
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkExprTyper.scala
@@ -0,0 +1,114 @@
+// scalastyle:off
+
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package org.apache.spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+import scala.reflect.internal.util.BatchSourceFile
+import scala.tools.nsc.ast.parser.Tokens.EOF
+
+import org.apache.spark.Logging
+
+trait SparkExprTyper extends Logging {
+ val repl: SparkIMain
+
+ import repl._
+ import global.{ reporter => _, Import => _, _ }
+ import definitions._
+ import syntaxAnalyzer.{ UnitParser, UnitScanner, token2name }
+ import naming.freshInternalVarName
+
+ object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] {
+ def applyRule[T](code: String, rule: UnitParser => T): T = {
+ reporter.reset()
+ val scanner = newUnitParser(code)
+ val result = rule(scanner)
+
+ if (!reporter.hasErrors)
+ scanner.accept(EOF)
+
+ result
+ }
+
+ def defns(code: String) = stmts(code) collect { case x: DefTree => x }
+ def expr(code: String) = applyRule(code, _.expr())
+ def stmts(code: String) = applyRule(code, _.templateStats())
+ def stmt(code: String) = stmts(code).last // guaranteed nonempty
+ }
+
+ /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
+ def parse(line: String): Option[List[Tree]] = debugging(s"""parse("$line")""") {
+ var isIncomplete = false
+ reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
+ val trees = codeParser.stmts(line)
+ if (reporter.hasErrors) {
+ Some(Nil)
+ } else if (isIncomplete) {
+ None
+ } else {
+ Some(trees)
+ }
+ }
+ }
+ // def parsesAsExpr(line: String) = {
+ // import codeParser._
+ // (opt expr line).isDefined
+ // }
+
+ def symbolOfLine(code: String): Symbol = {
+ def asExpr(): Symbol = {
+ val name = freshInternalVarName()
+ // Typing it with a lazy val would give us the right type, but runs
+ // into compiler bugs with things like existentials, so we compile it
+ // behind a def and strip the NullaryMethodType which wraps the expr.
+ val line = "def " + name + " = {\n" + code + "\n}"
+
+ interpretSynthetic(line) match {
+ case IR.Success =>
+ val sym0 = symbolOfTerm(name)
+ // drop NullaryMethodType
+ val sym = sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
+ if (sym.info.typeSymbol eq UnitClass) NoSymbol else sym
+ case _ => NoSymbol
+ }
+ }
+ def asDefn(): Symbol = {
+ val old = repl.definedSymbolList.toSet
+
+ interpretSynthetic(code) match {
+ case IR.Success =>
+ repl.definedSymbolList filterNot old match {
+ case Nil => NoSymbol
+ case sym :: Nil => sym
+ case syms => NoSymbol.newOverloaded(NoPrefix, syms)
+ }
+ case _ => NoSymbol
+ }
+ }
+ beQuietDuring(asExpr()) orElse beQuietDuring(asDefn())
+ }
+
+ private var typeOfExpressionDepth = 0
+ def typeOfExpression(expr: String, silent: Boolean = true): Type = {
+ if (typeOfExpressionDepth > 2) {
+ logDebug("Terminating typeOfExpression recursion for expression: " + expr)
+ return NoType
+ }
+ typeOfExpressionDepth += 1
+ // We don't presently have a good way to suppress undesirable success output
+ // while letting errors through, so we first try it silently: if there
+ // is an error, and errors are desired, then we re-evaluate non-silently
+ // to induce the error message.
+ try beSilentDuring(symbolOfLine(expr).tpe) match {
+ case NoType if !silent => symbolOfLine(expr).tpe // generate error
+ case tpe => tpe
+ }
+ finally typeOfExpressionDepth -= 1
+ }
+}
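The heart of symbolOfLine is the def-wrapping trick: the user expression is compiled behind def $fresh = { code }, and the resulting symbol's NullaryMethodType is stripped via finalResultType to recover the expression's own type. A self-contained sketch of why that works, using the public reflection API (Scala 2.11+) rather than the compiler internals the repl uses:

```scala
import scala.reflect.runtime.universe._

object DefWrapSketch {
  // Stands in for the synthetic "def $name = { code }" wrapper.
  class Wrapper { def res0 = List(1, 2, 3).head }

  def main(args: Array[String]): Unit = {
    val sym = typeOf[Wrapper].decl(TermName("res0"))
    println(sym.info)                 // "=> Int": the NullaryMethodType wrapper
    println(sym.info.finalResultType) // "Int":   the expression's actual type
  }
}
```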
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkHelper.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkHelper.scala
new file mode 100644
index 0000000000..5340951d91
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkHelper.scala
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package scala.tools.nsc
+
+object SparkHelper {
+ def explicitParentLoader(settings: Settings) = settings.explicitParentLoader
+}
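Note the package clause: SparkHelper is compiled into scala.tools.nsc rather than org.apache.spark.repl solely so it can read settings.explicitParentLoader, which is package-private to nsc. The same "package breakout" pattern in miniature, with hypothetical names:

```scala
// Library side: a member visible only inside package "somelib".
package somelib {
  class Config {
    private[somelib] val parentLoader: Option[ClassLoader] = None
  }
}

// Client side: declaring an accessor inside the library's package grants access.
package somelib {
  object Breakout {
    def parentLoaderOf(c: Config): Option[ClassLoader] = c.parentLoader
  }
}
```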
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
new file mode 100644
index 0000000000..e56b74edba
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -0,0 +1,1091 @@
+// scalastyle:off
+
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Alexander Spoon
+ */
+
+package org.apache.spark.repl
+
+
+import java.net.URL
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc._
+import scala.tools.nsc.backend.JavaPlatform
+import scala.tools.nsc.interpreter._
+
+import scala.tools.nsc.interpreter.{Results => IR}
+import Predef.{println => _, _}
+import java.io.{BufferedReader, FileReader}
+import java.net.URI
+import java.util.concurrent.locks.ReentrantLock
+import scala.sys.process.Process
+import scala.tools.nsc.interpreter.session._
+import scala.util.Properties.{jdkHome, javaVersion}
+import scala.tools.util.{Javap}
+import scala.annotation.tailrec
+import scala.collection.mutable.ListBuffer
+import scala.concurrent.ops
+import scala.tools.nsc.util._
+import scala.tools.nsc.interpreter._
+import scala.tools.nsc.io.{File, Directory}
+import scala.reflect.NameTransformer._
+import scala.tools.nsc.util.ScalaClassLoader._
+import scala.tools.util._
+import scala.language.{implicitConversions, existentials, postfixOps}
+import scala.reflect.{ClassTag, classTag}
+import scala.tools.reflect.StdRuntimeTags._
+
+import java.lang.{Class => jClass}
+import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
+
+import org.apache.spark.Logging
+import org.apache.spark.SparkConf
+import org.apache.spark.SparkContext
+import org.apache.spark.util.Utils
+
+/** The Scala interactive shell. It provides a read-eval-print loop
+ * around the Interpreter class.
+ * After instantiation, clients should call the main() method.
+ *
+ * If no in0 is specified, then input will come from the console, and
+ * the class will attempt to provide input editing features such as
+ * input history.
+ *
+ * @author Moez A. Abdel-Gawad
+ * @author Lex Spoon
+ * @version 1.2
+ */
+class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
+ val master: Option[String])
+ extends AnyRef
+ with LoopCommands
+ with SparkILoopInit
+ with Logging
+{
+ def this(in0: BufferedReader, out: JPrintWriter, master: String) = this(Some(in0), out, Some(master))
+ def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out, None)
+ def this() = this(None, new JPrintWriter(Console.out, true), None)
+
+ var in: InteractiveReader = _ // the input stream from which commands come
+ var settings: Settings = _
+ var intp: SparkIMain = _
+
+ @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
+ @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: SparkIMain): Unit = intp = i
+
+ /** Having inherited the difficult "var-ness" of the repl instance,
+ * I'm trying to work around it by moving operations into a class from
+ * which it will appear to be a stable prefix.
+ */
+ private def onIntp[T](f: SparkIMain => T): T = f(intp)
+
+ class IMainOps[T <: SparkIMain](val intp: T) {
+ import intp._
+ import global._
+
+ def printAfterTyper(msg: => String) =
+ intp.reporter printMessage afterTyper(msg)
+
+ /** Strip NullaryMethodType artifacts. */
+ private def replInfo(sym: Symbol) = {
+ sym.info match {
+ case NullaryMethodType(restpe) if sym.isAccessor => restpe
+ case info => info
+ }
+ }
+ def echoTypeStructure(sym: Symbol) =
+ printAfterTyper("" + deconstruct.show(replInfo(sym)))
+
+ def echoTypeSignature(sym: Symbol, verbose: Boolean) = {
+ if (verbose) SparkILoop.this.echo("// Type signature")
+ printAfterTyper("" + replInfo(sym))
+
+ if (verbose) {
+ SparkILoop.this.echo("\n// Internal Type structure")
+ echoTypeStructure(sym)
+ }
+ }
+ }
+ implicit def stabilizeIMain(intp: SparkIMain) = new IMainOps[intp.type](intp)
+
+ /** TODO -
+ * -n normalize
+ * -l label with case class parameter names
+ * -c complete - leave nothing out
+ */
+ private def typeCommandInternal(expr: String, verbose: Boolean): Result = {
+ onIntp { intp =>
+ val sym = intp.symbolOfLine(expr)
+ if (sym.exists) intp.echoTypeSignature(sym, verbose)
+ else ""
+ }
+ }
+
+ var sparkContext: SparkContext = _
+
+ override def echoCommandMessage(msg: String) {
+ intp.reporter printMessage msg
+ }
+
+ // def isAsync = !settings.Yreplsync.value
+ def isAsync = false
+ // lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals])
+ def history = in.history
+
+ /** The context class loader at the time this object was created */
+ protected val originalClassLoader = Utils.getContextOrSparkClassLoader
+
+ // classpath entries added via :cp
+ var addedClasspath: String = ""
+
+ /** A reverse list of commands to replay if the user requests a :replay */
+ var replayCommandStack: List[String] = Nil
+
+ /** A list of commands to replay if the user requests a :replay */
+ def replayCommands = replayCommandStack.reverse
+
+ /** Record a command for replay should the user request a :replay */
+ def addReplay(cmd: String) = replayCommandStack ::= cmd
+
+ def savingReplayStack[T](body: => T): T = {
+ val saved = replayCommandStack
+ try body
+ finally replayCommandStack = saved
+ }
+ def savingReader[T](body: => T): T = {
+ val saved = in
+ try body
+ finally in = saved
+ }
+
+
+ def sparkCleanUp(){
+ echo("Stopping spark context.")
+ intp.beQuietDuring {
+ command("sc.stop()")
+ }
+ }
+ /** Close the interpreter and set the var to null. */
+ def closeInterpreter() {
+ if (intp ne null) {
+ sparkCleanUp()
+ intp.close()
+ intp = null
+ }
+ }
+
+ class SparkILoopInterpreter extends SparkIMain(settings, out) {
+ outer =>
+
+ override lazy val formatting = new Formatting {
+ def prompt = SparkILoop.this.prompt
+ }
+ override protected def parentClassLoader = SparkHelper.explicitParentLoader(settings).getOrElse(classOf[SparkILoop].getClassLoader)
+ }
+
+ /** Create a new interpreter. */
+ def createInterpreter() {
+ require(settings != null)
+
+ if (addedClasspath != "") settings.classpath.append(addedClasspath)
+ val addedJars =
+ if (Utils.isWindows) {
+ // Strip any URI scheme prefix so we can add the correct path to the classpath
+ // e.g. file:/C:/my/path.jar -> C:/my/path.jar
+ SparkILoop.getAddedJars.map { jar => new URI(jar).getPath.stripPrefix("/") }
+ } else {
+ SparkILoop.getAddedJars
+ }
+ // work around for Scala bug
+ val totalClassPath = addedJars.foldLeft(
+ settings.classpath.value)((l, r) => ClassPath.join(l, r))
+ this.settings.classpath.value = totalClassPath
+
+ intp = new SparkILoopInterpreter
+ }
+
+ /** print a friendly help message */
+ def helpCommand(line: String): Result = {
+ if (line == "") helpSummary()
+ else uniqueCommand(line) match {
+ case Some(lc) => echo("\n" + lc.longHelp)
+ case _ => ambiguousError(line)
+ }
+ }
+ private def helpSummary() = {
+ val usageWidth = commands map (_.usageMsg.length) max
+ val formatStr = "%-" + usageWidth + "s %s %s"
+
+ echo("All commands can be abbreviated, e.g. :he instead of :help.")
+ echo("Those marked with a * have more detailed help, e.g. :help imports.\n")
+
+ commands foreach { cmd =>
+ val star = if (cmd.hasLongHelp) "*" else " "
+ echo(formatStr.format(cmd.usageMsg, star, cmd.help))
+ }
+ }
+ private def ambiguousError(cmd: String): Result = {
+ matchingCommands(cmd) match {
+ case Nil => echo(cmd + ": no such command. Type :help for help.")
+ case xs => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?")
+ }
+ Result(true, None)
+ }
+ private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd)
+ private def uniqueCommand(cmd: String): Option[LoopCommand] = {
+ // this lets us add commands willy-nilly and only requires enough of a command to disambiguate
+ matchingCommands(cmd) match {
+ case List(x) => Some(x)
+ // exact match OK even if otherwise appears ambiguous
+ case xs => xs find (_.name == cmd)
+ }
+ }
+ private var fallbackMode = false
+
+ private def toggleFallbackMode() {
+ val old = fallbackMode
+ fallbackMode = !old
+ System.setProperty("spark.repl.fallback", fallbackMode.toString)
+ echo(s"""
+ |Switched ${if (old) "off" else "on"} fallback mode without restarting.
+ | If you have defined classes in the repl, it would
+be good to redefine them in case you plan to use them. If you still run
+into issues, it would be good to restart the repl and turn on `:fallback`
+mode as the first command.
+ """.stripMargin)
+ }
+
+ /** Show the history */
+ lazy val historyCommand = new LoopCommand("history", "show the history (optional num is commands to show)") {
+ override def usage = "[num]"
+ def defaultLines = 20
+
+ def apply(line: String): Result = {
+ if (history eq NoHistory)
+ return "No history available."
+
+ val xs = words(line)
+ val current = history.index
+ val count = try xs.head.toInt catch { case _: Exception => defaultLines }
+ val lines = history.asStrings takeRight count
+ val offset = current - lines.size + 1
+
+ for ((line, index) <- lines.zipWithIndex)
+ echo("%3d %s".format(index + offset, line))
+ }
+ }
+
+ // Used when you know you are most likely breaking into the middle
+ // of a line being typed. This softens the blow.
+ protected def echoAndRefresh(msg: String) = {
+ echo("\n" + msg)
+ in.redrawLine()
+ }
+ protected def echo(msg: String) = {
+ out println msg
+ out.flush()
+ }
+ protected def echoNoNL(msg: String) = {
+ out print msg
+ out.flush()
+ }
+
+ /** Search the history */
+ def searchHistory(_cmdline: String) {
+ val cmdline = _cmdline.toLowerCase
+ val offset = history.index - history.size + 1
+
+ for ((line, index) <- history.asStrings.zipWithIndex ; if line.toLowerCase contains cmdline)
+ echo("%d %s".format(index + offset, line))
+ }
+
+ private var currentPrompt = Properties.shellPromptString
+ def setPrompt(prompt: String) = currentPrompt = prompt
+ /** Prompt to print when awaiting input */
+ def prompt = currentPrompt
+
+ import LoopCommand.{ cmd, nullary }
+
+ /** Standard commands */
+ lazy val standardCommands = List(
+ cmd("cp", "<path>", "add a jar or directory to the classpath", addClasspath),
+ cmd("help", "[command]", "print this summary or command-specific help", helpCommand),
+ historyCommand,
+ cmd("h?", "<string>", "search the history", searchHistory),
+ cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand),
+ cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand),
+ cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
+ cmd("load", "<path>", "load and interpret a Scala file", loadCommand),
+ nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand),
+// nullary("power", "enable power user mode", powerCmd),
+ nullary("quit", "exit the repl", () => Result(false, None)),
+ nullary("replay", "reset execution and replay all previous commands", replay),
+ nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
+ shCommand,
+ nullary("silent", "disable/enable automatic printing of results", verbosity),
+ nullary("fallback", """
+ |disable/enable advanced repl changes; these fix some issues but may introduce others.
+ |This mode will be removed once these fixes stabilize""".stripMargin, toggleFallbackMode),
+ cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
+ nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand)
+ )
+
+ /** Power user commands */
+ lazy val powerCommands: List[LoopCommand] = List(
+ // cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand)
+ )
+
+ // private def dumpCommand(): Result = {
+ // echo("" + power)
+ // history.asStrings takeRight 30 foreach echo
+ // in.redrawLine()
+ // }
+ // private def valsCommand(): Result = power.valsDescription
+
+ private val typeTransforms = List(
+ "scala.collection.immutable." -> "immutable.",
+ "scala.collection.mutable." -> "mutable.",
+ "scala.collection.generic." -> "generic.",
+ "java.lang." -> "jl.",
+ "scala.runtime." -> "runtime."
+ )
+
+ private def importsCommand(line: String): Result = {
+ val tokens = words(line)
+ val handlers = intp.languageWildcardHandlers ++ intp.importHandlers
+ val isVerbose = tokens contains "-v"
+
+ handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach {
+ case (handler, idx) =>
+ val (types, terms) = handler.importedSymbols partition (_.name.isTypeName)
+ val imps = handler.implicitSymbols
+ val found = tokens filter (handler importsSymbolNamed _)
+ val typeMsg = if (types.isEmpty) "" else types.size + " types"
+ val termMsg = if (terms.isEmpty) "" else terms.size + " terms"
+ val implicitMsg = if (imps.isEmpty) "" else imps.size + " are implicit"
+ val foundMsg = if (found.isEmpty) "" else found.mkString(" // imports: ", ", ", "")
+ val statsMsg = List(typeMsg, termMsg, implicitMsg) filterNot (_ == "") mkString ("(", ", ", ")")
+
+ intp.reporter.printMessage("%2d) %-30s %s%s".format(
+ idx + 1,
+ handler.importString,
+ statsMsg,
+ foundMsg
+ ))
+ }
+ }
+
+ private def implicitsCommand(line: String): Result = onIntp { intp =>
+ import intp._
+ import global._
+
+ def p(x: Any) = intp.reporter.printMessage("" + x)
+
+ // If an argument is given, only show a source with that
+ // in its name somewhere.
+ val args = line split "\\s+"
+ val filtered = intp.implicitSymbolsBySource filter {
+ case (source, syms) =>
+ (args contains "-v") || {
+ if (line == "") (source.fullName.toString != "scala.Predef")
+ else (args exists (source.name.toString contains _))
+ }
+ }
+
+ if (filtered.isEmpty)
+ return "No implicits have been imported other than those in Predef."
+
+ filtered foreach {
+ case (source, syms) =>
+ p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
+
+ // This groups the members by where the symbol is defined
+ val byOwner = syms groupBy (_.owner)
+ val sortedOwners = byOwner.toList sortBy { case (owner, _) => afterTyper(source.info.baseClasses indexOf owner) }
+
+ sortedOwners foreach {
+ case (owner, members) =>
+ // Within each owner, we cluster results based on the final result type
+ // if there are more than a couple, and sort each cluster based on name.
+ // This is really just trying to make the 100 or so implicits imported
+ // by default into something readable.
+ val memberGroups: List[List[Symbol]] = {
+ val groups = members groupBy (_.tpe.finalResultType) toList
+ val (big, small) = groups partition (_._2.size > 3)
+ val xss = (
+ (big sortBy (_._1.toString) map (_._2)) :+
+ (small flatMap (_._2))
+ )
+
+ xss map (xs => xs sortBy (_.name.toString))
+ }
+
+ val ownerMessage = if (owner == source) " defined in " else " inherited from "
+ p(" /* " + members.size + ownerMessage + owner.fullName + " */")
+
+ memberGroups foreach { group =>
+ group foreach (s => p(" " + intp.symbolDefString(s)))
+ p("")
+ }
+ }
+ p("")
+ }
+ }
+
+ private def findToolsJar() = {
+ val jdkPath = Directory(jdkHome)
+ val jar = jdkPath / "lib" / "tools.jar" toFile;
+
+ if (jar isFile)
+ Some(jar)
+ else if (jdkPath.isDirectory)
+ jdkPath.deepFiles find (_.name == "tools.jar")
+ else None
+ }
+ private def addToolsJarToLoader() = {
+ val cl = findToolsJar match {
+ case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
+ case _ => intp.classLoader
+ }
+ if (Javap.isAvailable(cl)) {
+ logDebug(":javap available.")
+ cl
+ }
+ else {
+ logDebug(":javap unavailable: no tools.jar at " + jdkHome)
+ intp.classLoader
+ }
+ }
+
+ protected def newJavap() = new JavapClass(addToolsJarToLoader(), new SparkIMain.ReplStrippingWriter(intp)) {
+ override def tryClass(path: String): Array[Byte] = {
+ val hd :: rest = path split '.' toList;
+ // If there are dots in the name, the first segment is the
+ // key to finding it.
+ if (rest.nonEmpty) {
+ intp optFlatName hd match {
+ case Some(flat) =>
+ val clazz = flat :: rest mkString NAME_JOIN_STRING
+ val bytes = super.tryClass(clazz)
+ if (bytes.nonEmpty) bytes
+ else super.tryClass(clazz + MODULE_SUFFIX_STRING)
+ case _ => super.tryClass(path)
+ }
+ }
+ else {
+ // Look for Foo first, then Foo$, but if Foo$ is given explicitly,
+ // we have to drop the $ to find object Foo, then tack it back onto
+ // the end of the flattened name.
+ def className = intp flatName path
+ def moduleName = (intp flatName path.stripSuffix(MODULE_SUFFIX_STRING)) + MODULE_SUFFIX_STRING
+
+ val bytes = super.tryClass(className)
+ if (bytes.nonEmpty) bytes
+ else super.tryClass(moduleName)
+ }
+ }
+ }
+ // private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap())
+ private lazy val javap =
+ try newJavap()
+ catch { case _: Exception => null }
+
+ // Still todo: modules.
+ private def typeCommand(line0: String): Result = {
+ line0.trim match {
+ case "" => ":type [-v] <expression>"
+ case s if s startsWith "-v " => typeCommandInternal(s stripPrefix "-v " trim, true)
+ case s => typeCommandInternal(s, false)
+ }
+ }
+
+ private def warningsCommand(): Result = {
+ if (intp.lastWarnings.isEmpty)
+ "Can't find any cached warnings."
+ else
+ intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) }
+ }
+
+ private def javapCommand(line: String): Result = {
+ if (javap == null)
+ ":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome)
+ else if (javaVersion startsWith "1.7")
+ ":javap not yet working with java 1.7"
+ else if (line == "")
+ ":javap [-lcsvp] [path1 path2 ...]"
+ else
+ javap(words(line)) foreach { res =>
+ if (res.isError) return "Failed: " + res.value
+ else res.show()
+ }
+ }
+
+ private def wrapCommand(line: String): Result = {
+ def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T"
+ onIntp { intp =>
+ import intp._
+ import global._
+
+ words(line) match {
+ case Nil =>
+ intp.executionWrapper match {
+ case "" => "No execution wrapper is set."
+ case s => "Current execution wrapper: " + s
+ }
+ case "clear" :: Nil =>
+ intp.executionWrapper match {
+ case "" => "No execution wrapper is set."
+ case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
+ }
+ case wrapper :: Nil =>
+ intp.typeOfExpression(wrapper) match {
+ case PolyType(List(targ), MethodType(List(arg), restpe)) =>
+ intp setExecutionWrapper intp.pathToTerm(wrapper)
+ "Set wrapper to '" + wrapper + "'"
+ case tp =>
+ failMsg + "\nFound: <unknown>"
+ }
+ case _ => failMsg
+ }
+ }
+ }
+
+ private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent"
+ // private def phaseCommand(name: String): Result = {
+ // val phased: Phased = power.phased
+ // import phased.NoPhaseName
+
+ // if (name == "clear") {
+ // phased.set(NoPhaseName)
+ // intp.clearExecutionWrapper()
+ // "Cleared active phase."
+ // }
+ // else if (name == "") phased.get match {
+ // case NoPhaseName => "Usage: :phase <expr> (e.g. typer, erasure.next, erasure+3)"
+ // case ph => "Active phase is '%s'. (To clear, :phase clear)".format(phased.get)
+ // }
+ // else {
+ // val what = phased.parse(name)
+ // if (what.isEmpty || !phased.set(what))
+ // "'" + name + "' does not appear to represent a valid phase."
+ // else {
+ // intp.setExecutionWrapper(pathToPhaseWrapper)
+ // val activeMessage =
+ // if (what.toString.length == name.length) "" + what
+ // else "%s (%s)".format(what, name)
+
+ // "Active phase is now: " + activeMessage
+ // }
+ // }
+ // }
+
+ /** Available commands */
+ def commands: List[LoopCommand] = standardCommands /*++ (
+ if (isReplPower) powerCommands else Nil
+ )*/
+
+ private val replayQuestionMessage =
+ """|That entry seems to have slain the compiler. Shall I replay
+ |your session? I can re-run each line except the last one.
+ |[y/n]
+ """.trim.stripMargin
+
+ private def crashRecovery(ex: Throwable): Boolean = {
+ echo(ex.toString)
+ ex match {
+ case _: NoSuchMethodError | _: NoClassDefFoundError =>
+ echo("\nUnrecoverable error.")
+ throw ex
+ case _ =>
+ def fn(): Boolean =
+ try in.readYesOrNo(replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() })
+ catch { case _: RuntimeException => false }
+
+ if (fn()) replay()
+ else echo("\nAbandoning crashed session.")
+ }
+ true
+ }
+
+ /** The main read-eval-print loop for the repl. It calls
+ * command() for each line of input, and stops when
+ * command() returns false.
+ */
+ def loop() {
+ def readOneLine() = {
+ out.flush()
+ in readLine prompt
+ }
+ // return false if repl should exit
+ def processLine(line: String): Boolean = {
+ if (isAsync) {
+ if (!awaitInitialized()) return false
+ runThunks()
+ }
+ if (line eq null) false // assume null means EOF
+ else command(line) match {
+ case Result(false, _) => false
+ case Result(_, Some(finalLine)) => addReplay(finalLine) ; true
+ case _ => true
+ }
+ }
+ def innerLoop() {
+ val shouldContinue = try {
+ processLine(readOneLine())
+ } catch {case t: Throwable => crashRecovery(t)}
+ if (shouldContinue)
+ innerLoop()
+ }
+ innerLoop()
+ }
+
+ /** interpret all lines from a specified file */
+ def interpretAllFrom(file: File) {
+ savingReader {
+ savingReplayStack {
+ file applyReader { reader =>
+ in = SimpleReader(reader, out, false)
+ echo("Loading " + file + "...")
+ loop()
+ }
+ }
+ }
+ }
+
+ /** create a new interpreter and replay the given commands */
+ def replay() {
+ reset()
+ if (replayCommandStack.isEmpty)
+ echo("Nothing to replay.")
+ else for (cmd <- replayCommands) {
+ echo("Replaying: " + cmd) // flush because maybe cmd will have its own output
+ command(cmd)
+ echo("")
+ }
+ }
+ def resetCommand() {
+ echo("Resetting repl state.")
+ if (replayCommandStack.nonEmpty) {
+ echo("Forgetting this session history:\n")
+ replayCommands foreach echo
+ echo("")
+ replayCommandStack = Nil
+ }
+ if (intp.namedDefinedTerms.nonEmpty)
+ echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", "))
+ if (intp.definedTypes.nonEmpty)
+ echo("Forgetting defined types: " + intp.definedTypes.mkString(", "))
+
+ reset()
+ }
+
+ def reset() {
+ intp.reset()
+ // unleashAndSetPhase()
+ }
+
+ /** fork a shell and run a command */
+ lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])") {
+ override def usage = "<command line>"
+ def apply(line: String): Result = line match {
+ case "" => showUsage()
+ case _ =>
+ val toRun = classOf[ProcessResult].getName + "(" + string2codeQuoted(line) + ")"
+ intp interpret toRun
+ ()
+ }
+ }
+
+ def withFile(filename: String)(action: File => Unit) {
+ val f = File(filename)
+
+ if (f.exists) action(f)
+ else echo("That file does not exist")
+ }
+
+ def loadCommand(arg: String) = {
+ var shouldReplay: Option[String] = None
+ withFile(arg)(f => {
+ interpretAllFrom(f)
+ shouldReplay = Some(":load " + arg)
+ })
+ Result(true, shouldReplay)
+ }
+
+ def addAllClasspath(args: Seq[String]): Unit = {
+ var added = false
+ var totalClasspath = ""
+ for (arg <- args) {
+ val f = File(arg).normalize
+ if (f.exists) {
+ added = true
+ addedClasspath = ClassPath.join(addedClasspath, f.path)
+ totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
+ intp.addUrlsToClassPath(f.toURI.toURL)
+ sparkContext.addJar(f.toURI.toURL.getPath)
+ }
+ }
+ }
+
+ def addClasspath(arg: String): Unit = {
+ val f = File(arg).normalize
+ if (f.exists) {
+ addedClasspath = ClassPath.join(addedClasspath, f.path)
+ intp.addUrlsToClassPath(f.toURI.toURL)
+ sparkContext.addJar(f.toURI.toURL.getPath)
+ echo("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClasspathString))
+ }
+ else echo("The path '" + f + "' doesn't seem to exist.")
+ }
+
+
+ def powerCmd(): Result = {
+ if (isReplPower) "Already in power mode."
+ else enablePowerMode(false)
+ }
+
+ def enablePowerMode(isDuringInit: Boolean) = {
+ // replProps.power setValue true
+ // unleashAndSetPhase()
+ // asyncEcho(isDuringInit, power.banner)
+ }
+ // private def unleashAndSetPhase() {
+// if (isReplPower) {
+// // power.unleash()
+// // Set the phase to "typer"
+// intp beSilentDuring phaseCommand("typer")
+// }
+// }
+
+ def asyncEcho(async: Boolean, msg: => String) {
+ if (async) asyncMessage(msg)
+ else echo(msg)
+ }
+
+ def verbosity() = {
+ // val old = intp.printResults
+ // intp.printResults = !old
+ // echo("Switched " + (if (old) "off" else "on") + " result printing.")
+ }
+
+ /** Run one command submitted by the user. Two values are returned:
+ * (1) whether to keep running, (2) the line to record for replay,
+ * if any. */
+ def command(line: String): Result = {
+ if (line startsWith ":") {
+ val cmd = line.tail takeWhile (x => !x.isWhitespace)
+ uniqueCommand(cmd) match {
+ case Some(lc) => lc(line.tail stripPrefix cmd dropWhile (_.isWhitespace))
+ case _ => ambiguousError(cmd)
+ }
+ }
+ else if (intp.global == null) Result(false, None) // Notice failure to create compiler
+ else Result(true, interpretStartingWith(line))
+ }
+
+ private def readWhile(cond: String => Boolean) = {
+ Iterator continually in.readLine("") takeWhile (x => x != null && cond(x))
+ }
+
+ def pasteCommand(): Result = {
+ echo("// Entering paste mode (ctrl-D to finish)\n")
+ val code = readWhile(_ => true) mkString "\n"
+ echo("\n// Exiting paste mode, now interpreting.\n")
+ intp interpret code
+ ()
+ }
+
+ private object paste extends Pasted {
+ val ContinueString = " | "
+ val PromptString = "scala> "
+
+ def interpret(line: String): Unit = {
+ echo(line.trim)
+ intp interpret line
+ echo("")
+ }
+
+ def transcript(start: String) = {
+ echo("\n// Detected repl transcript paste: ctrl-D to finish.\n")
+ apply(Iterator(start) ++ readWhile(_.trim != PromptString.trim))
+ }
+ }
+ import paste.{ ContinueString, PromptString }
+
+ /** Interpret expressions starting with the first line.
+ * Read lines until a complete compilation unit is available
+ * or until a syntax error has been seen. If a full unit is
+ * read, go ahead and interpret it. Return the full string
+ * to be recorded for replay, if any.
+ */
+ def interpretStartingWith(code: String): Option[String] = {
+ // signal to the completer that (non-completion) input has been received
+ in.completion.resetVerbosity()
+
+ def reallyInterpret = {
+ val reallyResult = intp.interpret(code)
+ (reallyResult, reallyResult match {
+ case IR.Error => None
+ case IR.Success => Some(code)
+ case IR.Incomplete =>
+ if (in.interactive && code.endsWith("\n\n")) {
+ echo("You typed two blank lines. Starting a new command.")
+ None
+ }
+ else in.readLine(ContinueString) match {
+ case null =>
+ // we know compilation is going to fail since we're at EOF and the
+ // parser thinks the input is still incomplete, but since this is
+ // a file being read non-interactively we want to fail. So we send
+ // it straight to the compiler for the nice error message.
+ intp.compileString(code)
+ None
+
+ case line => interpretStartingWith(code + "\n" + line)
+ }
+ })
+ }
+
+ /** Here we place ourselves between the user and the interpreter and examine
+ * the input they are ostensibly submitting. We intervene in several cases:
+ *
+ * 1) If the line starts with "scala> " it is assumed to be an interpreter paste.
+ * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation
+ * on the previous result.
+ * 3) If the Completion object's execute returns Some(_), we inject that value
+ * and avoid the interpreter, as it's likely not valid scala code.
+ */
+ if (code == "") None
+ else if (!paste.running && code.trim.startsWith(PromptString)) {
+ paste.transcript(code)
+ None
+ }
+ else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") {
+ interpretStartingWith(intp.mostRecentVar + code)
+ }
+ else if (code.trim startsWith "//") {
+ // line comment, do nothing
+ None
+ }
+ else
+ reallyInterpret._2
+ }
+
+ // runs :load `file` on any files passed via -i
+ def loadFiles(settings: Settings) = settings match {
+ case settings: SparkRunnerSettings =>
+ for (filename <- settings.loadfiles.value) {
+ val cmd = ":load " + filename
+ command(cmd)
+ addReplay(cmd)
+ echo("")
+ }
+ case _ =>
+ }
+
+ /** Tries to create a JLineReader, falling back to SimpleReader:
+ * unless settings or properties are such that it should start
+ * with SimpleReader.
+ */
+ def chooseReader(settings: Settings): InteractiveReader = {
+ if (settings.Xnojline.value || Properties.isEmacsShell)
+ SimpleReader()
+ else try new SparkJLineReader(
+ if (settings.noCompletion.value) NoCompletion
+ else new SparkJLineCompletion(intp)
+ )
+ catch {
+ case ex @ (_: Exception | _: NoClassDefFoundError) =>
+ echo("Failed to create SparkJLineReader: " + ex + "\nFalling back to SimpleReader.")
+ SimpleReader()
+ }
+ }
+
+ val u: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe
+ val m = u.runtimeMirror(Utils.getSparkClassLoader)
+ private def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
+ u.TypeTag[T](
+ m,
+ new TypeCreator {
+ def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type =
+ m.staticClass(classTag[T].runtimeClass.getName).toTypeConstructor.asInstanceOf[U # Type]
+ })
+
+ def process(settings: Settings): Boolean = savingContextLoader {
+ if (getMaster() == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
+
+ this.settings = settings
+ createInterpreter()
+
+ // sets in to some kind of reader depending on environmental cues
+ in = in0 match {
+ case Some(reader) => SimpleReader(reader, out, true)
+ case None =>
+ // some post-initialization
+ chooseReader(settings) match {
+ case x: SparkJLineReader => addThunk(x.consoleReader.postInit) ; x
+ case x => x
+ }
+ }
+ lazy val tagOfSparkIMain = tagOfStaticClass[org.apache.spark.repl.SparkIMain]
+ // Bind intp somewhere out of the regular namespace where
+ // we can get at it in generated code.
+ addThunk(intp.quietBind(NamedParam[SparkIMain]("$intp", intp)(tagOfSparkIMain, classTag[SparkIMain])))
+ addThunk({
+ import scala.tools.nsc.io._
+ import Properties.userHome
+ import scala.compat.Platform.EOL
+ val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
+ if (autorun.isDefined) intp.quietRun(autorun.get)
+ })
+
+ addThunk(printWelcome())
+ addThunk(initializeSpark())
+
+ // it is broken on startup; go ahead and exit
+ if (intp.reporter.hasErrors)
+ return false
+
+ // This is about the illusion of snappiness. We call initialize()
+ // which spins off a separate thread, then print the prompt and try
+ // our best to look ready. The interlocking lazy vals tend to
+ // inter-deadlock, so we break the cycle with a single asynchronous
+ // message to an actor.
+ if (isAsync) {
+ intp initialize initializedCallback()
+ createAsyncListener() // listens for signal to run postInitialization
+ }
+ else {
+ intp.initializeSynchronous()
+ postInitialization()
+ }
+ // printWelcome()
+
+ loadFiles(settings)
+
+ try loop()
+ catch AbstractOrMissingHandler()
+ finally closeInterpreter()
+
+ true
+ }
+
+ def createSparkContext(): SparkContext = {
+ val execUri = System.getenv("SPARK_EXECUTOR_URI")
+ val jars = SparkILoop.getAddedJars
+ val conf = new SparkConf()
+ .setMaster(getMaster())
+ .setAppName("Spark shell")
+ .setJars(jars)
+ .set("spark.repl.class.uri", intp.classServer.uri)
+ if (execUri != null) {
+ conf.set("spark.executor.uri", execUri)
+ }
+ sparkContext = new SparkContext(conf)
+ logInfo("Created spark context.")
+ sparkContext
+ }
+
+ private def getMaster(): String = {
+ val master = this.master match {
+ case Some(m) => m
+ case None =>
+ val envMaster = sys.env.get("MASTER")
+ val propMaster = sys.props.get("spark.master")
+ propMaster.orElse(envMaster).getOrElse("local[*]")
+ }
+ master
+ }
+
+ /** process command-line arguments and do as they request */
+ def process(args: Array[String]): Boolean = {
+ val command = new SparkCommandLine(args.toList, msg => echo(msg))
+ def neededHelp(): String =
+ (if (command.settings.help.value) command.usageMsg + "\n" else "") +
+ (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
+
+ // if they asked for no help and command is valid, we call the real main
+ neededHelp() match {
+ case "" => command.ok && process(command.settings)
+ case help => echoNoNL(help) ; true
+ }
+ }
+
+ @deprecated("Use `process` instead", "2.9.0")
+ def main(settings: Settings): Unit = process(settings)
+}
+
+object SparkILoop {
+ implicit def loopToInterpreter(repl: SparkILoop): SparkIMain = repl.intp
+ private def echo(msg: String) = Console println msg
+
+ def getAddedJars: Array[String] = {
+ val envJars = sys.env.get("ADD_JARS")
+ val propJars = sys.props.get("spark.jars").flatMap { p =>
+ if (p == "") None else Some(p)
+ }
+ val jars = propJars.orElse(envJars).getOrElse("")
+ Utils.resolveURIs(jars).split(",").filter(_.nonEmpty)
+ }
+
+ // Designed primarily for use by test code: takes a String with a
+ // bunch of code and prints out a transcript of what it would look
+ // like if you'd just typed it into the repl.
+ def runForTranscript(code: String, settings: Settings): String = {
+ import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
+
+ stringFromStream { ostream =>
+ Console.withOut(ostream) {
+ val output = new JPrintWriter(new OutputStreamWriter(ostream), true) {
+ override def write(str: String) = {
+ // completely skip continuation lines
+ if (str forall (ch => ch.isWhitespace || ch == '|')) ()
+ // print a newline on empty scala prompts
+ else if ((str contains '\n') && (str.trim == "scala> ")) super.write("\n")
+ else super.write(str)
+ }
+ }
+ val input = new BufferedReader(new StringReader(code)) {
+ override def readLine(): String = {
+ val s = super.readLine()
+ // helping out by printing the line being interpreted.
+ if (s != null)
+ output.println(s)
+ s
+ }
+ }
+ val repl = new SparkILoop(input, output)
+
+ if (settings.classpath.isDefault)
+ settings.classpath.value = sys.props("java.class.path")
+
+ getAddedJars.foreach(settings.classpath.append(_))
+
+ repl process settings
+ }
+ }
+ }
+
+ /** Creates an interpreter loop with default settings and feeds
+ * the given code to it as input.
+ */
+ def run(code: String, sets: Settings = new Settings): String = {
+ import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
+
+ stringFromStream { ostream =>
+ Console.withOut(ostream) {
+ val input = new BufferedReader(new StringReader(code))
+ val output = new JPrintWriter(new OutputStreamWriter(ostream), true)
+ val repl = new ILoop(input, output)
+
+ if (sets.classpath.isDefault)
+ sets.classpath.value = sys.props("java.class.path")
+
+ repl process sets
+ }
+ }
+ }
+ def run(lines: List[String]): String = run(lines map (_ + "\n") mkString)
+}
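The companion's run and runForTranscript helpers exist mainly for tests such as ReplSuite in this same patch: they pipe a canned script into the loop's stdin and return everything it printed. A smoke-test sketch against run; the expected transcript fragment is an assumption about the repl's usual echo format:

```scala
import org.apache.spark.repl.SparkILoop

object RunSketch {
  def main(args: Array[String]): Unit = {
    // run() wires stdin to the given lines, captures stdout, and returns it.
    val transcript = SparkILoop.run(List("val x = 1 + 1", "println(x)"))
    // The repl normally echoes definitions as "x: Int = 2".
    assert(transcript.contains("x: Int = 2"), "unexpected transcript:\n" + transcript)
  }
}
```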
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
new file mode 100644
index 0000000000..7667a9c119
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -0,0 +1,147 @@
+// scalastyle:off
+
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package org.apache.spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+import scala.reflect.internal.util.Position
+import scala.util.control.Exception.ignoring
+import scala.tools.nsc.util.stackTraceString
+
+import org.apache.spark.SPARK_VERSION
+
+/**
+ * Machinery for the asynchronous initialization of the repl.
+ */
+trait SparkILoopInit {
+ self: SparkILoop =>
+
+ /** Print a welcome message */
+ def printWelcome() {
+ echo("""Welcome to
+ ____ __
+ / __/__ ___ _____/ /__
+ _\ \/ _ \/ _ `/ __/ '_/
+ /___/ .__/\_,_/_/ /_/\_\ version %s
+ /_/
+""".format(SPARK_VERSION))
+ import Properties._
+ val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
+ versionString, javaVmName, javaVersion)
+ echo(welcomeMsg)
+ echo("Type in expressions to have them evaluated.")
+ echo("Type :help for more information.")
+ }
+
+ protected def asyncMessage(msg: String) {
+ if (isReplInfo || isReplPower)
+ echoAndRefresh(msg)
+ }
+
+ private val initLock = new java.util.concurrent.locks.ReentrantLock()
+ private val initCompilerCondition = initLock.newCondition() // signal the compiler is initialized
+ private val initLoopCondition = initLock.newCondition() // signal the whole repl is initialized
+ private val initStart = System.nanoTime
+
+ private def withLock[T](body: => T): T = {
+ initLock.lock()
+ try body
+ finally initLock.unlock()
+ }
+ // a condition used to ensure serial access to the compiler.
+ @volatile private var initIsComplete = false
+ @volatile private var initError: String = null
+ private def elapsed() = "%.3f".format((System.nanoTime - initStart).toDouble / 1000000000L)
+
+ // the method to be called when the interpreter is initialized.
+ // Very important this method does nothing synchronous (i.e. do
+ // not try to use the interpreter) because until it returns, the
+ // repl's lazy val `global` is still locked.
+ protected def initializedCallback() = withLock(initCompilerCondition.signal())
+
+ // Spins off a thread which awaits a single message once the interpreter
+ // has been initialized.
+ protected def createAsyncListener() = {
+ io.spawn {
+ withLock(initCompilerCondition.await())
+ asyncMessage("[info] compiler init time: " + elapsed() + " s.")
+ postInitialization()
+ }
+ }
+
+ // called from main repl loop
+ protected def awaitInitialized(): Boolean = {
+ if (!initIsComplete)
+ withLock { while (!initIsComplete) initLoopCondition.await() }
+ if (initError != null) {
+ println("""
+ |Failed to initialize the REPL due to an unexpected error.
+ |This is a bug; please report it along with the error diagnostics printed below.
+ |%s.""".stripMargin.format(initError)
+ )
+ false
+ } else true
+ }
+ // private def warningsThunks = List(
+ // () => intp.bind("lastWarnings", "" + typeTag[List[(Position, String)]], intp.lastWarnings _),
+ // )
+
+ protected def postInitThunks = List[Option[() => Unit]](
+ Some(intp.setContextClassLoader _),
+ if (isReplPower) Some(() => enablePowerMode(true)) else None
+ ).flatten
+ // ++ (
+ // warningsThunks
+ // )
+ // called once after init condition is signalled
+ protected def postInitialization() {
+ try {
+ postInitThunks foreach (f => addThunk(f()))
+ runThunks()
+ } catch {
+ case ex: Throwable =>
+ initError = stackTraceString(ex)
+ throw ex
+ } finally {
+ initIsComplete = true
+
+ if (isAsync) {
+ asyncMessage("[info] total init time: " + elapsed() + " s.")
+ withLock(initLoopCondition.signal())
+ }
+ }
+ }
+
+ def initializeSpark() {
+ intp.beQuietDuring {
+ command("""
+ @transient val sc = org.apache.spark.repl.Main.interp.createSparkContext();
+ """)
+ command("import org.apache.spark.SparkContext._")
+ }
+ echo("Spark context available as sc.")
+ }
+
+ // code to be executed only after the interpreter is initialized
+ // and the lazy val `global` can be accessed without risk of deadlock.
+ private var pendingThunks: List[() => Unit] = Nil
+ protected def addThunk(body: => Unit) = synchronized {
+ pendingThunks :+= (() => body)
+ }
+ protected def runThunks(): Unit = synchronized {
+ if (pendingThunks.nonEmpty)
+ logDebug("Clearing " + pendingThunks.size + " thunks.")
+
+ while (pendingThunks.nonEmpty) {
+ val thunk = pendingThunks.head
+ pendingThunks = pendingThunks.tail
+ thunk()
+ }
+ }
+}
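Stripped of repl specifics, the machinery above is a lock-and-condition handshake plus a queue of deferred thunks: nothing may touch the lazy `global` until the initializing thread signals readiness. A self-contained sketch of the same pattern, with hypothetical names and a plain thread standing in for compiler initialization:

```scala
import java.util.concurrent.locks.ReentrantLock

object InitHandshakeSketch {
  private val lock  = new ReentrantLock()
  private val ready = lock.newCondition()
  @volatile private var initDone = false
  private var thunks: List[() => Unit] = Nil

  def addThunk(body: => Unit): Unit = synchronized { thunks :+= (() => body) }

  def runThunks(): Unit = synchronized {
    while (thunks.nonEmpty) { val t = thunks.head; thunks = thunks.tail; t() }
  }

  def main(args: Array[String]): Unit = {
    addThunk(println("runs only after initialization is signalled"))
    new Thread(new Runnable {        // stands in for compiler initialization
      def run(): Unit = {
        Thread.sleep(100)            // pretend to do expensive setup
        lock.lock()
        try { initDone = true; ready.signal() } finally lock.unlock()
      }
    }).start()
    lock.lock()
    try { while (!initDone) ready.await() } finally lock.unlock()
    runThunks()                      // safe: init is complete
  }
}
```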
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkIMain.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkIMain.scala
new file mode 100644
index 0000000000..646c68e60c
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkIMain.scala
@@ -0,0 +1,1445 @@
+// scalastyle:off
+
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package org.apache.spark.repl
+
+import java.io.File
+
+import scala.tools.nsc._
+import scala.tools.nsc.backend.JavaPlatform
+import scala.tools.nsc.interpreter._
+
+import Predef.{ println => _, _ }
+import scala.tools.nsc.util.{MergedClassPath, stringFromWriter, ScalaClassLoader, stackTraceString}
+import scala.reflect.internal.util._
+import java.net.URL
+import scala.sys.BooleanProp
+import io.{AbstractFile, PlainFile, VirtualDirectory}
+
+import reporters._
+import symtab.Flags
+import scala.reflect.internal.Names
+import scala.tools.util.PathResolver
+import ScalaClassLoader.URLClassLoader
+import scala.tools.nsc.util.Exceptional.unwrap
+import scala.collection.{ mutable, immutable }
+import scala.util.control.Exception.{ ultimately }
+import SparkIMain._
+import java.util.concurrent.Future
+import typechecker.Analyzer
+import scala.language.implicitConversions
+import scala.reflect.runtime.{ universe => ru }
+import scala.reflect.{ ClassTag, classTag }
+import scala.tools.reflect.StdRuntimeTags._
+import scala.util.control.ControlThrowable
+
+import org.apache.spark.{Logging, HttpServer, SecurityManager, SparkConf}
+import org.apache.spark.util.Utils
+
+// /** directory to save .class files to */
+// private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory("((memory))", None) {
+// private def pp(root: AbstractFile, indentLevel: Int) {
+// val spaces = " " * indentLevel
+// out.println(spaces + root.name)
+// if (root.isDirectory)
+// root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
+// }
+// // print the contents hierarchically
+// def show() = pp(this, 0)
+// }
+
+ /** An interpreter for Scala code.
+ *
+ * The main public entry points are compile(), interpret(), and bind().
+ * The compile() method loads a complete Scala file. The interpret() method
+ * executes one line of Scala code at the request of the user. The bind()
+ * method binds an object to a variable that can then be used by later
+ * interpreted code.
+ *
+ * The overall approach is based on compiling the requested code and then
+ * using a Java classloader and Java reflection to run the code
+ * and access its results.
+ *
+ * In more detail, a single compiler instance is used
+ * to accumulate all successfully compiled or interpreted Scala code. To
+ * "interpret" a line of code, the compiler generates a fresh object that
+ * includes the line of code and which has public member(s) to export
+ * all variables defined by that code. To extract the result of an
+ * interpreted line to show the user, a second "result object" is created
+ * which imports the variables exported by the above object and then
+ * exports members called "$eval" and "$print". To accommodate user expressions
+ * that read from variables or methods defined in previous statements, "import"
+ * statements are used.
+ *
+ * This interpreter shares the strengths and weaknesses of using the
+ * full compiler-to-Java. The main strength is that interpreted code
+ * behaves exactly as does compiled code, including running at full speed.
+ * The main weakness is that redefining classes and methods is not handled
+ * properly, because rebinding at the Java level is technically difficult.
+ *
+ * @author Moez A. Abdel-Gawad
+ * @author Lex Spoon
+ */
+ class SparkIMain(
+ initialSettings: Settings,
+ val out: JPrintWriter,
+ propagateExceptions: Boolean = false)
+ extends SparkImports with Logging { imain =>
+
+ val conf = new SparkConf()
+
+ val SPARK_DEBUG_REPL: Boolean = (System.getenv("SPARK_DEBUG_REPL") == "1")
+ /** Local directory to save .class files to */
+ lazy val outputDir = {
+ val tmp = System.getProperty("java.io.tmpdir")
+ val rootDir = conf.get("spark.repl.classdir", tmp)
+ Utils.createTempDir(rootDir)
+ }
+ if (SPARK_DEBUG_REPL) {
+ echo("Output directory: " + outputDir)
+ }
+
+ val virtualDirectory = new PlainFile(outputDir) // "directory" for classfiles
+ /** Jetty server that will serve our classes to worker nodes */
+ val classServerPort = conf.getInt("spark.replClassServer.port", 0)
+ val classServer = new HttpServer(outputDir, new SecurityManager(conf), classServerPort, "HTTP class server")
+ private var currentSettings: Settings = initialSettings
+ var printResults = true // whether to print result lines
+ var totalSilence = false // whether to print anything
+ private var _initializeComplete = false // compiler is initialized
+ private var _isInitialized: Future[Boolean] = null // set up initialization future
+ private var bindExceptions = true // whether to bind the lastException variable
+ private var _executionWrapper = "" // code to be wrapped around all lines
+
+
+ // Start the classServer and store its URI in a spark system property
+ // (which will be passed to executors so that they can connect to it)
+ classServer.start()
+ if (SPARK_DEBUG_REPL) {
+ echo("Class server started, URI = " + classServer.uri)
+ }
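+ // Executors fetch REPL-generated classes from this HTTP server; in Spark this URI
+ // is typically published to them as the "spark.repl.class.uri" property (set by the
+ // shell on startup), which the executor-side class loader reads.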
+
+ /** We're going to go to some trouble to initialize the compiler asynchronously.
+ * It's critical that nothing call into it until it's been initialized or we will
+ * run into unrecoverable issues, but the perceived repl startup time goes
+ * through the roof if we wait for it. So we initialize it with a future and
+ * use a lazy val to ensure that any attempt to use the compiler object waits
+ * on the future.
+ */
+ private var _classLoader: AbstractFileClassLoader = null // active classloader
+ private val _compiler: Global = newCompiler(settings, reporter) // our private compiler
+
+ private trait ExposeAddUrl extends URLClassLoader { def addNewUrl(url: URL) = this.addURL(url) }
+ private var _runtimeClassLoader: URLClassLoader with ExposeAddUrl = null // wrapper exposing addURL
+
+ private val nextReqId = {
+ var counter = 0
+ () => { counter += 1 ; counter }
+ }
+
+ def compilerClasspath: Seq[URL] = (
+ if (isInitializeComplete) global.classPath.asURLs
+ else new PathResolver(settings).result.asURLs // the compiler's classpath
+ )
+ def settings = currentSettings
+ def mostRecentLine = prevRequestList match {
+ case Nil => ""
+ case req :: _ => req.originalLine
+ }
+ // Run the code body with the given boolean settings flipped to true.
+ def withoutWarnings[T](body: => T): T = beQuietDuring {
+ val saved = settings.nowarn.value
+ if (!saved)
+ settings.nowarn.value = true
+
+ try body
+ finally if (!saved) settings.nowarn.value = false
+ }
+
+ /** construct an interpreter that reports to Console */
+ def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
+ def this() = this(new Settings())
+
+ lazy val repllog: Logger = new Logger {
+ val out: JPrintWriter = imain.out
+ val isInfo: Boolean = BooleanProp keyExists "scala.repl.info"
+ val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug"
+ val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace"
+ }
+ lazy val formatting: Formatting = new Formatting {
+ val prompt = Properties.shellPromptString
+ }
+ lazy val reporter: ConsoleReporter = new SparkIMain.ReplReporter(this)
+
+ import formatting._
+ import reporter.{ printMessage, withoutTruncating }
+
+ // This exists mostly because using the reporter too early leads to deadlock.
+ private def echo(msg: String) { Console println msg }
+ private def _initSources = List(new BatchSourceFile("<init>", "class $repl_$init { }"))
+ private def _initialize() = {
+ try {
+ // todo. if this crashes, REPL will hang
+ new _compiler.Run() compileSources _initSources
+ _initializeComplete = true
+ true
+ }
+ catch AbstractOrMissingHandler()
+ }
+ private def tquoted(s: String) = "\"\"\"" + s + "\"\"\""
+
+ // argument is a thunk to execute after init is done
+ def initialize(postInitSignal: => Unit) {
+ synchronized {
+ if (_isInitialized == null) {
+ _isInitialized = io.spawn {
+ try _initialize()
+ finally postInitSignal
+ }
+ }
+ }
+ }
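+ // Hedged usage sketch: callers kick off compiler init early and are signalled on
+ // completion, e.g.
+ //   intp.initialize { println("compiler ready") }   // returns immediately
+ //   intp.global                                     // blocks until init finishes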
+ def initializeSynchronous(): Unit = {
+ if (!isInitializeComplete) {
+ _initialize()
+ assert(global != null, global)
+ }
+ }
+ def isInitializeComplete = _initializeComplete
+
+ /** The public compiler accessor; it goes through the initialization future. */
+ lazy val global: Global = {
+ if (isInitializeComplete) _compiler
+ else {
+ // If init hasn't been called yet you're on your own.
+ if (_isInitialized == null) {
+ logWarning("Warning: compiler accessed before init set up. Assuming no postInit code.")
+ initialize(())
+ }
+ // blocks until it is; false means catastrophic failure
+ if (_isInitialized.get()) _compiler
+ else null
+ }
+ }
+ @deprecated("Use `global` for access to the compiler instance.", "2.9.0")
+ lazy val compiler: global.type = global
+
+ import global._
+ import definitions.{ScalaPackage, JavaLangPackage, termMember, typeMember}
+ import rootMirror.{RootClass, getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass}
+
+ implicit class ReplTypeOps(tp: Type) {
+ def orElse(other: => Type): Type = if (tp ne NoType) tp else other
+ def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp)
+ }
+
+ // TODO: If we try to make naming a lazy val, we run into big time
+ // scalac unhappiness with what look like cycles. It has not been easy to
+ // reduce, but name resolution clearly takes different paths.
+ object naming extends {
+ val global: imain.global.type = imain.global
+ } with Naming {
+ // make sure we don't overwrite their unwisely named res3 etc.
+ def freshUserTermName(): TermName = {
+ val name = newTermName(freshUserVarName())
+ if (definedNameMap contains name) freshUserTermName()
+ else name
+ }
+ def isUserTermName(name: Name) = isUserVarName("" + name)
+ def isInternalTermName(name: Name) = isInternalVarName("" + name)
+ }
+ import naming._
+
+ object deconstruct extends {
+ val global: imain.global.type = imain.global
+ } with StructuredTypeStrings
+
+ lazy val memberHandlers = new {
+ val intp: imain.type = imain
+ } with SparkMemberHandlers
+ import memberHandlers._
+
+ /** Temporarily be quiet */
+ def beQuietDuring[T](body: => T): T = {
+ val saved = printResults
+ printResults = false
+ try body
+ finally printResults = saved
+ }
+ def beSilentDuring[T](operation: => T): T = {
+ val saved = totalSilence
+ totalSilence = true
+ try operation
+ finally totalSilence = saved
+ }
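+ // Usage sketch (illustrative only): suppress result printing for setup lines, e.g.
+ //   beQuietDuring { interpret("import org.apache.spark.SparkContext._") }
+ // beSilentDuring additionally suppresses all other output.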
+
+ def quietRun[T](code: String) = beQuietDuring(interpret(code))
+
+
+ private def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = {
+ case t: ControlThrowable => throw t
+ case t: Throwable =>
+ logDebug(label + ": " + unwrap(t))
+ logDebug(stackTraceString(unwrap(t)))
+ alt
+ }
+ /** takes AnyRef because it may be binding a Throwable or an Exceptional */
+
+ private def withLastExceptionLock[T](body: => T, alt: => T): T = {
+ assert(bindExceptions, "withLastExceptionLock called incorrectly.")
+ bindExceptions = false
+
+ try beQuietDuring(body)
+ catch logAndDiscard("withLastExceptionLock", alt)
+ finally bindExceptions = true
+ }
+
+ def executionWrapper = _executionWrapper
+ def setExecutionWrapper(code: String) = _executionWrapper = code
+ def clearExecutionWrapper() = _executionWrapper = ""
+
+ /** interpreter settings */
+ lazy val isettings = new SparkISettings(this)
+
+ /** Instantiate a compiler. Overridable. */
+ protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
+ settings.outputDirs setSingleOutput virtualDirectory
+ settings.exposeEmptyPackage.value = true
+ new Global(settings, reporter) with ReplGlobal {
+ override def toString: String = "<global>"
+ }
+ }
+
+ /**
+ * Adds any specified jars to the compile and runtime classpaths.
+ *
+ * @note Currently only supports jars, not directories
+ * @param urls The list of items to add to the compile and runtime classpaths
+ */
+ def addUrlsToClassPath(urls: URL*): Unit = {
+ new Run // Needed to force initialization of "something" to correctly load Scala classes from jars
+ urls.foreach(_runtimeClassLoader.addNewUrl) // Add jars/classes to runtime for execution
+ updateCompilerClassPath(urls: _*) // Add jars/classes to compile time for compiling
+ }
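+ // Usage sketch (jar path hypothetical): this is how :cp / --jars style additions
+ // can reach a running interpreter:
+ //   intp.addUrlsToClassPath(new java.io.File("/tmp/extra.jar").toURI.toURL)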
+
+ protected def updateCompilerClassPath(urls: URL*): Unit = {
+ require(!global.forMSIL) // Only support JavaPlatform
+
+ val platform = global.platform.asInstanceOf[JavaPlatform]
+
+ val newClassPath = mergeUrlsIntoClassPath(platform, urls: _*)
+
+ // NOTE: Must use reflection until this is exposed/fixed upstream in Scala
+ val fieldSetter = platform.getClass.getMethods
+ .find(_.getName.endsWith("currentClassPath_$eq")).get
+ fieldSetter.invoke(platform, Some(newClassPath))
+
+ // Reload all jars specified into our compiler
+ global.invalidateClassPathEntries(urls.map(_.getPath): _*)
+ }
+
+ protected def mergeUrlsIntoClassPath(platform: JavaPlatform, urls: URL*): MergedClassPath[AbstractFile] = {
+ // Collect our new jars/directories and add them to the existing set of classpaths
+ val allClassPaths = (
+ platform.classPath.asInstanceOf[MergedClassPath[AbstractFile]].entries ++
+ urls.map(url => {
+ platform.classPath.context.newClassPath(
+ if (url.getProtocol == "file") {
+ val f = new File(url.getPath)
+ if (f.isDirectory)
+ io.AbstractFile.getDirectory(f)
+ else
+ io.AbstractFile.getFile(f)
+ } else {
+ io.AbstractFile.getURL(url)
+ }
+ )
+ })
+ ).distinct
+
+ // Combine all of our classpaths (old and new) into one merged classpath
+ new MergedClassPath(allClassPaths, platform.classPath.context)
+ }
+
+ /** Parent classloader. Overridable. */
+ protected def parentClassLoader: ClassLoader =
+ SparkHelper.explicitParentLoader(settings).getOrElse( this.getClass.getClassLoader() )
+
+ /* A single class loader is used for all commands interpreted by this Interpreter.
+ It would also be possible to create a new class loader for each command
+ to interpret. The advantages of the current approach are:
+
+ - Expressions are only evaluated one time. This is especially
+ significant for I/O, e.g. "val x = Console.readLine"
+
+ The main disadvantage is:
+
+ - Objects, classes, and methods cannot be rebound. Instead, definitions
+ shadow the old ones, and old code objects refer to the old
+ definitions.
+ */
+ def resetClassLoader() = {
+ logDebug("Setting new classloader: was " + _classLoader)
+ _classLoader = null
+ ensureClassLoader()
+ }
+ final def ensureClassLoader() {
+ if (_classLoader == null)
+ _classLoader = makeClassLoader()
+ }
+ def classLoader: AbstractFileClassLoader = {
+ ensureClassLoader()
+ _classLoader
+ }
+ private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) {
+ /** Overridden here to try translating a simple name to the generated
+ * class name if the original attempt fails. This method is used by
+ * getResourceAsStream as well as findClass.
+ */
+ override protected def findAbstractFile(name: String): AbstractFile = {
+ super.findAbstractFile(name) match {
+ // deadlocks on startup if we try to translate names too early
+ case null if isInitializeComplete =>
+ generatedName(name) map (x => super.findAbstractFile(x)) orNull
+ case file =>
+ file
+ }
+ }
+ }
+ private def makeClassLoader(): AbstractFileClassLoader =
+ new TranslatingClassLoader(parentClassLoader match {
+ case null => ScalaClassLoader fromURLs compilerClasspath
+ case p =>
+ _runtimeClassLoader = new URLClassLoader(compilerClasspath, p) with ExposeAddUrl
+ _runtimeClassLoader
+ })
+
+ def getInterpreterClassLoader() = classLoader
+
+ // Set the current Java "context" class loader to this interpreter's class loader
+ def setContextClassLoader() = classLoader.setAsContext()
+
+ /** Given a simple repl-defined name, returns the real name of
+ * the class representing it, e.g. for "Bippy" it may return
+ * {{{
+ * $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$Bippy
+ * }}}
+ */
+ def generatedName(simpleName: String): Option[String] = {
+ if (simpleName endsWith nme.MODULE_SUFFIX_STRING) optFlatName(simpleName.init) map (_ + nme.MODULE_SUFFIX_STRING)
+ else optFlatName(simpleName)
+ }
+ def flatName(id: String) = optFlatName(id) getOrElse id
+ def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id)
+
+ def allDefinedNames = definedNameMap.keys.toList.sorted
+ def pathToType(id: String): String = pathToName(newTypeName(id))
+ def pathToTerm(id: String): String = pathToName(newTermName(id))
+ def pathToName(name: Name): String = {
+ if (definedNameMap contains name)
+ definedNameMap(name) fullPath name
+ else name.toString
+ }
+
+ /** Most recent tree handled which wasn't wholly synthetic. */
+ private def mostRecentlyHandledTree: Option[Tree] = {
+ prevRequests.reverse foreach { req =>
+ req.handlers.reverse foreach {
+ case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => return Some(x.member)
+ case _ => ()
+ }
+ }
+ None
+ }
+
+ /** Stubs for work in progress. */
+ def handleTypeRedefinition(name: TypeName, old: Request, req: Request) = {
+ for (t1 <- old.simpleNameOfType(name) ; t2 <- req.simpleNameOfType(name)) {
+ logDebug("Redefining type '%s'\n %s -> %s".format(name, t1, t2))
+ }
+ }
+
+ def handleTermRedefinition(name: TermName, old: Request, req: Request) = {
+ for (t1 <- old.compilerTypeOf get name ; t2 <- req.compilerTypeOf get name) {
+ // Printing the types here has a tendency to cause assertion errors, like
+ // assertion failed: fatal: <refinement> has owner value x, but a class owner is required
+ // so DBG is by-name now to keep it in the family. (It also traps the assertion error,
+ // but we don't want to unnecessarily risk hosing the compiler's internal state.)
+ logDebug("Redefining term '%s'\n %s -> %s".format(name, t1, t2))
+ }
+ }
+
+ def recordRequest(req: Request) {
+ if (req == null || referencedNameMap == null)
+ return
+
+ prevRequests += req
+ req.referencedNames foreach (x => referencedNameMap(x) = req)
+
+ // warning about serially defining companions. It'd be easy
+ // enough to just redefine them together but that may not always
+ // be what people want so I'm waiting until I can do it better.
+ for {
+ name <- req.definedNames filterNot (x => req.definedNames contains x.companionName)
+ oldReq <- definedNameMap get name.companionName
+ newSym <- req.definedSymbols get name
+ oldSym <- oldReq.definedSymbols get name.companionName
+ if Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }
+ } {
+ afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym."))
+ replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
+ }
+
+ // Updating the defined name map
+ req.definedNames foreach { name =>
+ if (definedNameMap contains name) {
+ if (name.isTypeName) handleTypeRedefinition(name.toTypeName, definedNameMap(name), req)
+ else handleTermRedefinition(name.toTermName, definedNameMap(name), req)
+ }
+ definedNameMap(name) = req
+ }
+ }
+
+ def replwarn(msg: => String) {
+ if (!settings.nowarnings.value)
+ printMessage(msg)
+ }
+
+ def isParseable(line: String): Boolean = {
+ beSilentDuring {
+ try parse(line) match {
+ case Some(xs) => xs.nonEmpty // parses as-is
+ case None => true // incomplete
+ }
+ catch { case x: Exception => // crashed the compiler
+ replwarn("Exception in isParseable(\"" + line + "\"): " + x)
+ false
+ }
+ }
+ }
+
+ def compileSourcesKeepingRun(sources: SourceFile*) = {
+ val run = new Run()
+ reporter.reset()
+ run compileSources sources.toList
+ (!reporter.hasErrors, run)
+ }
+
+ /** Compile an nsc SourceFile. Returns true if there are
+ * no compilation errors, or false otherwise.
+ */
+ def compileSources(sources: SourceFile*): Boolean =
+ compileSourcesKeepingRun(sources: _*)._1
+
+ /** Compile a string. Returns true if there are no
+ * compilation errors, or false otherwise.
+ */
+ def compileString(code: String): Boolean =
+ compileSources(new BatchSourceFile("<script>", code))
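+ // Usage sketch: compile a snippet without interpreting it, e.g.
+ //   val ok = intp.compileString("object Probe { def answer = 42 }")
+ // returns false (and reports errors through the reporter) if compilation fails.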
+
+ /** Build a request from the user. `trees` is `line` after being parsed.
+ */
+ private def buildRequest(line: String, trees: List[Tree]): Request = {
+ executingRequest = new Request(line, trees)
+ executingRequest
+ }
+
+ // rewriting "5 // foo" to "val x = { 5 // foo }" creates broken code because
+ // the close brace is commented out. Strip single-line comments.
+ // ... but for error message output reasons this is not used, and rather than
+ // enclosing in braces it is constructed like "val x =\n5 // foo".
+ private def removeComments(line: String): String = {
+ showCodeIfDebugging(line) // as we're about to lose our // show
+ line.lines map (s => s indexOf "//" match {
+ case -1 => s
+ case idx => s take idx
+ }) mkString "\n"
+ }
+
+ private def safePos(t: Tree, alt: Int): Int =
+ try t.pos.startOrPoint
+ catch { case _: UnsupportedOperationException => alt }
+
+ // Given an expression like 10 * 10 * 10 we receive the parent tree positioned
+ // at a '*'. So look at each subtree and find the earliest of all positions.
+ private def earliestPosition(tree: Tree): Int = {
+ var pos = Int.MaxValue
+ tree foreach { t =>
+ pos = math.min(pos, safePos(t, Int.MaxValue))
+ }
+ pos
+ }
+
+
+ private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
+ val content = indentCode(line)
+ val trees = parse(content) match {
+ case None => return Left(IR.Incomplete)
+ case Some(Nil) => return Left(IR.Error) // parse error or empty input
+ case Some(trees) => trees
+ }
+ logDebug(
+ trees map (t => {
+ // [Eugene to Paul] previously it just said `t map ...`
+ // because there was an implicit conversion from Tree to a list of Trees
+ // however Martin and I have removed the conversion
+ // (it was conflicting with the new reflection API),
+ // so I had to rewrite this a bit
+ val subs = t collect { case sub => sub }
+ subs map (t0 =>
+ " " + safePos(t0, -1) + ": " + t0.shortClass + "\n"
+ ) mkString ""
+ }) mkString "\n"
+ )
+ // If the last tree is a bare expression, pinpoint where it begins using the
+ // AST node position and snap the line off there. Rewrite the code embodied
+ // by the last tree as a ValDef instead, so we can access the value.
+ trees.last match {
+ case _:Assign => // we don't want to include assignments
+ case _:TermTree | _:Ident | _:Select => // ... but do want other unnamed terms.
+ val varName = if (synthetic) freshInternalVarName() else freshUserVarName()
+ val rewrittenLine = (
+ // In theory this would come out the same without the 1-specific test, but
+ // it's a cushion against any more sneaky parse-tree position vs. code mismatches:
+ // this way such issues will only arise on multiple-statement repl input lines,
+ // which most people don't use.
+ if (trees.size == 1) "val " + varName + " =\n" + content
+ else {
+ // The position of the last tree
+ val lastpos0 = earliestPosition(trees.last)
+ // Oh boy, the parser throws away parens so "(2+2)" is mispositioned,
+ // with increasingly hard to decipher positions as we move on to "() => 5",
+ // (x: Int) => x + 1, and more. So I abandon attempts to finesse and just
+ // look for semicolons and newlines, which I'm sure is also buggy.
+ val (raw1, raw2) = content splitAt lastpos0
+ logDebug("[raw] " + raw1 + " <---> " + raw2)
+
+ val adjustment = (raw1.reverse takeWhile (ch => (ch != ';') && (ch != '\n'))).size
+ val lastpos = lastpos0 - adjustment
+
+ // the source code split at the laboriously determined position.
+ val (l1, l2) = content splitAt lastpos
+ logDebug("[adj] " + l1 + " <---> " + l2)
+
+ val prefix = if (l1.trim == "") "" else l1 + ";\n"
+ // Note to self: val source needs to have this precise structure so that
+ // error messages print the user-submitted part without the "val res0 = " part.
+ val combined = prefix + "val " + varName + " =\n" + l2
+
+ logDebug(List(
+ " line" -> line,
+ " content" -> content,
+ " was" -> l2,
+ "combined" -> combined) map {
+ case (label, s) => label + ": '" + s + "'"
+ } mkString "\n"
+ )
+ combined
+ }
+ )
+ // Rewriting "foo ; bar ; 123"
+ // to "foo ; bar ; val resXX = 123"
+ requestFromLine(rewrittenLine, synthetic) match {
+ case Right(req) => return Right(req withOriginalLine line)
+ case x => return x
+ }
+ case _ =>
+ }
+ Right(buildRequest(line, trees))
+ }
+
+ // normalize non-public types so we don't see protected aliases like Self
+ def normalizeNonPublic(tp: Type) = tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias
+ case _ => tp
+ }
+
+ /**
+ * Interpret one line of input. All feedback, including parse errors
+ * and evaluation results, is printed via the supplied compiler's
+ * reporter. Values defined are available for future interpreted strings.
+ *
+ * The return value is whether the line was interpreted successfully,
+ * e.g. that there were no parse errors.
+ */
+ def interpret(line: String): IR.Result = interpret(line, false)
+ def interpretSynthetic(line: String): IR.Result = interpret(line, true)
+ def interpret(line: String, synthetic: Boolean): IR.Result = {
+ def loadAndRunReq(req: Request) = {
+ classLoader.setAsContext()
+ val (result, succeeded) = req.loadAndRun
+
+ /** To our displeasure, ConsoleReporter offers only printMessage,
+ * which tacks a newline on the end. Since that breaks all the
+ * output checking, we have to take one off to balance.
+ */
+ if (succeeded) {
+ if (printResults && result != "")
+ printMessage(result stripSuffix "\n")
+ else if (isReplDebug) // show quiet-mode activity
+ printMessage(result.trim.lines map ("[quiet] " + _) mkString "\n")
+
+ // Book-keeping. Have to record synthetic requests too,
+ // as they may have been issued for information, e.g. :type
+ recordRequest(req)
+ IR.Success
+ }
+ else {
+ // don't truncate stack traces
+ withoutTruncating(printMessage(result))
+ IR.Error
+ }
+ }
+
+ if (global == null) IR.Error
+ else requestFromLine(line, synthetic) match {
+ case Left(result) => result
+ case Right(req) =>
+ // null indicates a disallowed statement type; otherwise compile and
+ // fail if false (implying e.g. a type error)
+ if (req == null || !req.compile) IR.Error
+ else loadAndRunReq(req)
+ }
+ }
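+ // Usage sketch: a driver loop can dispatch on the result, e.g.
+ //   intp.interpret("val x = 1 +") match {
+ //     case IR.Success    => // value printed, request recorded
+ //     case IR.Incomplete => // prompt for more input
+ //     case IR.Error      => // compile or runtime error already reported
+ //   }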
+
+ /** Bind a specified name to a specified value. The name may
+ * later be used by expressions passed to interpret.
+ *
+ * @param name the variable name to bind
+ * @param boundType the type of the variable, as a string
+ * @param value the object value to bind to it
+ * @return an indication of whether the binding succeeded
+ */
+ def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = {
+ val bindRep = new ReadEvalPrint()
+ val run = bindRep.compile("""
+ |object %s {
+ | var value: %s = _
+ | def set(x: Any) = value = x.asInstanceOf[%s]
+ |}
+ """.stripMargin.format(bindRep.evalName, boundType, boundType)
+ )
+ bindRep.callEither("set", value) match {
+ case Left(ex) =>
+ logDebug("Set failed in bind(%s, %s, %s)".format(name, boundType, value))
+ logDebug(util.stackTraceString(ex))
+ IR.Error
+
+ case Right(_) =>
+ val line = "%sval %s = %s.value".format(modifiers map (_ + " ") mkString, name, bindRep.evalPath)
+ logDebug("Interpreting: " + line)
+ interpret(line)
+ }
+ }
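+ // Usage sketch: this is the mechanism by which the shell can expose driver-side
+ // objects to user code, e.g. (names illustrative):
+ //   intp.bind("answer", "Int", 42)   // afterwards `answer + 1` works in the REPL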
+ def directBind(name: String, boundType: String, value: Any): IR.Result = {
+ val result = bind(name, boundType, value)
+ if (result == IR.Success)
+ directlyBoundNames += newTermName(name)
+ result
+ }
+ def directBind(p: NamedParam): IR.Result = directBind(p.name, p.tpe, p.value)
+ def directBind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = directBind((name, value))
+
+ def rebind(p: NamedParam): IR.Result = {
+ val name = p.name
+ val oldType = typeOfTerm(name) orElse { return IR.Error }
+ val newType = p.tpe
+ val tempName = freshInternalVarName()
+
+ quietRun("val %s = %s".format(tempName, name))
+ quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType))
+ }
+ def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*))
+ def addImports(ids: String*): IR.Result =
+ if (ids.isEmpty) IR.Success
+ else interpret("import " + ids.mkString(", "))
+
+ def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p))
+ def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
+ def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value))
+ def bindSyntheticValue(x: Any): IR.Result = bindValue(freshInternalVarName(), x)
+ def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x)
+ def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x)
+
+ /** Reset this interpreter, forgetting all user-specified requests. */
+ def reset() {
+ clearExecutionWrapper()
+ resetClassLoader()
+ resetAllCreators()
+ prevRequests.clear()
+ referencedNameMap.clear()
+ definedNameMap.clear()
+ virtualDirectory.delete()
+ virtualDirectory.create()
+ }
+
+ /** This instance is no longer needed, so release any resources
+ * it is using. The reporter's output gets flushed.
+ */
+ def close() {
+ reporter.flush()
+ classServer.stop()
+ }
+
+ /**
+ * Captures the session names (which are set by system properties) once, instead of for each line.
+ */
+ object FixedSessionNames {
+ val lineName = sessionNames.line
+ val readName = sessionNames.read
+ val evalName = sessionNames.eval
+ val printName = sessionNames.print
+ val resultName = sessionNames.result
+ }
+
+ /** Here is where we:
+ *
+ * 1) Read some source code, and put it in the "read" object.
+ * 2) Evaluate the read object, and put the result in the "eval" object.
+ * 3) Create a String for human consumption, and put it in the "print" object.
+ *
+ * Read! Eval! Print! Some of that not yet centralized here.
+ */
+ class ReadEvalPrint(val lineId: Int) {
+ def this() = this(freshLineId())
+
+ private var lastRun: Run = _
+ private var evalCaught: Option[Throwable] = None
+ private var conditionalWarnings: List[ConditionalWarning] = Nil
+
+ val packageName = FixedSessionNames.lineName + lineId
+ val readName = FixedSessionNames.readName
+ val evalName = FixedSessionNames.evalName
+ val printName = FixedSessionNames.printName
+ val resultName = FixedSessionNames.resultName
+
+ def bindError(t: Throwable) = {
+ // Immediately throw the exception if we are asked to propagate them
+ if (propagateExceptions) {
+ throw unwrap(t)
+ }
+ if (!bindExceptions) // avoid looping if already binding
+ throw t
+
+ val unwrapped = unwrap(t)
+ withLastExceptionLock[String]({
+ directBind[Throwable]("lastException", unwrapped)(tagOfThrowable, classTag[Throwable])
+ util.stackTraceString(unwrapped)
+ }, util.stackTraceString(unwrapped))
+ }
+
+ // TODO: split it out into a package object and a regular
+ // object and we can do that much less wrapping.
+ def packageDecl = "package " + packageName
+
+ def pathTo(name: String) = packageName + "." + name
+ def packaged(code: String) = packageDecl + "\n\n" + code
+
+ def readPath = pathTo(readName)
+ def evalPath = pathTo(evalName)
+ def printPath = pathTo(printName)
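+ // Illustration (assuming the default session names): for lineId 3,
+ //   packageName == "$line3", readPath == "$line3.$read", evalPath == "$line3.$eval"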
+
+ def call(name: String, args: Any*): AnyRef = {
+ val m = evalMethod(name)
+ logDebug("Invoking: " + m)
+ if (args.nonEmpty)
+ logDebug(" with args: " + args.mkString(", "))
+
+ m.invoke(evalClass, args.map(_.asInstanceOf[AnyRef]): _*)
+ }
+
+ def callEither(name: String, args: Any*): Either[Throwable, AnyRef] =
+ try Right(call(name, args: _*))
+ catch { case ex: Throwable => Left(ex) }
+
+ def callOpt(name: String, args: Any*): Option[AnyRef] =
+ try Some(call(name, args: _*))
+ catch { case ex: Throwable => bindError(ex) ; None }
+
+ class EvalException(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { }
+
+ private def evalError(path: String, ex: Throwable) =
+ throw new EvalException("Failed to load '" + path + "': " + ex.getMessage, ex)
+
+ private def load(path: String): Class[_] = {
+ try Class.forName(path, true, classLoader)
+ catch { case ex: Throwable => evalError(path, unwrap(ex)) }
+ }
+
+ lazy val evalClass = load(evalPath)
+ lazy val evalValue = callEither(resultName) match {
+ case Left(ex) => evalCaught = Some(ex) ; None
+ case Right(result) => Some(result)
+ }
+
+ def compile(source: String): Boolean = compileAndSaveRun("<console>", source)
+
+ /** The innermost object inside the wrapper, found by
+ * following accessPath into the outer one.
+ */
+ def resolvePathToSymbol(accessPath: String): Symbol = {
+ // val readRoot = getRequiredModule(readPath) // the outermost wrapper
+ // MATEI: Changed this to getClassByName because the root object is no longer a module (Scala singleton object)
+
+ val readRoot = rootMirror.getClassByName(newTypeName(readPath)) // the outermost wrapper
+ (accessPath split '.').foldLeft(readRoot: Symbol) {
+ case (sym, "") => sym
+ case (sym, name) => afterTyper(termMember(sym, name))
+ }
+ }
+ /** We get a bunch of repeated warnings for reasons I haven't
+ * entirely figured out yet. For now, squash.
+ */
+ private def updateRecentWarnings(run: Run) {
+ def loop(xs: List[(Position, String)]): List[(Position, String)] = xs match {
+ case Nil => Nil
+ case ((pos, msg)) :: rest =>
+ val filtered = rest filter { case (pos0, msg0) =>
+ (msg != msg0) || (pos.lineContent.trim != pos0.lineContent.trim) || {
+ // same messages and same line content after whitespace removal
+ // but we want to let through multiple warnings on the same line
+ // from the same run. The untrimmed line will be the same since
+ // there's no whitespace indenting to distort it.
+ (pos.lineContent == pos0.lineContent)
+ }
+ }
+ ((pos, msg)) :: loop(filtered)
+ }
+ // PRASHANT: This leads to a NoSuchMethodError for _.warnings. Yet to figure out its purpose.
+ // val warnings = loop(run.allConditionalWarnings flatMap (_.warnings))
+ // if (warnings.nonEmpty)
+ // mostRecentWarnings = warnings
+ }
+ private def evalMethod(name: String) = evalClass.getMethods filter (_.getName == name) match {
+ case Array(method) => method
+ case xs => sys.error("Internal error: eval object " + evalClass + ", " + xs.mkString("\n", "\n", ""))
+ }
+ private def compileAndSaveRun(label: String, code: String) = {
+ showCodeIfDebugging(code)
+ val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code)))
+ updateRecentWarnings(run)
+ lastRun = run
+ success
+ }
+ }
+
+ /** One line of code submitted by the user for interpretation */
+ // private
+ class Request(val line: String, val trees: List[Tree]) {
+ val reqId = nextReqId()
+ val lineRep = new ReadEvalPrint()
+
+ private var _originalLine: String = null
+ def withOriginalLine(s: String): this.type = { _originalLine = s ; this }
+ def originalLine = if (_originalLine == null) line else _originalLine
+
+ /** handlers for each tree in this request */
+ val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
+ def defHandlers = handlers collect { case x: MemberDefHandler => x }
+
+ /** all (public) names defined by these statements */
+ val definedNames = handlers flatMap (_.definedNames)
+
+ /** list of names used by this expression */
+ val referencedNames: List[Name] = handlers flatMap (_.referencedNames)
+
+ /** def and val names */
+ def termNames = handlers flatMap (_.definesTerm)
+ def typeNames = handlers flatMap (_.definesType)
+ def definedOrImported = handlers flatMap (_.definedOrImported)
+ def definedSymbolList = defHandlers flatMap (_.definedSymbols)
+
+ def definedTypeSymbol(name: String) = definedSymbols(newTypeName(name))
+ def definedTermSymbol(name: String) = definedSymbols(newTermName(name))
+
+ val definedClasses = handlers.exists {
+ case _: ClassHandler => true
+ case _ => false
+ }
+
+ /** Code to import bound names from previous lines - accessPath is code to
+ * append to objectName to access anything bound by request.
+ */
+ val SparkComputedImports(importsPreamble, importsTrailer, accessPath) =
+ importsCode(referencedNames.toSet, definedClasses)
+
+ /** Code to access a variable with the specified name */
+ def fullPath(vname: String) = {
+ // lineRep.readPath + accessPath + ".`%s`".format(vname)
+ lineRep.readPath + ".INSTANCE" + accessPath + ".`%s`".format(vname)
+ }
+ /** Same as fullPath, but after it has been flattened, so:
+ * $line5.$iw.$iw.$iw.Bippy // fullPath
+ * $line5.$iw$$iw$$iw$Bippy // fullFlatName
+ */
+ def fullFlatName(name: String) =
+ // lineRep.readPath + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name
+ lineRep.readPath + ".INSTANCE" + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name
+
+ /** The unmangled symbol name, but supplemented with line info. */
+ def disambiguated(name: Name): String = name + " (in " + lineRep + ")"
+
+ /** Code to access a variable with the specified name */
+ def fullPath(vname: Name): String = fullPath(vname.toString)
+
+ /** the line of code to compute */
+ def toCompute = line
+
+ /** generate the source code for the object that computes this request */
+ private object ObjectSourceCode extends CodeAssembler[MemberHandler] {
+ def path = pathToTerm("$intp")
+ def envLines = {
+ if (!isReplPower) Nil // power mode only for now
+ // $intp is not bound; punt, but include the line.
+ else if (path == "$intp") List(
+ "def $line = " + tquoted(originalLine),
+ "def $trees = Nil"
+ )
+ else List(
+ "def $line = " + tquoted(originalLine),
+ "def $req = %s.requestForReqId(%s).orNull".format(path, reqId),
+ "def $trees = if ($req eq null) Nil else $req.trees".format(lineRep.readName, path, reqId)
+ )
+ }
+
+ val preamble = """
+ |class %s extends Serializable {
+ | %s%s%s
+ """.stripMargin.format(lineRep.readName, envLines.map(" " + _ + ";\n").mkString, importsPreamble, indentCode(toCompute))
+ val postamble = importsTrailer + "\n}" + "\n" +
+ "object " + lineRep.readName + " {\n" +
+ " val INSTANCE = new " + lineRep.readName + "();\n" +
+ "}\n"
+ val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
+
+ /*
+ val preamble = """
+ |object %s extends Serializable {
+ |%s%s%s
+ """.stripMargin.format(lineRep.readName, envLines.map(" " + _ + ";\n").mkString, importsPreamble, indentCode(toCompute))
+ val postamble = importsTrailer + "\n}"
+ val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
+ */
+
+ }
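+ // Illustration (sketch, assuming default session names): for the input
+ // `val x = 5` on line 3, the compiled request looks roughly like
+ //   package $line3
+ //   class $read extends Serializable { ...imports...; val x = 5 }
+ //   object $read { val INSTANCE = new $read() }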
+
+ private object ResultObjectSourceCode extends CodeAssembler[MemberHandler] {
+ /** We only want to generate this code when the result
+ * is a value which can be referred to as-is.
+ */
+ val evalResult =
+ if (!handlers.last.definesValue) ""
+ else handlers.last.definesTerm match {
+ case Some(vname) if typeOf contains vname =>
+ "lazy val %s = %s".format(lineRep.resultName, fullPath(vname))
+ case _ => ""
+ }
+
+ // first line evaluates object to make sure constructor is run
+ // initial "" so later code can uniformly be: + etc
+ val preamble = """
+ |object %s {
+ | %s
+ | val %s: String = %s {
+ | %s
+ | (""
+ """.stripMargin.format(
+ lineRep.evalName, evalResult, lineRep.printName,
+ executionWrapper, lineRep.readName + ".INSTANCE" + accessPath
+ )
+ val postamble = """
+ | )
+ | }
+ |}
+ """.stripMargin
+ val generate = (m: MemberHandler) => m resultExtractionCode Request.this
+ }
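+ // Companion illustration (sketch): the result extractor is a separate object, roughly
+ //   object $eval {
+ //     lazy val $result = $line3.$read.INSTANCE...x  // only if the line defines a value
+ //     val $print: String = { ...; ("" + ...) }      // the text the REPL prints
+ //   }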
+
+ // get it
+ def getEvalTyped[T] : Option[T] = getEval map (_.asInstanceOf[T])
+ def getEval: Option[AnyRef] = {
+ // ensure it has been compiled
+ compile
+ // try to load it and call the value method
+ lineRep.evalValue filterNot (_ == null)
+ }
+
+ /** Compile the object file. Returns whether the compilation succeeded.
+ * If all goes well, the "types" map is computed. */
+ lazy val compile: Boolean = {
+ // error counting is wrong, hence interpreter may overlook failure - so we reset
+ reporter.reset()
+
+ // compile the object containing the user's code
+ lineRep.compile(ObjectSourceCode(handlers)) && {
+ // extract and remember types
+ typeOf
+ typesOfDefinedTerms
+
+ // Assign symbols to the original trees
+ // TODO - just use the new trees.
+ defHandlers foreach { dh =>
+ val name = dh.member.name
+ definedSymbols get name foreach { sym =>
+ dh.member setSymbol sym
+ logDebug("Set symbol of " + name + " to " + sym.defString)
+ }
+ }
+
+ // compile the result-extraction object
+ withoutWarnings(lineRep compile ResultObjectSourceCode(handlers))
+ }
+ }
+
+ lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
+ def applyToResultMember[T](name: Name, f: Symbol => T) = afterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
+
+ /* typeOf lookup with encoding */
+ def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
+ def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbol.simpleName)
+
+ private def typeMap[T](f: Type => T) =
+ mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x)))
+
+ /** Types of variables defined by this request. */
+ lazy val compilerTypeOf = typeMap[Type](x => x) withDefaultValue NoType
+ /** String representations of same. */
+ lazy val typeOf = typeMap[String](tp => afterTyper(tp.toString))
+
+ // lazy val definedTypes: Map[Name, Type] = {
+ // typeNames map (x => x -> afterTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap
+ // }
+ lazy val definedSymbols = (
+ termNames.map(x => x -> applyToResultMember(x, x => x)) ++
+ typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect)
+ ).toMap[Name, Symbol] withDefaultValue NoSymbol
+
+ lazy val typesOfDefinedTerms = mapFrom[Name, Name, Type](termNames)(x => applyToResultMember(x, _.tpe))
+
+ /** load and run the code using reflection */
+ def loadAndRun: (String, Boolean) = {
+ try { ("" + (lineRep call sessionNames.print), true) }
+ catch { case ex: Throwable => (lineRep.bindError(ex), false) }
+ }
+
+ override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
+ }
+
+ /** Returns the name of the most recent interpreter result.
+ * Mostly this exists so you can conveniently invoke methods on
+ * the previous result.
+ */
+ def mostRecentVar: String =
+ if (mostRecentlyHandledTree.isEmpty) ""
+ else "" + (mostRecentlyHandledTree.get match {
+ case x: ValOrDefDef => x.name
+ case Assign(Ident(name), _) => name
+ case ModuleDef(_, name, _) => name
+ case _ => naming.mostRecentVar
+ })
+
+ private var mostRecentWarnings: List[(global.Position, String)] = Nil
+ def lastWarnings = mostRecentWarnings
+
+ def treesForRequestId(id: Int): List[Tree] =
+ requestForReqId(id).toList flatMap (_.trees)
+
+ def requestForReqId(id: Int): Option[Request] =
+ if (executingRequest != null && executingRequest.reqId == id) Some(executingRequest)
+ else prevRequests find (_.reqId == id)
+
+ def requestForName(name: Name): Option[Request] = {
+ assert(definedNameMap != null, "definedNameMap is null")
+ definedNameMap get name
+ }
+
+ def requestForIdent(line: String): Option[Request] =
+ requestForName(newTermName(line)) orElse requestForName(newTypeName(line))
+
+ def requestHistoryForName(name: Name): List[Request] =
+ prevRequests.toList.reverse filter (_.definedNames contains name)
+
+
+ def definitionForName(name: Name): Option[MemberHandler] =
+ requestForName(name) flatMap { req =>
+ req.handlers find (_.definedNames contains name)
+ }
+
+ def valueOfTerm(id: String): Option[AnyRef] =
+ requestForName(newTermName(id)) flatMap (_.getEval)
+
+ def classOfTerm(id: String): Option[JClass] =
+ valueOfTerm(id) map (_.getClass)
+
+ def typeOfTerm(id: String): Type = newTermName(id) match {
+ case nme.ROOTPKG => RootClass.tpe
+ case name => requestForName(name).fold(NoType: Type)(_ compilerTypeOf name)
+ }
+
+ def symbolOfType(id: String): Symbol =
+ requestForName(newTypeName(id)).fold(NoSymbol: Symbol)(_ definedTypeSymbol id)
+
+ def symbolOfTerm(id: String): Symbol =
+ requestForIdent(newTermName(id)).fold(NoSymbol: Symbol)(_ definedTermSymbol id)
+
+ def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
+ classOfTerm(id) flatMap { clazz =>
+ new RichClass(clazz).supers find(c => !(new RichClass(c).isScalaAnonymous)) map { nonAnon =>
+ (nonAnon, runtimeTypeOfTerm(id))
+ }
+ }
+ }
+
+ def runtimeTypeOfTerm(id: String): Type = {
+ typeOfTerm(id) andAlso { tpe =>
+ val clazz = classOfTerm(id) getOrElse { return NoType }
+ val staticSym = tpe.typeSymbol
+ val runtimeSym = getClassIfDefined(clazz.getName)
+
+ if ((runtimeSym != NoSymbol) && (runtimeSym != staticSym) && (runtimeSym isSubClass staticSym))
+ runtimeSym.info
+ else NoType
+ }
+ }
+ def cleanMemberDecl(owner: Symbol, member: Name): Type = afterTyper {
+ normalizeNonPublic {
+ owner.info.nonPrivateDecl(member).tpe match {
+ case NullaryMethodType(tp) => tp
+ case tp => tp
+ }
+ }
+ }
+
+ object exprTyper extends {
+ val repl: SparkIMain.this.type = imain
+ } with SparkExprTyper { }
+
+ def parse(line: String): Option[List[Tree]] = exprTyper.parse(line)
+
+ def symbolOfLine(code: String): Symbol =
+ exprTyper.symbolOfLine(code)
+
+ def typeOfExpression(expr: String, silent: Boolean = true): Type =
+ exprTyper.typeOfExpression(expr, silent)
+
+ protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
+ protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
+
+ def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName
+ def definedTypes = onlyTypes(allDefinedNames)
+ def definedSymbols = prevRequestList.flatMap(_.definedSymbols.values).toSet[Symbol]
+ def definedSymbolList = prevRequestList flatMap (_.definedSymbolList) filterNot (s => isInternalTermName(s.name))
+
+ // Terms with user-given names (i.e. not res0 and not synthetic)
+ def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x))
+
+ private def findName(name: Name) = definedSymbols find (_.name == name) getOrElse NoSymbol
+
+ /** Translate a repl-defined identifier into a Symbol.
+ */
+ def apply(name: String): Symbol =
+ types(name) orElse terms(name)
+
+ def types(name: String): Symbol = {
+ val tpname = newTypeName(name)
+ findName(tpname) orElse getClassIfDefined(tpname)
+ }
+ def terms(name: String): Symbol = {
+ val termname = newTermName(name)
+ findName(termname) orElse getModuleIfDefined(termname)
+ }
+ // [Eugene to Paul] possibly you could make use of TypeTags here
+ def types[T: ClassTag] : Symbol = types(classTag[T].runtimeClass.getName)
+ def terms[T: ClassTag] : Symbol = terms(classTag[T].runtimeClass.getName)
+ def apply[T: ClassTag] : Symbol = apply(classTag[T].runtimeClass.getName)
+
+ def classSymbols = allDefSymbols collect { case x: ClassSymbol => x }
+ def methodSymbols = allDefSymbols collect { case x: MethodSymbol => x }
+
+ /** the previous requests this interpreter has processed */
+ private var executingRequest: Request = _
+ private val prevRequests = mutable.ListBuffer[Request]()
+ private val referencedNameMap = mutable.Map[Name, Request]()
+ private val definedNameMap = mutable.Map[Name, Request]()
+ private val directlyBoundNames = mutable.Set[Name]()
+
+ def allHandlers = prevRequestList flatMap (_.handlers)
+ def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x }
+ def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol)
+
+ def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last
+ def prevRequestList = prevRequests.toList
+ def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct
+ def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames)
+ def importHandlers = allHandlers collect { case x: ImportHandler => x }
+
+ def visibleTermNames: List[Name] = definedTerms ++ importedTerms distinct
+
+ /** Another entry point for tab-completion, ids in scope */
+ def unqualifiedIds = visibleTermNames map (_.toString) filterNot (_ contains "$") sorted
+
+ /** Parse the ScalaSig to find type aliases */
+ def aliasForType(path: String) = ByteCode.aliasForType(path)
+
+ def withoutUnwrapping(op: => Unit): Unit = {
+ val saved = isettings.unwrapStrings
+ isettings.unwrapStrings = false
+ try op
+ finally isettings.unwrapStrings = saved
+ }
+
+ def symbolDefString(sym: Symbol) = {
+ TypeStrings.quieter(
+ afterTyper(sym.defString),
+ sym.owner.name + ".this.",
+ sym.owner.fullName + "."
+ )
+ }
+
+ def showCodeIfDebugging(code: String) {
+ /** Secret bookcase entrance for repl debuggers: end the line
+ * with "// show" and see what's going on.
+ */
+ def isShow = code.lines exists (_.trim endsWith "// show")
+ def isShowRaw = code.lines exists (_.trim endsWith "// raw")
+
+ // old style
+ beSilentDuring(parse(code)) foreach { ts =>
+ ts foreach { t =>
+ if (isShow || isShowRaw)
+ withoutUnwrapping(echo(asCompactString(t)))
+ else
+ withoutUnwrapping(logDebug(asCompactString(t)))
+ }
+ }
+ }
+
+ // debugging
+ def debugging[T](msg: String)(res: T) = {
+ logDebug(msg + " " + res)
+ res
+ }
+}
+
+/** Utility methods for the Interpreter. */
+object SparkIMain {
+ // The two name forms this is catching are the two sides of this assignment:
+ //
+ // $line3.$read.$iw.$iw.Bippy =
+ // $line3.$read$$iw$$iw$Bippy@4a6a00ca
+ private def removeLineWrapper(s: String) = s.replaceAll("""\$line\d+[./]\$(read|eval|print)[$.]""", "")
+ private def removeIWPackages(s: String) = s.replaceAll("""\$(iw|iwC|read|eval|print)[$.]""", "")
+ private def removeSparkVals(s: String) = s.replaceAll("""\$VAL[0-9]+[$.]""", "")
+
+ def stripString(s: String) = removeSparkVals(removeIWPackages(removeLineWrapper(s)))
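+ // Example (follows from the regexes above):
+ //   stripString("$line3.$read$$iw$$iw$Bippy@4a6a00ca") == "Bippy@4a6a00ca"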
+
+ trait CodeAssembler[T] {
+ def preamble: String
+ def generate: T => String
+ def postamble: String
+
+ def apply(contributors: List[T]): String = stringFromWriter { code =>
+ code println preamble
+ contributors map generate foreach (code println _)
+ code println postamble
+ }
+ }
+
+ trait StrippingWriter {
+ def isStripping: Boolean
+ def stripImpl(str: String): String
+ def strip(str: String): String = if (isStripping) stripImpl(str) else str
+ }
+ trait TruncatingWriter {
+ def maxStringLength: Int
+ def isTruncating: Boolean
+ def truncate(str: String): String = {
+ if (isTruncating && (maxStringLength != 0 && str.length > maxStringLength))
+ (str take maxStringLength - 3) + "..."
+ else str
+ }
+ }
+ abstract class StrippingTruncatingWriter(out: JPrintWriter)
+ extends JPrintWriter(out)
+ with StrippingWriter
+ with TruncatingWriter {
+ self =>
+
+ def clean(str: String): String = truncate(strip(str))
+ override def write(str: String) = super.write(clean(str))
+ }
+ class ReplStrippingWriter(intp: SparkIMain) extends StrippingTruncatingWriter(intp.out) {
+ import intp._
+ def maxStringLength = isettings.maxPrintString
+ def isStripping = isettings.unwrapStrings
+ def isTruncating = reporter.truncationOK
+
+ def stripImpl(str: String): String = naming.unmangle(str)
+ }
+
+ class ReplReporter(intp: SparkIMain) extends ConsoleReporter(intp.settings, null, new ReplStrippingWriter(intp)) {
+ override def printMessage(msg: String) {
+ // Avoiding deadlock when the compiler starts logging before
+ // the lazy val is done.
+ if (intp.isInitializeComplete) {
+ if (intp.totalSilence) ()
+ else super.printMessage(msg)
+ }
+ else Console.println(msg)
+ }
+ }
+}
+
+class SparkISettings(intp: SparkIMain) extends Logging {
+ /** A list of paths where :load should look */
+ var loadPath = List(".")
+
+ /** Set this to true to see repl machinery under -Yrich-exceptions.
+ */
+ var showInternalStackTraces = false
+
+ /** The maximum length of toString to use when printing the result
+ * of an evaluation. 0 means no maximum. If a printout requires
+ * more than this number of characters, then the printout is
+ * truncated.
+ */
+ var maxPrintString = 800
+
+ /** The maximum number of completion candidates to print for tab
+ * completion without requiring confirmation.
+ */
+ var maxAutoprintCompletion = 250
+
+ /** String unwrapping can be disabled if it is causing issues.
+ * Setting this to false means you will see Strings like "$iw.$iw.".
+ */
+ var unwrapStrings = true
+
+ def deprecation_=(x: Boolean) = {
+ val old = intp.settings.deprecation.value
+ intp.settings.deprecation.value = x
+ if (!old && x) logDebug("Enabled -deprecation output.")
+ else if (old && !x) logDebug("Disabled -deprecation output.")
+ }
+
+ def deprecation: Boolean = intp.settings.deprecation.value
+
+ def allSettings = Map(
+ "maxPrintString" -> maxPrintString,
+ "maxAutoprintCompletion" -> maxAutoprintCompletion,
+ "unwrapStrings" -> unwrapStrings,
+ "deprecation" -> deprecation
+ )
+
+ private def allSettingsString =
+ allSettings.toList sortBy (_._1) map { case (k, v) => " " + k + " = " + v + "\n" } mkString
+
+ override def toString = """
+ | SparkISettings {
+ | %s
+ | }""".stripMargin.format(allSettingsString)
+}
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkImports.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkImports.scala
new file mode 100644
index 0000000000..193a42dcde
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkImports.scala
@@ -0,0 +1,238 @@
+// scalastyle:off
+
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package org.apache.spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+import scala.collection.{ mutable, immutable }
+
+trait SparkImports {
+ self: SparkIMain =>
+
+ import global._
+ import definitions.{ ScalaPackage, JavaLangPackage, PredefModule }
+ import memberHandlers._
+
+ def isNoImports = settings.noimports.value
+ def isNoPredef = settings.nopredef.value
+
+ /** Synthetic import handlers for the language defined imports. */
+ private def makeWildcardImportHandler(sym: Symbol): ImportHandler = {
+ val hd :: tl = sym.fullName.split('.').toList map newTermName
+ val tree = Import(
+ tl.foldLeft(Ident(hd): Tree)((x, y) => Select(x, y)),
+ ImportSelector.wildList
+ )
+ tree setSymbol sym
+ new ImportHandler(tree)
+ }
+
+ /** Symbols whose contents are language-defined to be imported. */
+ def languageWildcardSyms: List[Symbol] = List(JavaLangPackage, ScalaPackage, PredefModule)
+ def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe)
+ def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler
+
+ def allImportedNames = importHandlers flatMap (_.importedNames)
+ def importedTerms = onlyTerms(allImportedNames)
+ def importedTypes = onlyTypes(allImportedNames)
+
+ /** Types which have been wildcard imported, such as:
+ * val x = "abc" ; import x._ // type java.lang.String
+ * import java.lang.String._ // object java.lang.String
+ *
+ * Used by tab completion.
+ *
+ * XXX right now this gets import x._ and import java.lang.String._,
+ * but doesn't figure out import String._. There's a lot of ad hoc
+ * scope twiddling which should be swept away in favor of digging
+ * into the compiler scopes.
+ */
+ def sessionWildcards: List[Type] = {
+ importHandlers filter (_.importsWildcard) map (_.targetType) distinct
+ }
+ def wildcardTypes = languageWildcards ++ sessionWildcards
+
+ def languageSymbols = languageWildcardSyms flatMap membersAtPickler
+ def sessionImportedSymbols = importHandlers flatMap (_.importedSymbols)
+ def importedSymbols = languageSymbols ++ sessionImportedSymbols
+ def importedTermSymbols = importedSymbols collect { case x: TermSymbol => x }
+ def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x }
+ def implicitSymbols = importedSymbols filter (_.isImplicit)
+
+ def importedTermNamed(name: String): Symbol =
+ importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol
+
+ /** Tuples of (source, imported symbols) in the order they were imported.
+ */
+ def importedSymbolsBySource: List[(Symbol, List[Symbol])] = {
+ val lang = languageWildcardSyms map (sym => (sym, membersAtPickler(sym)))
+ val session = importHandlers filter (_.targetType != NoType) map { mh =>
+ (mh.targetType.typeSymbol, mh.importedSymbols)
+ }
+
+ lang ++ session
+ }
+ def implicitSymbolsBySource: List[(Symbol, List[Symbol])] = {
+ importedSymbolsBySource map {
+ case (k, vs) => (k, vs filter (_.isImplicit))
+ } filterNot (_._2.isEmpty)
+ }
+
+ /** Compute imports that allow definitions from previous
+ * requests to be visible in a new request. Returns
+ * three pieces of related code:
+ *
+ * 1. An initial code fragment that should go before
+ * the code of the new request.
+ *
+ * 2. A code fragment that should go after the code
+ * of the new request.
+ *
+ * 3. An access path which can be traversed to access
+ * any bindings inside code wrapped by #1 and #2 .
+ *
+ * The argument is a set of Names that need to be imported.
+ *
+ * Limitations: This method is not as precise as it could be.
+ * (1) It does not process wildcard imports to see what exactly
+ * they import.
+ * (2) If it imports any names from a request, it imports all
+ * of them, which is not really necessary.
+ * (3) It imports multiple same-named implicits, but only the
+ * last one imported is actually usable.
+ */
+ case class SparkComputedImports(prepend: String, append: String, access: String)
+ def fallback = System.getProperty("spark.repl.fallback", "false").toBoolean
+
+ protected def importsCode(wanted: Set[Name], definedClass: Boolean): SparkComputedImports = {
+ /** Narrow down the list of requests from which imports
+ * should be taken. Removes requests which cannot contribute
+ * useful imports for the specified set of wanted names.
+ */
+ case class ReqAndHandler(req: Request, handler: MemberHandler) { }
+
+ def reqsToUse: List[ReqAndHandler] = {
+ /** Loop through a list of MemberHandlers and select which ones to keep.
+ * 'wanted' is the set of names that need to be imported.
+ */
+ def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
+ // Single symbol imports might be implicits! See bug #1752. Rather than
+ // try to finesse this, we will mimic all imports for now.
+ def keepHandler(handler: MemberHandler) = handler match {
+ /* This case clause tries to "precisely" import only what is required. In doing so
+ * it may miss some implicits, because implicits are not tracked in `wanted`. It is
+ * therefore suitable when defining classes, where, AFAIK, implicits are not
+ * needed. */
+ case h: ImportHandler if definedClass && !fallback =>
+ h.importedNames.exists(x => wanted.contains(x))
+ case _: ImportHandler => true
+ case x => x.definesImplicit || (x.definedNames exists wanted)
+ }
+
+ reqs match {
+ case Nil => Nil
+ case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
+ case rh :: rest =>
+ import rh.handler._
+ val newWanted = wanted ++ referencedNames -- definedNames -- importedNames
+ rh :: select(rest, newWanted)
+ }
+ }
+
+ /** Flatten the handlers out and pair each with the original request */
+ select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
+ }
+
+ val code, trailingBraces, accessPath = new StringBuilder
+ val currentImps = mutable.HashSet[Name]()
+
+ // add code for a new object to hold some imports
+ def addWrapper() {
+ val impname = nme.INTERPRETER_IMPORT_WRAPPER
+ code append "class %sC extends Serializable {\n".format(impname)
+ trailingBraces append "}\nval " + impname + " = new " + impname + "C;\n"
+ accessPath append ("." + impname)
+
+ currentImps.clear
+ // code append "object %s {\n".format(impname)
+ // trailingBraces append "}\n"
+ // accessPath append ("." + impname)
+
+ // currentImps.clear
+ }
+
+ addWrapper()
+
+ // loop through previous requests, adding imports for each one
+ for (ReqAndHandler(req, handler) <- reqsToUse) {
+ handler match {
+ // If the user entered an import, then just use it; add an import wrapping
+ // level if the import might conflict with some other import
+ case x: ImportHandler =>
+ if (x.importsWildcard || currentImps.exists(x.importedNames contains _))
+ addWrapper()
+
+ code append (x.member + "\n")
+
+ // give wildcard imports an import wrapper all to their own
+ if (x.importsWildcard) addWrapper()
+ else currentImps ++= x.importedNames
+
+ // For other requests, import each defined name.
+ // import them explicitly instead of with _, so that
+ // ambiguity errors will not be generated. Also, quote
+ // the name of the variable, so that we don't need to
+ // handle quoting keywords separately.
+ case x: ClassHandler if !fallback =>
+ // I am trying to guess if the import is a defined class.
+ // This is an ugly hack, and I am not 100% sure of the consequences.
+ // Here we let everything but "defined classes" use the val-based import;
+ // otherwise the remote executor tries to pull in the
+ // classes involved and may fail.
+ for (imv <- x.definedNames) {
+ val objName = req.lineRep.readPath
+ code.append("import " + objName + ".INSTANCE" + req.accessPath + ".`" + imv + "`\n")
+ }
+
+ case x =>
+ for (imv <- x.definedNames) {
+ if (currentImps contains imv) addWrapper()
+ val objName = req.lineRep.readPath
+ val valName = "$VAL" + newValId()
+
+ if (!code.toString.endsWith(".`" + imv + "`;\n")) { // skip if this name was just imported above
+ code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
+ code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
+ }
+ // code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
+ // code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
+ // code append ("import " + (req fullPath imv) + "\n")
+ currentImps += imv
+ }
+ }
+ }
+ // add one extra wrapper, to prevent warnings in the common case of
+ // redefining the value bound in the last interpreter request.
+ addWrapper()
+ SparkComputedImports(code.toString, trailingBraces.toString, accessPath.toString)
+ }
+
+ private def allReqAndHandlers =
+ prevRequestList flatMap (req => req.handlers map (req -> _))
+
+ private def membersAtPickler(sym: Symbol): List[Symbol] =
+ beforePickler(sym.info.nonPrivateMembers.toList)
+
+ private var curValId = 0
+
+ private def newValId(): Int = {
+ curValId += 1
+ curValId
+ }
+}
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala
new file mode 100644
index 0000000000..3159b70008
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala
@@ -0,0 +1,377 @@
+// scalastyle:off
+
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package org.apache.spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+import scala.tools.jline._
+import scala.tools.jline.console.completer._
+import Completion._
+import scala.collection.mutable.ListBuffer
+import org.apache.spark.Logging
+
+// REPL completor - queries supplied interpreter for valid
+// completions based on current contents of buffer.
+class SparkJLineCompletion(val intp: SparkIMain) extends Completion with CompletionOutput with Logging {
+ val global: intp.global.type = intp.global
+ import global._
+ import definitions.{ PredefModule, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
+ import rootMirror.{ RootClass, getModuleIfDefined }
+ type ExecResult = Any
+ import intp.{ debugging }
+
+ // verbosity goes up with consecutive tabs
+ private var verbosity: Int = 0
+ def resetVerbosity() = verbosity = 0
+
+ def getSymbol(name: String, isModule: Boolean) = (
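+ // NOTE: both branches below are identical; this quirk appears to be
+ // inherited from the upstream Scala REPL and is preserved as-is.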
+ if (isModule) getModuleIfDefined(name)
+ else getModuleIfDefined(name)
+ )
+ def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe
+ def typeOf(name: String) = getType(name, false)
+ def moduleOf(name: String) = getType(name, true)
+
+ trait CompilerCompletion {
+ def tp: Type
+ def effectiveTp = tp match {
+ case MethodType(Nil, resType) => resType
+ case NullaryMethodType(resType) => resType
+ case _ => tp
+ }
+
+ // For some reason Any's members don't show up in subclasses, which
+ // we need so that 5.<tab> offers asInstanceOf etc.
+ private def anyMembers = AnyClass.tpe.nonPrivateMembers
+ def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
+
+ def tos(sym: Symbol): String = sym.decodedName
+ def memberNamed(s: String) = afterTyper(effectiveTp member newTermName(s))
+ def hasMethod(s: String) = memberNamed(s).isMethod
+
+ // XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
+ // compiler to crash for reasons not yet known.
+ def members = afterTyper((effectiveTp.nonPrivateMembers.toList ++ anyMembers) filter (_.isPublic))
+ def methods = members.toList filter (_.isMethod)
+ def packages = members.toList filter (_.isPackage)
+ def aliases = members.toList filter (_.isAliasType)
+
+ def memberNames = members map tos
+ def methodNames = methods map tos
+ def packageNames = packages map tos
+ def aliasNames = aliases map tos
+ }
+
+ object NoTypeCompletion extends TypeMemberCompletion(NoType) {
+ override def memberNamed(s: String) = NoSymbol
+ override def members = Nil
+ override def follow(s: String) = None
+ override def alternativesFor(id: String) = Nil
+ }
+
+ object TypeMemberCompletion {
+ def apply(tp: Type, runtimeType: Type, param: NamedParam): TypeMemberCompletion = {
+ new TypeMemberCompletion(tp) {
+ var upgraded = false
+ lazy val upgrade = {
+ intp rebind param
+ intp.reporter.printMessage("\nRebinding stable value %s from %s to %s".format(param.name, tp, param.tpe))
+ upgraded = true
+ new TypeMemberCompletion(runtimeType)
+ }
+ override def completions(verbosity: Int) = {
+ super.completions(verbosity) ++ (
+ if (verbosity == 0) Nil
+ else upgrade.completions(verbosity)
+ )
+ }
+ override def follow(s: String) = super.follow(s) orElse {
+ if (upgraded) upgrade.follow(s)
+ else None
+ }
+ override def alternativesFor(id: String) = super.alternativesFor(id) ++ (
+ if (upgraded) upgrade.alternativesFor(id)
+ else Nil
+ ) distinct
+ }
+ }
+ def apply(tp: Type): TypeMemberCompletion = {
+ if (tp eq NoType) NoTypeCompletion
+ else if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
+ else new TypeMemberCompletion(tp)
+ }
+ def imported(tp: Type) = new ImportCompletion(tp)
+ }
+
+ class TypeMemberCompletion(val tp: Type) extends CompletionAware
+ with CompilerCompletion {
+ def excludeEndsWith: List[String] = Nil
+ def excludeStartsWith: List[String] = List("<") // <byname>, <repeated>, etc.
+ def excludeNames: List[String] = (anyref.methodNames filterNot anyRefMethodsToShow) :+ "_root_"
+
+ def methodSignatureString(sym: Symbol) = {
+ IMain stripString afterTyper(new MethodSymbolOutput(sym).methodString())
+ }
+
+ def exclude(name: String): Boolean = (
+ (name contains "$") ||
+ (excludeNames contains name) ||
+ (excludeEndsWith exists (name endsWith _)) ||
+ (excludeStartsWith exists (name startsWith _))
+ )
+ def filtered(xs: List[String]) = xs filterNot exclude distinct
+
+ def completions(verbosity: Int) =
+ debugging(tp + " completions ==> ")(filtered(memberNames))
+
+ override def follow(s: String): Option[CompletionAware] =
+ debugging(tp + " -> '" + s + "' ==> ")(Some(TypeMemberCompletion(memberNamed(s).tpe)) filterNot (_ eq NoTypeCompletion))
+
+ override def alternativesFor(id: String): List[String] =
+ debugging(id + " alternatives ==> ") {
+ val alts = members filter (x => x.isMethod && tos(x) == id) map methodSignatureString
+
+ if (alts.nonEmpty) "" :: alts else Nil
+ }
+
+ override def toString = "%s (%d members)".format(tp, members.size)
+ }
+
+ class PackageCompletion(tp: Type) extends TypeMemberCompletion(tp) {
+ override def excludeNames = anyref.methodNames
+ }
+
+ class LiteralCompletion(lit: Literal) extends TypeMemberCompletion(lit.value.tpe) {
+ override def completions(verbosity: Int) = verbosity match {
+ case 0 => filtered(memberNames)
+ case _ => memberNames
+ }
+ }
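+ // e.g. on "5.<tab>" the first tab shows the filtered members of the literal's
+ // type; a second consecutive tab (verbosity > 0) shows them unfiltered.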
+
+ class ImportCompletion(tp: Type) extends TypeMemberCompletion(tp) {
+ override def completions(verbosity: Int) = verbosity match {
+ case 0 => filtered(members filterNot (_.isSetter) map tos)
+ case _ => super.completions(verbosity)
+ }
+ }
+
+ // not for completion but for excluding
+ object anyref extends TypeMemberCompletion(AnyRefClass.tpe) { }
+
+ // the unqualified vals/defs/etc visible in the repl
+ object ids extends CompletionAware {
+ override def completions(verbosity: Int) = intp.unqualifiedIds ++ List("classOf") //, "_root_")
+ // now we use the compiler for everything.
+ override def follow(id: String): Option[CompletionAware] = {
+ if (!completions(0).contains(id))
+ return None
+
+ val tpe = intp typeOfExpression id
+ if (tpe == NoType)
+ return None
+
+ def default = Some(TypeMemberCompletion(tpe))
+
+ // only rebinding vals in power mode for now.
+ if (!isReplPower) default
+ else intp runtimeClassAndTypeOfTerm id match {
+ case Some((clazz, runtimeType)) =>
+ val sym = intp.symbolOfTerm(id)
+ if (sym.isStable) {
+ val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
+ Some(TypeMemberCompletion(tpe, runtimeType, param))
+ }
+ else default
+ case _ =>
+ default
+ }
+ }
+ override def toString = "<repl ids> (%s)".format(completions(0).size)
+ }
+
+ // user-issued wildcard imports like "import global._" or "import String._"
+ private def imported = intp.sessionWildcards map TypeMemberCompletion.imported
+
+ // literal Ints, Strings, etc.
+ object literals extends CompletionAware {
+ def simpleParse(code: String): Tree = newUnitParser(code).templateStats().last
+ def completions(verbosity: Int) = Nil
+
+ override def follow(id: String) = simpleParse(id) match {
+ case x: Literal => Some(new LiteralCompletion(x))
+ case _ => None
+ }
+ }
+
+ // top level packages
+ object rootClass extends TypeMemberCompletion(RootClass.tpe) {
+ override def completions(verbosity: Int) = super.completions(verbosity) :+ "_root_"
+ override def follow(id: String) = id match {
+ case "_root_" => Some(this)
+ case _ => super.follow(id)
+ }
+ }
+ // members of Predef
+ object predef extends TypeMemberCompletion(PredefModule.tpe) {
+ override def excludeEndsWith = super.excludeEndsWith ++ List("Wrapper", "ArrayOps")
+ override def excludeStartsWith = super.excludeStartsWith ++ List("wrap")
+ override def excludeNames = anyref.methodNames
+
+ override def exclude(name: String) = super.exclude(name) || (
+ (name contains "2")
+ )
+
+ override def completions(verbosity: Int) = verbosity match {
+ case 0 => Nil
+ case _ => super.completions(verbosity)
+ }
+ }
+ // members of scala.*
+ object scalalang extends PackageCompletion(ScalaPackage.tpe) {
+ def arityClasses = List("Product", "Tuple", "Function")
+ def skipArity(name: String) = arityClasses exists (x => name != x && (name startsWith x))
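+ // e.g. "Tuple2" and "Function1" are skipped while "Tuple" and "Function"
+ // themselves are kept, so the arity families don't flood the candidates.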
+ override def exclude(name: String) = super.exclude(name) || (
+ skipArity(name)
+ )
+
+ override def completions(verbosity: Int) = verbosity match {
+ case 0 => filtered(packageNames ++ aliasNames)
+ case _ => super.completions(verbosity)
+ }
+ }
+ // members of java.lang.*
+ object javalang extends PackageCompletion(JavaLangPackage.tpe) {
+ override lazy val excludeEndsWith = super.excludeEndsWith ++ List("Exception", "Error")
+ override lazy val excludeStartsWith = super.excludeStartsWith ++ List("CharacterData")
+
+ override def completions(verbosity: Int) = verbosity match {
+ case 0 => filtered(packageNames)
+ case _ => super.completions(verbosity)
+ }
+ }
+
+ // The list of completion-aware objects consulted for top-level,
+ // unqualified completion; it's too noisy to let much in.
+ lazy val topLevelBase: List[CompletionAware] = List(ids, rootClass, predef, scalalang, javalang, literals)
+ def topLevel = topLevelBase ++ imported
+ def topLevelThreshold = 50
+
+ // the first tier of top level objects (doesn't include file completion)
+ def topLevelFor(parsed: Parsed): List[String] = {
+ val buf = new ListBuffer[String]
+ topLevel foreach { ca =>
+ buf ++= (ca completionsFor parsed)
+
+ if (buf.size > topLevelThreshold)
+ return buf.toList.sorted
+ }
+ buf.toList
+ }
+
+ // the most recent result
+ def lastResult = Forwarder(() => ids follow intp.mostRecentVar)
+
+ def lastResultFor(parsed: Parsed) = {
+ /** The logic is a little tortured right now because normally '.' is
+ * ignored as a delimiter, but on .<tab> it needs to be propagated.
+ */
+ val xs = lastResult completionsFor parsed
+ if (parsed.isEmpty) xs map ("." + _) else xs
+ }
+
+ // generic interface for querying (e.g. interpreter loop, testing)
+ def completions(buf: String): List[String] =
+ topLevelFor(Parsed.dotted(buf + ".", buf.length + 1))
+
+ def completer(): ScalaCompleter = new JLineTabCompletion
+
+ /** This gets a little bit hairy. It's no small feat delegating everything
+ * and also keeping track of exactly where the cursor is and where it's supposed
+ * to end up. The alternatives mechanism is a little hacky: if there is an empty
+ * string in the list of completions, that means we are expanding a unique
+ * completion, so don't update the "last" buffer because it'll be wrong.
+ */
+ class JLineTabCompletion extends ScalaCompleter {
+ // For recording the buffer on the last tab hit
+ private var lastBuf: String = ""
+ private var lastCursor: Int = -1
+
+ // Does this represent two consecutive tabs?
+ def isConsecutiveTabs(buf: String, cursor: Int) =
+ cursor == lastCursor && buf == lastBuf
+
+ // Longest common prefix
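+ // e.g. commonPrefix(List("foreach", "forall")) == "for"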
+ def commonPrefix(xs: List[String]): String = {
+ if (xs.isEmpty || xs.contains("")) ""
+ else xs.head.head match {
+ case ch =>
+ if (xs.tail forall (_.head == ch)) "" + ch + commonPrefix(xs map (_.tail))
+ else ""
+ }
+ }
+
+ // This is jline's entry point for completion.
+ override def complete(buf: String, cursor: Int): Candidates = {
+ verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
+ logDebug("\ncomplete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
+
+ // we don't try lower priority completions unless higher ones return no results.
+ def tryCompletion(p: Parsed, completionFunction: Parsed => List[String]): Option[Candidates] = {
+ val winners = completionFunction(p)
+ if (winners.isEmpty)
+ return None
+ val newCursor =
+ if (winners contains "") p.cursor
+ else {
+ val advance = commonPrefix(winners)
+ lastCursor = p.position + advance.length
+ lastBuf = (buf take p.position) + advance
+ logDebug("tryCompletion(%s, _) lastBuf = %s, lastCursor = %s, p.position = %s".format(
+ p, lastBuf, lastCursor, p.position))
+ p.position
+ }
+
+ Some(Candidates(newCursor, winners))
+ }
+
+ def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity
+ def mkUndelimited = Parsed.undelimited(buf, cursor) withVerbosity verbosity
+
+ // a single dot is special cased to completion on the previous result
+ def lastResultCompletion =
+ if (!looksLikeInvocation(buf)) None
+ else tryCompletion(Parsed.dotted(buf drop 1, cursor), lastResultFor)
+
+ def tryAll = (
+ lastResultCompletion
+ orElse tryCompletion(mkDotted, topLevelFor)
+ getOrElse Candidates(cursor, Nil)
+ )
+
+ /**
+ * This is the kickoff point for all manner of theoretically
+ * possible compiler unhappiness. The fault may be here or
+ * elsewhere, but we don't want to crash the repl regardless.
+ * The compiler makes it impossible to avoid catching Throwable
+ * with its unfortunate tendency to throw java.lang.Errors and
+ * AssertionErrors as the hats drop. We take two swings at it
+ * because there are some spots which like to throw an assertion
+ * once, then work after that. Yeah, what can I say.
+ */
+ try tryAll
+ catch { case ex: Throwable =>
+ logWarning("Error: complete(%s, %s) provoked ".format(buf, cursor) + ex)
+ Candidates(cursor,
+ if (isReplDebug) List("<error:" + ex + ">")
+ else Nil
+ )
+ }
+ }
+ }
+}
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala
new file mode 100644
index 0000000000..0db26c3407
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala
@@ -0,0 +1,90 @@
+// scalastyle:off
+
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Stepan Koltsov
+ */
+
+package org.apache.spark.repl
+
+import scala.reflect.io.{Path, File}
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+import scala.tools.nsc.interpreter.session.JLineHistory.JLineFileHistory
+
+import scala.tools.jline.console.ConsoleReader
+import scala.tools.jline.console.completer._
+import session._
+import scala.collection.JavaConverters._
+import Completion._
+import io.Streamable.slurp
+
+/**
+ * Reads from the console using JLine.
+ */
+class SparkJLineReader(_completion: => Completion) extends InteractiveReader {
+ val interactive = true
+ val consoleReader = new JLineConsoleReader()
+
+ lazy val completion = _completion
+ lazy val history: JLineHistory = new SparkJLineHistory
+
+ private def term = consoleReader.getTerminal()
+ def reset() = term.reset()
+ def init() = term.init()
+
+ def scalaToJline(tc: ScalaCompleter): Completer = new Completer {
+ def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = {
+ val buf = if (_buf == null) "" else _buf
+ val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor)
+ newCandidates foreach (candidates add _)
+ newCursor
+ }
+ }
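+ // Adapter note: jline Completers fill `candidates` in place and return the
+ // buffer position at which those candidates apply.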
+
+ class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
+ if ((history: History) ne NoHistory)
+ this setHistory history
+
+ // working around protected/trait/java insufficiencies.
+ def goBack(num: Int): Unit = back(num)
+ def readOneKey(prompt: String) = {
+ this.print(prompt)
+ this.flush()
+ this.readVirtualKey()
+ }
+ def eraseLine() = consoleReader.resetPromptLine("", "", 0)
+ def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() }
+ // override def readLine(prompt: String): String
+
+ // A hook for running code after the repl is done initializing.
+ lazy val postInit: Unit = {
+ this setBellEnabled false
+
+ if (completion ne NoCompletion) {
+ val argCompletor: ArgumentCompleter =
+ new ArgumentCompleter(new JLineDelimiter, scalaToJline(completion.completer()))
+ argCompletor setStrict false
+
+ this addCompleter argCompletor
+ this setAutoprintThreshold 400 // max completion candidates without warning
+ }
+ }
+ }
+
+ def currentLine = consoleReader.getCursorBuffer.buffer.toString
+ def redrawLine() = consoleReader.redrawLineAndFlush()
+ def eraseLine() = consoleReader.eraseLine()
+ // Alternate implementation, not sure if/when I need this.
+ // def eraseLine() = while (consoleReader.delete()) { }
+ def readOneLine(prompt: String) = consoleReader readLine prompt
+ def readOneKey(prompt: String) = consoleReader readOneKey prompt
+}
+
+/** Changes the default history file to not collide with the scala repl's. */
+class SparkJLineHistory extends JLineFileHistory {
+ import Properties.userHome
+
+ def defaultFileName = ".spark_history"
+ override protected lazy val historyFile = File(Path(userHome) / defaultFileName)
+}
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
new file mode 100644
index 0000000000..13cd2b7fa5
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
@@ -0,0 +1,232 @@
+// scalastyle:off
+
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package org.apache.spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+import scala.collection.{ mutable, immutable }
+import scala.PartialFunction.cond
+import scala.reflect.internal.Chars
+import scala.reflect.internal.Flags._
+import scala.language.implicitConversions
+
+trait SparkMemberHandlers {
+ val intp: SparkIMain
+
+ import intp.{ Request, global, naming }
+ import global._
+ import naming._
+
+ private def codegenln(leadingPlus: Boolean, xs: String*): String = codegen(leadingPlus, (xs ++ Array("\n")): _*)
+ private def codegenln(xs: String*): String = codegenln(true, xs: _*)
+
+ private def codegen(xs: String*): String = codegen(true, xs: _*)
+ private def codegen(leadingPlus: Boolean, xs: String*): String = {
+ val front = if (leadingPlus) "+ " else ""
+ front + (xs map string2codeQuoted mkString " + ")
+ }
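+ // e.g. codegenln("defined module ", "Foo") yields the code fragment:
+ //   + "defined module " + "Foo" + "\n"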
+ private implicit def name2string(name: Name) = name.toString
+
+ /** A traverser that finds all mentioned identifiers, i.e. things
+ * that need to be imported. It might return extra names.
+ */
+ private class ImportVarsTraverser extends Traverser {
+ val importVars = new mutable.HashSet[Name]()
+
+ override def traverse(ast: Tree) = ast match {
+ case Ident(name) =>
+ // XXX this is obviously inadequate but it's going to require some effort
+ // to get right.
+ if (name.toString startsWith "x$") ()
+ else importVars += name
+ case _ => super.traverse(ast)
+ }
+ }
+ private object ImportVarsTraverser {
+ def apply(member: Tree) = {
+ val ivt = new ImportVarsTraverser()
+ ivt traverse member
+ ivt.importVars.toList
+ }
+ }
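+ // e.g. for the tree of "sc.parallelize(1 to 10)" this collects free
+ // identifiers such as `sc`, possibly overapproximating what needs importing.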
+
+ def chooseHandler(member: Tree): MemberHandler = member match {
+ case member: DefDef => new DefHandler(member)
+ case member: ValDef => new ValHandler(member)
+ case member: Assign => new AssignHandler(member)
+ case member: ModuleDef => new ModuleHandler(member)
+ case member: ClassDef => new ClassHandler(member)
+ case member: TypeDef => new TypeAliasHandler(member)
+ case member: Import => new ImportHandler(member)
+ case DocDef(_, documented) => chooseHandler(documented)
+ case member => new GenericHandler(member)
+ }
+
+ sealed abstract class MemberDefHandler(override val member: MemberDef) extends MemberHandler(member) {
+ def symbol = if (member.symbol eq null) NoSymbol else member.symbol
+ def name: Name = member.name
+ def mods: Modifiers = member.mods
+ def keyword = member.keyword
+ def prettyName = name.decode
+
+ override def definesImplicit = member.mods.isImplicit
+ override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
+ override def definesType: Option[TypeName] = Some(name.toTypeName) filter (_ => name.isTypeName)
+ override def definedSymbols = if (symbol eq NoSymbol) Nil else List(symbol)
+ }
+
+ /** Class to handle one member among all the members included
+ * in a single interpreter request.
+ */
+ sealed abstract class MemberHandler(val member: Tree) {
+ def definesImplicit = false
+ def definesValue = false
+ def isLegalTopLevel = false
+
+ def definesTerm = Option.empty[TermName]
+ def definesType = Option.empty[TypeName]
+
+ lazy val referencedNames = ImportVarsTraverser(member)
+ def importedNames = List[Name]()
+ def definedNames = definesTerm.toList ++ definesType.toList
+ def definedOrImported = definedNames ++ importedNames
+ def definedSymbols = List[Symbol]()
+
+ def extraCodeToEvaluate(req: Request): String = ""
+ def resultExtractionCode(req: Request): String = ""
+
+ private def shortName = this.getClass.toString split '.' last
+ override def toString = shortName + referencedNames.mkString(" (refs: ", ", ", ")")
+ }
+
+ class GenericHandler(member: Tree) extends MemberHandler(member)
+
+ class ValHandler(member: ValDef) extends MemberDefHandler(member) {
+ val maxStringElements = 1000 // no need to mkString billions of elements
+ override def definesValue = true
+
+ override def resultExtractionCode(req: Request): String = {
+ val isInternal = isUserVarName(name) && req.lookupTypeOf(name) == "Unit"
+ if (!mods.isPublic || isInternal) ""
+ else {
+ // if this is a lazy val we avoid evaluating it here
+ val resultString =
+ if (mods.isLazy) codegenln(false, "<lazy>")
+ else any2stringOf(req fullPath name, maxStringElements)
+
+ val vidString =
+ if (replProps.vids) """" + " @ " + "%%8x".format(System.identityHashCode(%s)) + " """.trim.format(req fullPath name)
+ else ""
+
+ """ + "%s%s: %s = " + %s""".format(string2code(prettyName), vidString, string2code(req typeOf name), resultString)
+ }
+ }
+ }
+
+ class DefHandler(member: DefDef) extends MemberDefHandler(member) {
+ private def vparamss = member.vparamss
+ private def isMacro = member.symbol hasFlag MACRO
+ // true if not a macro and 0-arity
+ override def definesValue = !isMacro && flattensToEmpty(vparamss)
+ override def resultExtractionCode(req: Request) =
+ if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
+ }
+
+ class AssignHandler(member: Assign) extends MemberHandler(member) {
+ val Assign(lhs, rhs) = member
+ val name = newTermName(freshInternalVarName())
+
+ override def definesTerm = Some(name)
+ override def definesValue = true
+ override def extraCodeToEvaluate(req: Request) =
+ """val %s = %s""".format(name, lhs)
+
+ /** Print out lhs instead of the generated varName */
+ override def resultExtractionCode(req: Request) = {
+ val lhsType = string2code(req lookupTypeOf name)
+ val res = string2code(req fullPath name)
+ """ + "%s: %s = " + %s + "\n" """.format(string2code(lhs.toString), lhsType, res) + "\n"
+ }
+ }
+
+ class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) {
+ override def definesTerm = Some(name)
+ override def definesValue = true
+ override def isLegalTopLevel = true
+
+ override def resultExtractionCode(req: Request) = codegenln("defined module ", name)
+ }
+
+ class ClassHandler(member: ClassDef) extends MemberDefHandler(member) {
+ override def definesType = Some(name.toTypeName)
+ override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase)
+ override def isLegalTopLevel = true
+
+ override def resultExtractionCode(req: Request) =
+ codegenln("defined %s %s".format(keyword, name))
+ }
+
+ class TypeAliasHandler(member: TypeDef) extends MemberDefHandler(member) {
+ private def isAlias = mods.isPublic && treeInfo.isAliasTypeDef(member)
+ override def definesType = Some(name.toTypeName) filter (_ => isAlias)
+
+ override def resultExtractionCode(req: Request) =
+ codegenln("defined type alias ", name) + "\n"
+ }
+
+ class ImportHandler(imp: Import) extends MemberHandler(imp) {
+ val Import(expr, selectors) = imp
+ def targetType: Type = intp.typeOfExpression("" + expr)
+ override def isLegalTopLevel = true
+
+ def createImportForName(name: Name): String = {
+ selectors foreach {
+ case sel @ ImportSelector(old, _, `name`, _) => return "import %s.{ %s }".format(expr, sel)
+ case _ => ()
+ }
+ "import %s.%s".format(expr, name)
+ }
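+ // e.g. for "import foo.{ bar => baz }", looking up the visible name `baz`
+ // returns an import built from the renaming selector; names without a
+ // matching selector fall through to the plain "import foo.name" form.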
+ // TODO: Need to track these specially to honor Predef masking attempts,
+ // because they must be the leading imports in the code generated for each
+ // line. We can use the same machinery as Contexts now, anyway.
+ def isPredefImport = isReferenceToPredef(expr)
+
+ // wildcard imports, e.g. import foo._
+ private def selectorWild = selectors filter (_.name == nme.USCOREkw)
+ // renamed imports, e.g. import foo.{ bar => baz }
+ private def selectorRenames = selectors map (_.rename) filterNot (_ == null)
+
+ /** Whether this import includes a wildcard import */
+ val importsWildcard = selectorWild.nonEmpty
+
+ /** Whether anything imported is implicit. */
+ def importsImplicit = implicitSymbols.nonEmpty
+
+ def implicitSymbols = importedSymbols filter (_.isImplicit)
+ def importedSymbols = individualSymbols ++ wildcardSymbols
+
+ lazy val individualSymbols: List[Symbol] =
+ beforePickler(individualNames map (targetType nonPrivateMember _))
+
+ lazy val wildcardSymbols: List[Symbol] =
+ if (importsWildcard) beforePickler(targetType.nonPrivateMembers.toList)
+ else Nil
+
+ /** Complete list of names imported by a wildcard */
+ lazy val wildcardNames: List[Name] = wildcardSymbols map (_.name)
+ lazy val individualNames: List[Name] = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames)
+
+ /** The names imported by this statement */
+ override lazy val importedNames: List[Name] = wildcardNames ++ individualNames
+ lazy val importsSymbolNamed: Set[String] = importedNames map (_.toString) toSet
+
+ def importString = imp.toString
+ override def resultExtractionCode(req: Request) = codegenln(importString) + "\n"
+ }
+}
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala
new file mode 100644
index 0000000000..7fd5fbb424
--- /dev/null
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.repl
+
+import scala.tools.nsc.Settings
+
+/**
+ * <i>scala.tools.nsc.Settings</i> implementation adding Spark-specific REPL
+ * command line options.
+ */
+class SparkRunnerSettings(error: String => Unit) extends Settings(error) {
+
+ val loadfiles = MultiStringSetting(
+ "-i",
+ "file",
+ "load a file (assumes the code is given interactively)")
+}
diff --git a/repl/scala-2.10/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/scala-2.10/src/test/scala/org/apache/spark/repl/ReplSuite.scala
new file mode 100644
index 0000000000..91c9c52c3c
--- /dev/null
+++ b/repl/scala-2.10/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -0,0 +1,318 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.repl
+
+import java.io._
+import java.net.URLClassLoader
+
+import scala.collection.mutable.ArrayBuffer
+
+import org.scalatest.FunSuite
+import org.apache.spark.SparkContext
+import org.apache.commons.lang3.StringEscapeUtils
+import org.apache.spark.util.Utils
+
+
+class ReplSuite extends FunSuite {
+
+ def runInterpreter(master: String, input: String): String = {
+ val CONF_EXECUTOR_CLASSPATH = "spark.executor.extraClassPath"
+
+ val in = new BufferedReader(new StringReader(input + "\n"))
+ val out = new StringWriter()
+ val cl = getClass.getClassLoader
+ val paths = new ArrayBuffer[String]
+ cl match {
+ case urlLoader: URLClassLoader =>
+ for (url <- urlLoader.getURLs if url.getProtocol == "file") {
+ paths += url.getFile
+ }
+ case _ =>
+ }
+ val classpath = paths.mkString(File.pathSeparator)
+
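+ // Propagate the test classpath to executors, restoring the previous value after the run.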
+ val oldExecutorClasspath = System.getProperty(CONF_EXECUTOR_CLASSPATH)
+ System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
+
+ val interp = new SparkILoop(in, new PrintWriter(out), master)
+ org.apache.spark.repl.Main.interp = interp
+ interp.process(Array("-classpath", classpath))
+ org.apache.spark.repl.Main.interp = null
+ if (interp.sparkContext != null) {
+ interp.sparkContext.stop()
+ }
+ if (oldExecutorClasspath != null) {
+ System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)
+ } else {
+ System.clearProperty(CONF_EXECUTOR_CLASSPATH)
+ }
+ out.toString
+ }
+
+ def assertContains(message: String, output: String) {
+ val contained = output.contains(message)
+ assert(contained,
+ "Interpreter output did not contain '" + message + "':\n" + output)
+ }
+
+ def assertDoesNotContain(message: String, output: String) {
+ val contained = output.contains(message)
+ assert(!contained,
+ "Interpreter output contained '" + message + "':\n" + output)
+ }
+
+ test("propagation of local properties") {
+ // A mock ILoop that doesn't install the SIGINT handler.
+ class ILoop(out: PrintWriter) extends SparkILoop(None, out, None) {
+ settings = new scala.tools.nsc.Settings
+ settings.usejavacp.value = true
+ org.apache.spark.repl.Main.interp = this
+ override def createInterpreter() {
+ intp = new SparkILoopInterpreter
+ intp.setContextClassLoader()
+ }
+ }
+
+ val out = new StringWriter()
+ val interp = new ILoop(new PrintWriter(out))
+ interp.sparkContext = new SparkContext("local", "repl-test")
+ interp.createInterpreter()
+ interp.intp.initialize()
+ interp.sparkContext.setLocalProperty("someKey", "someValue")
+
+ // Make sure the value we set in the caller to interpret is propagated in the thread that
+ // interprets the command.
+ interp.interpret("org.apache.spark.repl.Main.interp.sparkContext.getLocalProperty(\"someKey\")")
+ assert(out.toString.contains("someValue"))
+
+ interp.sparkContext.stop()
+ System.clearProperty("spark.driver.port")
+ }
+
+ test("simple foreach with accumulator") {
+ val output = runInterpreter("local",
+ """
+ |val accum = sc.accumulator(0)
+ |sc.parallelize(1 to 10).foreach(x => accum += x)
+ |accum.value
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ assertContains("res1: Int = 55", output)
+ }
+
+ test("external vars") {
+ val output = runInterpreter("local",
+ """
+ |var v = 7
+ |sc.parallelize(1 to 10).map(x => v).collect.reduceLeft(_+_)
+ |v = 10
+ |sc.parallelize(1 to 10).map(x => v).collect.reduceLeft(_+_)
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ assertContains("res0: Int = 70", output)
+ assertContains("res1: Int = 100", output)
+ }
+
+ test("external classes") {
+ val output = runInterpreter("local",
+ """
+ |class C {
+ |def foo = 5
+ |}
+ |sc.parallelize(1 to 10).map(x => (new C).foo).collect.reduceLeft(_+_)
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ assertContains("res0: Int = 50", output)
+ }
+
+ test("external functions") {
+ val output = runInterpreter("local",
+ """
+ |def double(x: Int) = x + x
+ |sc.parallelize(1 to 10).map(x => double(x)).collect.reduceLeft(_+_)
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ assertContains("res0: Int = 110", output)
+ }
+
+ test("external functions that access vars") {
+ val output = runInterpreter("local",
+ """
+ |var v = 7
+ |def getV() = v
+ |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
+ |v = 10
+ |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ assertContains("res0: Int = 70", output)
+ assertContains("res1: Int = 100", output)
+ }
+
+ test("broadcast vars") {
+ // Test that the value that a broadcast var had when it was created is used,
+ // even if that variable is then modified in the driver program
+ // TODO: This doesn't actually work for arrays when we run in local mode!
+ val output = runInterpreter("local",
+ """
+ |var array = new Array[Int](5)
+ |val broadcastArray = sc.broadcast(array)
+ |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
+ |array(0) = 5
+ |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ assertContains("res0: Array[Int] = Array(0, 0, 0, 0, 0)", output)
+ assertContains("res2: Array[Int] = Array(5, 0, 0, 0, 0)", output)
+ }
+
+ test("interacting with files") {
+ val tempDir = Utils.createTempDir()
+ val out = new FileWriter(new File(tempDir, "input"))
+ out.write("Hello world!\n")
+ out.write("What's up?\n")
+ out.write("Goodbye\n")
+ out.close()
+ val output = runInterpreter("local",
+ """
+ |var file = sc.textFile("%s").cache()
+ |file.count()
+ |file.count()
+ |file.count()
+ """.stripMargin.format(StringEscapeUtils.escapeJava(
+ tempDir.getAbsolutePath + File.separator + "input")))
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ assertContains("res0: Long = 3", output)
+ assertContains("res1: Long = 3", output)
+ assertContains("res2: Long = 3", output)
+ Utils.deleteRecursively(tempDir)
+ }
+
+ test("local-cluster mode") {
+ val output = runInterpreter("local-cluster[1,1,512]",
+ """
+ |var v = 7
+ |def getV() = v
+ |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
+ |v = 10
+ |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
+ |var array = new Array[Int](5)
+ |val broadcastArray = sc.broadcast(array)
+ |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
+ |array(0) = 5
+ |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ assertContains("res0: Int = 70", output)
+ assertContains("res1: Int = 100", output)
+ assertContains("res2: Array[Int] = Array(0, 0, 0, 0, 0)", output)
+ assertContains("res4: Array[Int] = Array(0, 0, 0, 0, 0)", output)
+ }
+
+ test("SPARK-1199 two instances of same class don't type check.") {
+ val output = runInterpreter("local-cluster[1,1,512]",
+ """
+ |case class Sum(exp: String, exp2: String)
+ |val a = Sum("A", "B")
+ |def b(a: Sum): String = a match { case Sum(_, _) => "Found Sum" }
+ |b(a)
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ }
+
+ test("SPARK-2452 compound statements.") {
+ val output = runInterpreter("local",
+ """
+ |val x = 4 ; def f() = x
+ |f()
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ }
+
+ test("SPARK-2576 importing SQLContext.createSchemaRDD.") {
+ // We need to use local-cluster to test this case.
+ val output = runInterpreter("local-cluster[1,1,512]",
+ """
+ |val sqlContext = new org.apache.spark.sql.SQLContext(sc)
+ |import sqlContext.createSchemaRDD
+ |case class TestCaseClass(value: Int)
+ |sc.parallelize(1 to 10).map(x => TestCaseClass(x)).toSchemaRDD.collect
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ }
+
+ test("SPARK-2632 importing a method from non serializable class and not using it.") {
+ val output = runInterpreter("local",
+ """
+ |class TestClass() { def testMethod = 3 }
+ |val t = new TestClass
+ |import t.testMethod
+ |case class TestCaseClass(value: Int)
+ |sc.parallelize(1 to 10).map(x => TestCaseClass(x)).collect
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ }
+
+ if (System.getenv("MESOS_NATIVE_LIBRARY") != null) {
+ test("running on Mesos") {
+ val output = runInterpreter("localquiet",
+ """
+ |var v = 7
+ |def getV() = v
+ |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
+ |v = 10
+ |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
+ |var array = new Array[Int](5)
+ |val broadcastArray = sc.broadcast(array)
+ |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
+ |array(0) = 5
+ |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ assertContains("res0: Int = 70", output)
+ assertContains("res1: Int = 100", output)
+ assertContains("res2: Array[Int] = Array(0, 0, 0, 0, 0)", output)
+ assertContains("res4: Array[Int] = Array(0, 0, 0, 0, 0)", output)
+ }
+ }
+
+ test("collecting objects of class defined in repl") {
+ val output = runInterpreter("local[2]",
+ """
+ |case class Foo(i: Int)
+ |val ret = sc.parallelize((1 to 100).map(Foo), 10).collect
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ assertContains("ret: Array[Foo] = Array(Foo(1),", output)
+ }
+}