path: root/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
Diffstat (limited to 'repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala')
-rw-r--r--  repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala  21
1 file changed, 13 insertions(+), 8 deletions(-)
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 76a66c1bea..f2ad7adcd1 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -21,7 +21,7 @@ import java.io.BufferedReader
import scala.Predef.{println => _, _}
import scala.tools.nsc.Settings
-import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}
+import scala.tools.nsc.interpreter.{ILoop, JPrintWriter, replProps}
import scala.tools.nsc.util.stringFromStream
import scala.util.Properties.{javaVersion, javaVmName, versionString}
@@ -63,8 +63,8 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
""")
processLine("import org.apache.spark.SparkContext._")
processLine("import spark.implicits._")
- processLine("import spark.sql")
- processLine("import org.apache.spark.sql.functions._")
+ //TODO 2.12 processLine("import spark.sql")
+ //TODO 2.12 processLine("import org.apache.spark.sql.functions._")
replayCommandStack = Nil // remove above commands from session history.
}
}
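The lines above go through processLine, which executes each string as if it had been typed at the prompt; resetting replayCommandStack afterwards keeps the bootstrap imports out of :replay, as the trailing comment notes. A minimal sketch of that pattern, assuming scala-compiler 2.11 on the classpath (BootstrapILoop and its import line are hypothetical):

    import scala.tools.nsc.interpreter.ILoop

    class BootstrapILoop extends ILoop {
      // Run setup lines as if the user had typed them, then drop them
      // from the replay stack so :replay does not re-run the bootstrap.
      def bootstrap(): Unit = {
        processLine("import scala.math._")
        replayCommandStack = Nil
      }
    }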
@@ -86,11 +86,17 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
echo("Type :help for more information.")
}
+ private def initCommand(): Result = {
+ initializeSpark
+ Result(keepRunning = true, lineToRecord = None)
+ }
+
/** Add repl commands that need to be blocked, e.g. reset */
private val blockedCommands = Set[String]()
/** Standard commands */
lazy val sparkStandardCommands: List[SparkILoop.this.LoopCommand] =
+ LoopCommand.nullary("initSpark", "initialize spark context", initCommand) ::
standardCommands.filter(cmd => !blockedCommands(cmd.name))
/** Available commands */
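LoopCommand.nullary binds a zero-argument handler to a named colon command, so prepending it to the filtered standard commands makes :initSpark available alongside the built-ins. A self-contained sketch of the same mechanism, assuming scala-compiler on the classpath (DemoILoop, :hello, and helloCommand are hypothetical):

    import scala.tools.nsc.Settings
    import scala.tools.nsc.interpreter.ILoop

    class DemoILoop extends ILoop {
      private def helloCommand(): Result = {
        echo("hello from a custom command")
        Result(keepRunning = true, lineToRecord = None)
      }

      // Prepend a nullary (zero-argument) command, mirroring how the
      // diff puts :initSpark in front of the standard commands.
      override def commands: List[LoopCommand] =
        LoopCommand.nullary("hello", "print a greeting", helloCommand) :: standardCommands
    }

    object DemoILoop {
      def main(args: Array[String]): Unit = {
        val settings = new Settings
        settings.usejavacp.value = true // reuse the JVM classpath in the REPL
        new DemoILoop().process(settings)
      }
    }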
@@ -101,11 +107,10 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
* sees any files, so that the Spark context is visible in those files. This is a bit of a
* hack, but there isn't another hook available to us at this point.
*/
- override def loadFiles(settings: Settings): Unit = {
- initializeSpark()
- super.loadFiles(settings)
- }
-
+ // override def loadFiles(settings: Settings): Unit = {
+ // initializeSpark()
+ // super.loadFiles(settings)
+ // }
override def resetCommand(line: String): Unit = {
super.resetCommand(line)
initializeSpark()
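With the loadFiles override commented out, nothing creates the Spark context automatically at startup under Scala 2.12; it is now triggered explicitly through :initSpark, while the resetCommand override above still re-initializes after :reset. A hypothetical launcher (ReplLauncher is not part of the patch) showing how such a loop is driven, with in0/out matching the constructor in the hunk headers:

    import scala.tools.nsc.Settings
    import scala.tools.nsc.interpreter.JPrintWriter
    import org.apache.spark.repl.SparkILoop

    object ReplLauncher {
      def main(args: Array[String]): Unit = {
        val settings = new Settings
        settings.usejavacp.value = true
        // No automatic Spark setup here: the user (or an init script)
        // runs :initSpark inside the session to create the context.
        val loop = new SparkILoop(None, new JPrintWriter(Console.out, true))
        loop.process(settings)
      }
    }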