author     Jakob Odersky <jakob@odersky.com>    2017-01-10 14:34:33 -0800
committer  Jakob Odersky <jakob@odersky.com>    2017-04-24 14:09:49 -0700
commit     a3860c59deebf996f0c32bcc0d15b2903216e732 (patch)
tree       91f13ce2c756631387950a8f58a44a7dda2d1566 /repl
parent     3609c837f3aa989f0ae7cbf1fd177bb9f3cba7a2 (diff)
REPL compiles and runs
Diffstat (limited to 'repl')
-rw-r--r--  repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala  |  21 +++++++++++++--------
1 file changed, 13 insertions(+), 8 deletions(-)
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 76a66c1bea..f2ad7adcd1 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -21,7 +21,7 @@ import java.io.BufferedReader
import scala.Predef.{println => _, _}
import scala.tools.nsc.Settings
-import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}
+import scala.tools.nsc.interpreter.{ILoop, JPrintWriter, replProps}
import scala.tools.nsc.util.stringFromStream
import scala.util.Properties.{javaVersion, javaVmName, versionString}
@@ -63,8 +63,8 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
""")
processLine("import org.apache.spark.SparkContext._")
processLine("import spark.implicits._")
- processLine("import spark.sql")
- processLine("import org.apache.spark.sql.functions._")
+ //TODO 2.12 processLine("import spark.sql")
+ //TODO 2.12 processLine("import org.apache.spark.sql.functions._")
replayCommandStack = Nil // remove above commands from session history.
}
}
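
The hunk above sits in SparkILoop's startup path: setup lines are replayed through the interpreter as if the user had typed them, and replayCommandStack is then cleared so they do not pollute the :replay history (the sql imports are parked behind TODOs until the surrounding code builds against Scala 2.12). A minimal sketch of that pattern against a plain Scala 2.11 ILoop follows; the class name and the replayed import are illustrative, not from the commit:

    import scala.tools.nsc.interpreter.ILoop

    class QuietInitLoop extends ILoop {
      def initializeSession(): Unit = {
        intp.beQuietDuring {                    // suppress the interpreter's echo
          processLine("import scala.math._")    // replay a setup line as if typed
          replayCommandStack = Nil              // drop the setup lines from :replay
        }
      }
    }
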
@@ -86,11 +86,17 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
echo("Type :help for more information.")
}
+ private def initCommand(): Result = {
+ initializeSpark
+ Result(keepRunning = true, lineToRecord = None)
+ }
+
/** Add repl commands that need to be blocked. e.g. reset */
private val blockedCommands = Set[String]()
/** Standard commands */
lazy val sparkStandardCommands: List[SparkILoop.this.LoopCommand] =
+ LoopCommand.nullary("initSpark", "initialize spark context", initCommand) ::
standardCommands.filter(cmd => !blockedCommands(cmd.name))
/** Available commands */
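
The new :initSpark command is registered through the stock LoopCommand.nullary factory from scala.tools.nsc.interpreter, which takes a command name, a help string, and a () => Result thunk, and the result is prepended to the standard command list. A self-contained sketch of the same wiring, using a hypothetical :hello command purely to show the mechanism:

    import scala.tools.nsc.interpreter.ILoop

    class CommandLoop extends ILoop {
      private def helloCommand(): Result = {
        echo("hello from a custom REPL command")        // print to the console
        Result(keepRunning = true, lineToRecord = None) // stay in the loop
      }

      // Prepend the custom command so :hello shows up next to the built-ins.
      override def commands: List[LoopCommand] =
        LoopCommand.nullary("hello", "print a greeting", helloCommand) ::
        standardCommands
    }

Typing :hello at the prompt then runs the thunk, and :help lists it with the given description; this is the same mechanism that makes :initSpark available to users in the diff above.
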
@@ -101,11 +107,10 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
* sees any files, so that the Spark context is visible in those files. This is a bit of a
* hack, but there isn't another hook available to us at this point.
*/
- override def loadFiles(settings: Settings): Unit = {
- initializeSpark()
- super.loadFiles(settings)
- }
-
+ // override def loadFiles(settings: Settings): Unit = {
+ // initializeSpark()
+ // super.loadFiles(settings)
+ // }
override def resetCommand(line: String): Unit = {
super.resetCommand(line)
initializeSpark()
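
With the loadFiles override commented out, presumably because the hook it relied on changed in the 2.12 interpreter, Spark initialization happens either on demand via the new :initSpark command or on :reset, whose override re-runs initializeSpark() after the parent clears the session. The same reset-then-reinitialize pattern in a minimal, standalone form (names and the replayed import are illustrative):

    import scala.tools.nsc.interpreter.ILoop

    class ResettableLoop extends ILoop {
      private def initializeSession(): Unit =
        intp.beQuietDuring(processLine("import scala.math._"))

      override def resetCommand(line: String): Unit = {
        super.resetCommand(line) // wipe all interpreter state, as :reset does
        initializeSession()      // then restore the baseline imports
      }
    }
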