author     Prashant Sharma <prashant.iiith@gmail.com>   2013-03-20 00:18:04 +0530
committer  Prashant Sharma <prashant.iiith@gmail.com>   2013-03-20 00:18:04 +0530
commit     d9f34e505d88daa6e3665b40ab70dab41e277c9d (patch)
tree       cb5ebb6cac79720e69423b699b7463ae4d7d2b6b /repl/src/main/scala
parent     432a227320e505a1790d6fb22463ab3eba4fc830 (diff)
Ctrl-D hang bug fixed!
Diffstat (limited to 'repl/src/main/scala')
-rw-r--r--  repl/src/main/scala/spark/repl/SparkILoop.scala      13
-rw-r--r--  repl/src/main/scala/spark/repl/SparkILoopInit.scala   1
2 files changed, 13 insertions(+), 1 deletion(-)
diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
index 2f2b5b2372..1abcc8131c 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -151,9 +151,20 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
    finally in = saved
  }
+  /* PRASHANT: Detecting whether a lazy val has been materialized is possible, but not
+   * worth it, as in most Spark shell usages it will be. In case it is not, the user will
+   * find shutdown slower than the shell startup itself.
+   */
+  def sparkCleanUp() {
+    echo("Stopping spark context.")
+    intp.beQuietDuring {
+      command("sc.stop()")
+    }
+  }
  /** Close the interpreter and set the var to null. */
  def closeInterpreter() {
    if (intp ne null) {
+      sparkCleanUp()
      intp.close()
      intp = null
    }
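
The comment in the hunk above notes that checking whether the lazy `sc` val was ever materialized is possible but not worth the effort, since in most shell sessions it will have been. Purely as an illustration of that rejected alternative (FakeContext, materialized, and cleanUp below are made-up names, not code from this patch), such a guard could track first access of the lazy val so that shutdown skips stop() when nothing was created:

// Hypothetical sketch, not part of this patch: track whether a lazily created
// resource was ever materialized so cleanup can skip an unnecessary stop().
object LazyResourceSketch {

  // Stand-in for SparkContext: expensive to create, must be stopped if created.
  final class FakeContext {
    def stop(): Unit = println("context stopped")
  }

  @volatile private var materialized = false

  // Mirrors the shell's `lazy val sc`: the flag flips only on first access.
  lazy val sc: FakeContext = {
    materialized = true
    new FakeContext
  }

  // Only pay the cost of stop() when the context actually exists.
  def cleanUp(): Unit =
    if (materialized) sc.stop()
    else println("context never materialized; nothing to stop")

  def main(args: Array[String]): Unit = {
    cleanUp()          // prints "context never materialized; nothing to stop"
    val ctx = sc       // force materialization
    cleanUp()          // prints "context stopped"
  }
}

The patch instead always issues sc.stop() on close, accepting a slower shutdown in the rare case where the context was never used.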
@@ -873,6 +884,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
      val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
      if (autorun.isDefined) intp.quietRun(autorun.get)
    })
+    addThunk(initializeSpark())
    loadFiles(settings)
    // it is broken on startup; go ahead and exit
@@ -886,7 +898,6 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
    // message to an actor.
    if (isAsync) {
      intp initialize initializedCallback()
-      addThunk(initializeSpark())
      createAsyncListener() // listens for signal to run postInitialization
    }
    else {
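
The two hunks above move addThunk(initializeSpark()) out of the isAsync-only branch, so the Spark initialization thunk is registered before loadFiles(settings) on both the synchronous and asynchronous startup paths. A minimal sketch of that general pattern, under assumed names (ThunkQueueSketch, pendingThunks, and runThunks are illustrative, not the REPL's actual internals):

// Sketch of a queue of initialization thunks that is registered up front and
// drained exactly once after interpreter setup completes.
object ThunkQueueSketch {
  private val pendingThunks = scala.collection.mutable.Queue.empty[() => Unit]

  // Register a block to run after initialization, like addThunk(initializeSpark()).
  def addThunk(body: => Unit): Unit = pendingThunks.enqueue(() => body)

  // Drain the queue once initialization is done; both startup paths end here.
  def runThunks(): Unit =
    while (pendingThunks.nonEmpty) pendingThunks.dequeue().apply()

  def main(args: Array[String]): Unit = {
    addThunk(println("autorun code"))
    addThunk(println("initializeSpark()"))  // registered unconditionally, as in this patch
    // ... interpreter initialization would happen here ...
    runThunks()
  }
}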
diff --git a/repl/src/main/scala/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
index b275faf981..c3d7f45dc9 100644
--- a/repl/src/main/scala/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
@@ -121,6 +121,7 @@ trait SparkILoopInit {
""")
command("import spark.SparkContext._");
}
+ echo("... Spark context available as sc.")
}
// code to be executed only after the interpreter is initialized
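
The SparkILoopInit.scala change prints the notice after the beQuietDuring block, so the setup commands stay silent while the user still sees one line telling them sc is ready. A small sketch of that pattern under assumed names (QuietInitSketch and its quiet flag are illustrative, not the interpreter's real implementation):

// Sketch: run setup with output suppressed, then announce outside the quiet block.
object QuietInitSketch {
  @volatile private var quiet = false

  def echo(msg: String): Unit = if (!quiet) println(msg)

  // Rough analogue of intp.beQuietDuring { ... }: suppress echo inside the block.
  def beQuietDuring[T](body: => T): T = {
    val saved = quiet
    quiet = true
    try body finally quiet = saved
  }

  def main(args: Array[String]): Unit = {
    beQuietDuring {
      echo("Creating SparkContext...")          // suppressed, like the import commands
    }
    echo("... Spark context available as sc.")  // printed once setup is done
  }
}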