about summary refs log tree commit diff
path: root/repl
diff options
context:
space:
mode:
authorPrashant Sharma <prashant.s@imaginea.com>2013-04-16 11:13:51 +0530
committerPrashant Sharma <prashant.s@imaginea.com>2013-04-16 11:13:51 +0530
commit36ccb35371682d5b960e9cbcc80bca7c5db4ce49 (patch)
treed06613dfffe678b2f948577386a0187af3afa821 /repl
parent19b0256ae455d8dcfa3b2eafb602633eff9d4f1c (diff)
downloadspark-36ccb35371682d5b960e9cbcc80bca7c5db4ce49.tar.gz
spark-36ccb35371682d5b960e9cbcc80bca7c5db4ce49.tar.bz2
spark-36ccb35371682d5b960e9cbcc80bca7c5db4ce49.zip
Changed the Spark context from a lazy val to a plain val.
Diffstat (limited to 'repl')
-rw-r--r--  repl/src/main/scala/spark/repl/SparkILoop.scala     | 4
-rw-r--r--  repl/src/main/scala/spark/repl/SparkILoopInit.scala | 4
2 files changed, 5 insertions, 3 deletions
diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
index 904a72244f..28a7c161f6 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -885,6 +885,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
if (autorun.isDefined) intp.quietRun(autorun.get)
})
+
+ addThunk(printWelcome())
addThunk(initializeSpark())
loadFiles(settings)
@@ -905,7 +907,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
intp.initializeSynchronous()
postInitialization()
}
- printWelcome()
+ // printWelcome()
try loop()
catch AbstractOrMissingHandler()
diff --git a/repl/src/main/scala/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
index c3d7f45dc9..6ae535c4e6 100644
--- a/repl/src/main/scala/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
@@ -117,11 +117,11 @@ trait SparkILoopInit {
def initializeSpark() {
intp.beQuietDuring {
command("""
- @transient lazy val sc = spark.repl.Main.interp.createSparkContext();
+ @transient val sc = spark.repl.Main.interp.createSparkContext();
""")
command("import spark.SparkContext._");
}
- echo("... Spark context available as sc.")
+ echo("Spark context available as sc.")
}
// code to be executed only after the interpreter is initialized