about summary refs log tree commit diff
path: root/repl
diff options
context:
space:
mode:
authorzsxwing <zsxwing@gmail.com>2014-11-21 00:42:43 -0800
committerReynold Xin <rxin@databricks.com>2014-11-21 00:43:10 -0800
commit6f70e0295572e3037660004797040e026e440dbd (patch)
tree51bc7299704b438751cbaaa5705951760ea6ff1b /repl
parent668643b8de0958094766fa62e7e2a7a0909f11da (diff)
downloadspark-6f70e0295572e3037660004797040e026e440dbd.tar.gz
spark-6f70e0295572e3037660004797040e026e440dbd.tar.bz2
spark-6f70e0295572e3037660004797040e026e440dbd.zip
[SPARK-4472][Shell] Print "Spark context available as sc." only when SparkContext is created...
... successfully. It is odd to print "Spark context available as sc" when the SparkContext was not created successfully. Author: zsxwing <zsxwing@gmail.com> Closes #3341 from zsxwing/SPARK-4472 and squashes the following commits: 4850093 [zsxwing] Print "Spark context available as sc." only when SparkContext is created successfully (cherry picked from commit f1069b84b82b932751604bc20d5c2e451d57c455) Signed-off-by: Reynold Xin <rxin@databricks.com>
Diffstat (limited to 'repl')
-rw-r--r--repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala7
-rw-r--r--repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala7
2 files changed, 10 insertions, 4 deletions
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 7667a9c119..da4286c5e4 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -121,11 +121,14 @@ trait SparkILoopInit {
def initializeSpark() {
intp.beQuietDuring {
command("""
- @transient val sc = org.apache.spark.repl.Main.interp.createSparkContext();
+ @transient val sc = {
+ val _sc = org.apache.spark.repl.Main.interp.createSparkContext()
+ println("Spark context available as sc.")
+ _sc
+ }
""")
command("import org.apache.spark.SparkContext._")
}
- echo("Spark context available as sc.")
}
// code to be executed only after the interpreter is initialized
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index a591e9fc46..2507273059 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -61,11 +61,14 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def initializeSpark() {
intp.beQuietDuring {
command( """
- @transient val sc = org.apache.spark.repl.Main.createSparkContext();
+ @transient val sc = {
+ val _sc = org.apache.spark.repl.Main.createSparkContext()
+ println("Spark context available as sc.")
+ _sc
+ }
""")
command("import org.apache.spark.SparkContext._")
}
- echo("Spark context available as sc.")
}
/** Print a welcome message */