diff options
author | zsxwing <zsxwing@gmail.com> | 2014-11-21 00:42:43 -0800 |
---|---|---|
committer | Reynold Xin <rxin@databricks.com> | 2014-11-21 00:42:43 -0800 |
commit | f1069b84b82b932751604bc20d5c2e451d57c455 (patch) | |
tree | b8670576089a7e326e064952ae03b528930495dd | |
parent | 28fdc6f6828df32d413d6c76dbfd2d13b1991c45 (diff) | |
download | spark-f1069b84b82b932751604bc20d5c2e451d57c455.tar.gz spark-f1069b84b82b932751604bc20d5c2e451d57c455.tar.bz2 spark-f1069b84b82b932751604bc20d5c2e451d57c455.zip |
[SPARK-4472][Shell] Print "Spark context available as sc." only when SparkContext is created successfully
It's odd to print "Spark context available as sc" when the SparkContext fails to be created.
Author: zsxwing <zsxwing@gmail.com>
Closes #3341 from zsxwing/SPARK-4472 and squashes the following commits:
4850093 [zsxwing] Print "Spark context available as sc." only when SparkContext is created successfully
-rw-r--r-- | repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala | 7 | ||||
-rw-r--r-- | repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala | 7 |
2 files changed, 10 insertions, 4 deletions
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala index 7667a9c119..da4286c5e4 100644 --- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala +++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala @@ -121,11 +121,14 @@ trait SparkILoopInit { def initializeSpark() { intp.beQuietDuring { command(""" - @transient val sc = org.apache.spark.repl.Main.interp.createSparkContext(); + @transient val sc = { + val _sc = org.apache.spark.repl.Main.interp.createSparkContext() + println("Spark context available as sc.") + _sc + } """) command("import org.apache.spark.SparkContext._") } - echo("Spark context available as sc.") } // code to be executed only after the interpreter is initialized diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala index a591e9fc46..2507273059 100644 --- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala +++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala @@ -61,11 +61,14 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def initializeSpark() { intp.beQuietDuring { command( """ - @transient val sc = org.apache.spark.repl.Main.createSparkContext(); + @transient val sc = { + val _sc = org.apache.spark.repl.Main.createSparkContext() + println("Spark context available as sc.") + _sc + } """) command("import org.apache.spark.SparkContext._") } - echo("Spark context available as sc.") } /** Print a welcome message */ |