From f1069b84b82b932751604bc20d5c2e451d57c455 Mon Sep 17 00:00:00 2001
From: zsxwing
Date: Fri, 21 Nov 2014 00:42:43 -0800
Subject: [SPARK-4472][Shell] Print "Spark context available as sc." only when
 SparkContext is created successfully

It's weird to print "Spark context available as sc" when SparkContext creation fails.

Author: zsxwing

Closes #3341 from zsxwing/SPARK-4472 and squashes the following commits:

4850093 [zsxwing] Print "Spark context available as sc." only when SparkContext is created successfully
---
 .../src/main/scala/org/apache/spark/repl/SparkILoopInit.scala | 7 +++++--
 .../src/main/scala/org/apache/spark/repl/SparkILoop.scala     | 7 +++++--
 2 files changed, 10 insertions(+), 4 deletions(-)

(limited to 'repl')

diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 7667a9c119..da4286c5e4 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -121,11 +121,14 @@ trait SparkILoopInit {
   def initializeSpark() {
     intp.beQuietDuring {
       command("""
-         @transient val sc = org.apache.spark.repl.Main.interp.createSparkContext();
+         @transient val sc = {
+           val _sc = org.apache.spark.repl.Main.interp.createSparkContext()
+           println("Spark context available as sc.")
+           _sc
+         }
         """)
       command("import org.apache.spark.SparkContext._")
     }
-    echo("Spark context available as sc.")
   }

   // code to be executed only after the interpreter is initialized
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index a591e9fc46..2507273059 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -61,11 +61,14 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
   def initializeSpark() {
     intp.beQuietDuring {
       command( """
-        @transient val sc = org.apache.spark.repl.Main.createSparkContext();
+        @transient val sc = {
+          val _sc = org.apache.spark.repl.Main.createSparkContext()
+          println("Spark context available as sc.")
+          _sc
+        }
        """)
       command("import org.apache.spark.SparkContext._")
     }
-    echo("Spark context available as sc.")
   }

   /** Print a welcome message */
--
cgit v1.2.3
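
Both hunks apply the same pattern: the confirmation message moves from an unconditional echo after the interpreter command into the block that actually creates the SparkContext, so it only runs once createSparkContext() has returned. Below is a minimal standalone Scala sketch of that ordering; the object name and the createContext helper are hypothetical stand-ins for illustration, not Spark's REPL code.

// Sketch of the pattern used in the patch: print the "available as sc."
// message only after the (possibly failing) creation call has returned.
object InitOrderSketch {
  // Hypothetical stand-in for org.apache.spark.repl.Main.createSparkContext().
  private def createContext(fail: Boolean): String =
    if (fail) throw new RuntimeException("SparkContext creation failed")
    else "sc"

  def main(args: Array[String]): Unit = {
    val sc = {
      val _sc = createContext(fail = args.contains("--fail"))
      println("Spark context available as sc.") // reached only when creation succeeds
      _sc
    }
    println(s"Initialized: $sc")
  }
}

Run without arguments, the sketch prints both lines; run with --fail, it throws before the println is reached, which mirrors the REPL behavior this patch intends when createSparkContext() fails.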