Diffstat (limited to 'repl')
 repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala | 21 +++++++++++----------
 repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala     | 25 +++++++++++++------------
 2 files changed, 24 insertions(+), 22 deletions(-)
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index bd3314d94e..99e1e1df33 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -123,18 +123,19 @@ private[repl] trait SparkILoopInit {
   def initializeSpark() {
     intp.beQuietDuring {
       command("""
-         @transient val sc = {
-           val _sc = org.apache.spark.repl.Main.interp.createSparkContext()
-           println("Spark context available as sc.")
-           _sc
-         }
+        @transient val sc = {
+          val _sc = org.apache.spark.repl.Main.interp.createSparkContext()
+          println("Spark context available as sc " +
+            s"(master = ${_sc.master}, app id = ${_sc.applicationId}).")
+          _sc
+        }
         """)
       command("""
-         @transient val sqlContext = {
-           val _sqlContext = org.apache.spark.repl.Main.interp.createSQLContext()
-           println("SQL context available as sqlContext.")
-           _sqlContext
-         }
+        @transient val sqlContext = {
+          val _sqlContext = org.apache.spark.repl.Main.interp.createSQLContext()
+          println("SQL context available as sqlContext.")
+          _sqlContext
+        }
         """)
       command("import org.apache.spark.SparkContext._")
       command("import sqlContext.implicits._")
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 33d262558b..e91139fb29 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -37,18 +37,19 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   def initializeSpark() {
     intp.beQuietDuring {
       processLine("""
-         @transient val sc = {
-           val _sc = org.apache.spark.repl.Main.createSparkContext()
-           println("Spark context available as sc.")
-           _sc
-         }
+        @transient val sc = {
+          val _sc = org.apache.spark.repl.Main.createSparkContext()
+          println("Spark context available as sc " +
+            s"(master = ${_sc.master}, app id = ${_sc.applicationId}).")
+          _sc
+        }
         """)
       processLine("""
-         @transient val sqlContext = {
-           val _sqlContext = org.apache.spark.repl.Main.createSQLContext()
-           println("SQL context available as sqlContext.")
-           _sqlContext
-         }
+        @transient val sqlContext = {
+          val _sqlContext = org.apache.spark.repl.Main.createSQLContext()
+          println("SQL context available as sqlContext.")
+          _sqlContext
+        }
         """)
       processLine("import org.apache.spark.SparkContext._")
       processLine("import sqlContext.implicits._")
@@ -85,7 +86,7 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   /** Available commands */
   override def commands: List[LoopCommand] = sparkStandardCommands
 
-  /** 
+  /**
    * We override `loadFiles` because we need to initialize Spark *before* the REPL
    * sees any files, so that the Spark context is visible in those files. This is a bit of a
    * hack, but there isn't another hook available to us at this point.
@@ -98,7 +99,7 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
 
 object SparkILoop {
 
-  /** 
+  /**
    * Creates an interpreter loop with default settings and feeds
    * the given code to it as input.
    */
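
Note: the last two hunks only trim trailing whitespace from scaladoc openers, but the comment they touch documents the important hook: `loadFiles` is overridden so Spark is initialized before the REPL processes any files, keeping sc visible to them. A minimal sketch of that override pattern against the Scala 2.11 ILoop, assuming scala-compiler on the classpath (the class name and no-op initializer are hypothetical; the real SparkILoop runs the commands shown in the first hunk):

    import scala.tools.nsc.Settings
    import scala.tools.nsc.interpreter.ILoop

    // Hypothetical skeleton of the override described in the scaladoc:
    // initialize Spark first, then let the REPL load any -i files.
    class EagerInitLoop extends ILoop {
      // Placeholder for the real sc/sqlContext-creating commands.
      def initializeSpark(): Unit = ()

      override def loadFiles(settings: Settings): Unit = {
        initializeSpark()          // contexts exist before any file runs
        super.loadFiles(settings)  // normal REPL file processing
      }
    }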