author     Marcelo Vanzin <vanzin@cloudera.com>    2015-11-24 15:08:02 -0600
committer  Imran Rashid <irashid@cloudera.com>     2015-11-24 15:08:02 -0600
commit     e6dd237463d2de8c506f0735dfdb3f43e8122513 (patch)
tree       f55681165f681db8729dfcdd9bbb37d857920c72 /repl
parent     f3152722791b163fa66597b3684009058195ba33 (diff)
[SPARK-11929][CORE] Make the repl log4j configuration override the root logger.
In the default Spark distribution, there are currently two separate log4j config files, with different default values for the root logger, so that when running the shell you have a different default log level. This makes the shell more usable, since the logs don't overwhelm the output.

But if you install a custom log4j.properties, you lose that, because then it's going to be used no matter whether you're running a regular app or the shell.

With this change, the overriding of the log level is done differently: the log level of the repl's main class (org.apache.spark.repl.Main) is used to define the root logger's level when running the shell, defaulting to WARN if it's not set explicitly.

In a somewhat related change, the shell output about the "sc" variable was changed a bit to contain a little more useful information about the application, since when the root logger's log level is WARN, that information is never shown to the user.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #9816 from vanzin/shell-logging.
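The root-logger override itself lives in Spark core and is not part of this repl-limited diff. As a rough sketch of the idea described above (log4j 1.x API, hypothetical method name overrideRootLoggerForShell), the shell reads whatever level is configured for org.apache.spark.repl.Main and applies it to the root logger, falling back to WARN when nothing is set:

import org.apache.log4j.{Level, LogManager}

// Sketch only -- illustrates the commit message, not the actual core change.
// If log4j.properties sets "log4j.logger.org.apache.spark.repl.Main=INFO",
// the shell keeps INFO; with nothing configured, the root logger drops to WARN.
def overrideRootLoggerForShell(): Unit = {
  val rootLogger = LogManager.getRootLogger
  val replLogger = LogManager.getLogger("org.apache.spark.repl.Main")
  val replLevel = Option(replLogger.getLevel).getOrElse(Level.WARN)
  if (replLevel != rootLogger.getEffectiveLevel) {
    rootLogger.setLevel(replLevel)
  }
}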
Diffstat (limited to 'repl')
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala | 21
-rw-r--r--  repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala     | 25
2 files changed, 24 insertions, 22 deletions
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index bd3314d94e..99e1e1df33 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -123,18 +123,19 @@ private[repl] trait SparkILoopInit {
def initializeSpark() {
intp.beQuietDuring {
command("""
- @transient val sc = {
- val _sc = org.apache.spark.repl.Main.interp.createSparkContext()
- println("Spark context available as sc.")
- _sc
- }
+ @transient val sc = {
+ val _sc = org.apache.spark.repl.Main.interp.createSparkContext()
+ println("Spark context available as sc " +
+ s"(master = ${_sc.master}, app id = ${_sc.applicationId}).")
+ _sc
+ }
""")
command("""
- @transient val sqlContext = {
- val _sqlContext = org.apache.spark.repl.Main.interp.createSQLContext()
- println("SQL context available as sqlContext.")
- _sqlContext
- }
+ @transient val sqlContext = {
+ val _sqlContext = org.apache.spark.repl.Main.interp.createSQLContext()
+ println("SQL context available as sqlContext.")
+ _sqlContext
+ }
""")
command("import org.apache.spark.SparkContext._")
command("import sqlContext.implicits._")
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 33d262558b..e91139fb29 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -37,18 +37,19 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
def initializeSpark() {
intp.beQuietDuring {
processLine("""
- @transient val sc = {
- val _sc = org.apache.spark.repl.Main.createSparkContext()
- println("Spark context available as sc.")
- _sc
- }
+ @transient val sc = {
+ val _sc = org.apache.spark.repl.Main.createSparkContext()
+ println("Spark context available as sc " +
+ s"(master = ${_sc.master}, app id = ${_sc.applicationId}).")
+ _sc
+ }
""")
processLine("""
- @transient val sqlContext = {
- val _sqlContext = org.apache.spark.repl.Main.createSQLContext()
- println("SQL context available as sqlContext.")
- _sqlContext
- }
+ @transient val sqlContext = {
+ val _sqlContext = org.apache.spark.repl.Main.createSQLContext()
+ println("SQL context available as sqlContext.")
+ _sqlContext
+ }
""")
processLine("import org.apache.spark.SparkContext._")
processLine("import sqlContext.implicits._")
@@ -85,7 +86,7 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
/** Available commands */
override def commands: List[LoopCommand] = sparkStandardCommands
- /**
+ /**
* We override `loadFiles` because we need to initialize Spark *before* the REPL
* sees any files, so that the Spark context is visible in those files. This is a bit of a
* hack, but there isn't another hook available to us at this point.
@@ -98,7 +99,7 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
object SparkILoop {
- /**
+ /**
* Creates an interpreter loop with default settings and feeds
* the given code to it as input.
*/