diff options
author | Prajwal Tuladhar <praj@infynyxx.com> | 2016-06-22 16:30:10 -0700 |
---|---|---|
committer | Shixiong Zhu <shixiong@databricks.com> | 2016-06-22 16:30:10 -0700 |
commit | 044971eca0ff3c2ce62afa665dbd3072d52cbbec (patch) | |
tree | c42da548582705dcc24c71da0fff4e7751f21ff4 /core/src | |
parent | 857ecff1d8268b28bb287e47cda370c87afe9d41 (diff) | |
download | spark-044971eca0ff3c2ce62afa665dbd3072d52cbbec.tar.gz spark-044971eca0ff3c2ce62afa665dbd3072d52cbbec.tar.bz2 spark-044971eca0ff3c2ce62afa665dbd3072d52cbbec.zip |
[SPARK-16131] initialize internal logger lazily in Scala preferred way
## What changes were proposed in this pull request?
Initialize the logger instance lazily, in the Scala-preferred way (using a `lazy val` instead of a manually null-checked `var`).
## How was this patch tested?
By running `./build/mvn clean test` locally
Author: Prajwal Tuladhar <praj@infynyxx.com>
Closes #13842 from infynyxx/spark_internal_logger.
Diffstat (limited to 'core/src')
-rw-r--r-- | core/src/main/scala/org/apache/spark/internal/Logging.scala | 14 | ||||
-rw-r--r-- | core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala | 2 |
2 files changed, 4 insertions, 12 deletions
diff --git a/core/src/main/scala/org/apache/spark/internal/Logging.scala b/core/src/main/scala/org/apache/spark/internal/Logging.scala index 66a0cfec62..c51050c13d 100644 --- a/core/src/main/scala/org/apache/spark/internal/Logging.scala +++ b/core/src/main/scala/org/apache/spark/internal/Logging.scala @@ -32,7 +32,10 @@ private[spark] trait Logging { // Make the log field transient so that objects with Logging can // be serialized and used on another machine - @transient private var log_ : Logger = null + @transient lazy val log: Logger = { + initializeLogIfNecessary(false) + LoggerFactory.getLogger(logName) + } // Method to get the logger name for this object protected def logName = { @@ -40,15 +43,6 @@ private[spark] trait Logging { this.getClass.getName.stripSuffix("$") } - // Method to get or create the logger for this object - protected def log: Logger = { - if (log_ == null) { - initializeLogIfNecessary(false) - log_ = LoggerFactory.getLogger(logName) - } - log_ - } - // Log methods that take only a String protected def logInfo(msg: => String) { if (log.isInfoEnabled) log.info(msg) diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala index 967c4d5325..8259923ce3 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala @@ -100,8 +100,6 @@ class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, val rpcEnv: Rp // instance across threads private val ser = SparkEnv.get.closureSerializer.newInstance() - override protected def log = CoarseGrainedSchedulerBackend.this.log - protected val addressToExecutorId = new HashMap[RpcAddress, String] private val reviveThread = |