diff options
author | Reynold Xin <rxin@databricks.com> | 2016-02-22 14:01:35 -0800 |
---|---|---|
committer | Josh Rosen <joshrosen@databricks.com> | 2016-02-22 14:01:35 -0800 |
commit | 4a91806a45a48432c3ea4c2aaa553177952673e9 (patch) | |
tree | 23ae670fd286bd0f7ae827579cb464534bddb520 /core/src | |
parent | 00461bb911c31aff9c945a14e23df2af4c280c23 (diff) | |
download | spark-4a91806a45a48432c3ea4c2aaa553177952673e9.tar.gz spark-4a91806a45a48432c3ea4c2aaa553177952673e9.tar.bz2 spark-4a91806a45a48432c3ea4c2aaa553177952673e9.zip |
[SPARK-13413] Remove SparkContext.metricsSystem
## What changes were proposed in this pull request?
This patch removes SparkContext.metricsSystem. SparkContext.metricsSystem returns MetricsSystem, which is a private class. I think it was added by accident.
In addition, I also removed an unused private[spark] setter method for schedulerBackend.
## How was this patch tested?
N/A.
Author: Reynold Xin <rxin@databricks.com>
This patch had conflicts when merged, resolved by
Committer: Josh Rosen <joshrosen@databricks.com>
Closes #11282 from rxin/SPARK-13413.
Diffstat (limited to 'core/src')
-rw-r--r-- | core/src/main/scala/org/apache/spark/SparkContext.scala | 9 |
1 file changed, 2 insertions, 7 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index c001df31aa..cd7eed382e 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -297,9 +297,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli val sparkUser = Utils.getCurrentUserName() private[spark] def schedulerBackend: SchedulerBackend = _schedulerBackend - private[spark] def schedulerBackend_=(sb: SchedulerBackend): Unit = { - _schedulerBackend = sb - } private[spark] def taskScheduler: TaskScheduler = _taskScheduler private[spark] def taskScheduler_=(ts: TaskScheduler): Unit = { @@ -322,8 +319,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli def applicationId: String = _applicationId def applicationAttemptId: Option[String] = _applicationAttemptId - def metricsSystem: MetricsSystem = if (_env != null) _env.metricsSystem else null - private[spark] def eventLogger: Option[EventLoggingListener] = _eventLogger private[spark] def executorAllocationManager: Option[ExecutorAllocationManager] = @@ -514,9 +509,9 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli // The metrics system for Driver need to be set spark.app.id to app ID. // So it should start after we get app ID from the task scheduler and set spark.app.id. - metricsSystem.start() + _env.metricsSystem.start() // Attach the driver metrics servlet handler to the web ui after the metrics system is started. - metricsSystem.getServletHandlers.foreach(handler => ui.foreach(_.attachHandler(handler))) + _env.metricsSystem.getServletHandlers.foreach(handler => ui.foreach(_.attachHandler(handler))) _eventLogger = if (isEventLogEnabled) { |