aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorReynold Xin <rxin@databricks.com>2016-02-22 14:01:35 -0800
committerJosh Rosen <joshrosen@databricks.com>2016-02-22 14:01:35 -0800
commit4a91806a45a48432c3ea4c2aaa553177952673e9 (patch)
tree23ae670fd286bd0f7ae827579cb464534bddb520
parent00461bb911c31aff9c945a14e23df2af4c280c23 (diff)
downloadspark-4a91806a45a48432c3ea4c2aaa553177952673e9.tar.gz
spark-4a91806a45a48432c3ea4c2aaa553177952673e9.tar.bz2
spark-4a91806a45a48432c3ea4c2aaa553177952673e9.zip
[SPARK-13413] Remove SparkContext.metricsSystem
## What changes were proposed in this pull request? This patch removes SparkContext.metricsSystem. SparkContext.metricsSystem returns MetricsSystem, which is a private class. I think it was added by accident. In addition, I also removed an unused private[spark] method schedulerBackend setter. ## How was this patch tested? N/A. Author: Reynold Xin <rxin@databricks.com> This patch had conflicts when merged, resolved by Committer: Josh Rosen <joshrosen@databricks.com> Closes #11282 from rxin/SPARK-13413.
-rw-r--r--core/src/main/scala/org/apache/spark/SparkContext.scala9
-rw-r--r--project/MimaExcludes.scala6
2 files changed, 7 insertions, 8 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index c001df31aa..cd7eed382e 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -297,9 +297,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
val sparkUser = Utils.getCurrentUserName()
private[spark] def schedulerBackend: SchedulerBackend = _schedulerBackend
- private[spark] def schedulerBackend_=(sb: SchedulerBackend): Unit = {
- _schedulerBackend = sb
- }
private[spark] def taskScheduler: TaskScheduler = _taskScheduler
private[spark] def taskScheduler_=(ts: TaskScheduler): Unit = {
@@ -322,8 +319,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
def applicationId: String = _applicationId
def applicationAttemptId: Option[String] = _applicationAttemptId
- def metricsSystem: MetricsSystem = if (_env != null) _env.metricsSystem else null
-
private[spark] def eventLogger: Option[EventLoggingListener] = _eventLogger
private[spark] def executorAllocationManager: Option[ExecutorAllocationManager] =
@@ -514,9 +509,9 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
// The metrics system for Driver need to be set spark.app.id to app ID.
// So it should start after we get app ID from the task scheduler and set spark.app.id.
- metricsSystem.start()
+ _env.metricsSystem.start()
// Attach the driver metrics servlet handler to the web ui after the metrics system is started.
- metricsSystem.getServletHandlers.foreach(handler => ui.foreach(_.attachHandler(handler)))
+ _env.metricsSystem.getServletHandlers.foreach(handler => ui.foreach(_.attachHandler(handler)))
_eventLogger =
if (isEventLogEnabled) {
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 97a1e8b433..746223f39e 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -261,9 +261,13 @@ object MimaExcludes {
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.graphx.Graph.mapReduceTriplets"),
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.graphx.Graph.mapReduceTriplets$default$3"),
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.graphx.impl.GraphImpl.mapReduceTriplets")
- ) ++Seq(
+ ) ++ Seq(
// SPARK-13426 Remove the support of SIMR
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkMasterRegex.SIMR_REGEX")
+ ) ++ Seq(
+ // SPARK-13413 Remove SparkContext.metricsSystem/schedulerBackend_ setter
+ ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkContext.metricsSystem"),
+ ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkContext.schedulerBackend_=")
)
case v if v.startsWith("1.6") =>
Seq(