 core/src/main/scala/org/apache/spark/SparkContext.scala | 9 ++-------
 project/MimaExcludes.scala                               | 6 +++++-
 2 files changed, 7 insertions(+), 8 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index c001df31aa..cd7eed382e 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -297,9 +297,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
val sparkUser = Utils.getCurrentUserName()
private[spark] def schedulerBackend: SchedulerBackend = _schedulerBackend
- private[spark] def schedulerBackend_=(sb: SchedulerBackend): Unit = {
- _schedulerBackend = sb
- }
private[spark] def taskScheduler: TaskScheduler = _taskScheduler
private[spark] def taskScheduler_=(ts: TaskScheduler): Unit = {
@@ -322,8 +319,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
def applicationId: String = _applicationId
def applicationAttemptId: Option[String] = _applicationAttemptId
- def metricsSystem: MetricsSystem = if (_env != null) _env.metricsSystem else null
-
private[spark] def eventLogger: Option[EventLoggingListener] = _eventLogger
private[spark] def executorAllocationManager: Option[ExecutorAllocationManager] =
@@ -514,9 +509,9 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
// The metrics system for Driver need to be set spark.app.id to app ID.
// So it should start after we get app ID from the task scheduler and set spark.app.id.
- metricsSystem.start()
+ _env.metricsSystem.start()
// Attach the driver metrics servlet handler to the web ui after the metrics system is started.
- metricsSystem.getServletHandlers.foreach(handler => ui.foreach(_.attachHandler(handler)))
+ _env.metricsSystem.getServletHandlers.foreach(handler => ui.foreach(_.attachHandler(handler)))
_eventLogger =
if (isEventLogEnabled) {
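
The hunks above drop the public metricsSystem getter and the private[spark] schedulerBackend setter; the driver now reaches the metrics system through its SparkEnv (_env.metricsSystem). A minimal sketch of how code living in the org.apache.spark package (so private[spark] members remain visible) might migrate off the removed getter; the object and method names here are illustrative, not part of the commit:

package org.apache.spark

// Illustrative migration sketch: code that previously called sc.metricsSystem
// now obtains the driver's MetricsSystem through the SparkEnv held by the context.
object MetricsMigrationSketch {
  def attachMetricsHandlers(sc: SparkContext): Unit = {
    // Before this change: sc.metricsSystem.getServletHandlers ...
    // After: go through the context's SparkEnv, mirroring _env.metricsSystem above.
    val metrics = sc.env.metricsSystem
    metrics.getServletHandlers.foreach(handler => sc.ui.foreach(_.attachHandler(handler)))
  }
}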
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 97a1e8b433..746223f39e 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -261,9 +261,13 @@ object MimaExcludes {
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.graphx.Graph.mapReduceTriplets"),
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.graphx.Graph.mapReduceTriplets$default$3"),
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.graphx.impl.GraphImpl.mapReduceTriplets")
- ) ++Seq(
+ ) ++ Seq(
// SPARK-13426 Remove the support of SIMR
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkMasterRegex.SIMR_REGEX")
+ ) ++ Seq(
+ // SPARK-13413 Remove SparkContext.metricsSystem/schedulerBackend_ setter
+ ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkContext.metricsSystem"),
+ ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkContext.schedulerBackend_=")
)
case v if v.startsWith("1.6") =>
Seq(
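
The MimaExcludes change follows the file's usual pattern: each removed public member gets a ProblemFilters entry, grouped per JIRA ticket and concatenated with ++ Seq(...). A standalone sketch of that pattern (the object name is illustrative; the filters mirror the ones added above):

import com.typesafe.tools.mima.core._
import com.typesafe.tools.mima.core.ProblemFilters._

// Illustrative sketch of the MimaExcludes pattern: per-ticket Seq blocks of
// ProblemFilters entries joined with ++, one entry per removed member.
object ExcludesSketch {
  val excludes =
    Seq(
      // SPARK-13426 Remove the support of SIMR
      exclude[MissingMethodProblem]("org.apache.spark.SparkMasterRegex.SIMR_REGEX")
    ) ++ Seq(
      // SPARK-13413 Remove SparkContext.metricsSystem/schedulerBackend_= setter
      exclude[MissingMethodProblem]("org.apache.spark.SparkContext.metricsSystem"),
      exclude[MissingMethodProblem]("org.apache.spark.SparkContext.schedulerBackend_=")
    )
}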