about summary refs log tree commit diff
path: root/core
diff options
context:
space:
mode:
authorJosh Rosen <joshrosen@databricks.com>2015-11-30 13:41:52 -0800
committerJosh Rosen <joshrosen@databricks.com>2015-11-30 13:42:35 -0800
commit2c5dee0fb8e4d1734ea3a0f22e0b5bfd2f6dba46 (patch)
tree656ca8ee9be599c811cabbd9a920cb2dd88df7d6 /core
parentf2fbfa444f6e8d27953ec2d1c0b3abd603c963f9 (diff)
downloadspark-2c5dee0fb8e4d1734ea3a0f22e0b5bfd2f6dba46.tar.gz
spark-2c5dee0fb8e4d1734ea3a0f22e0b5bfd2f6dba46.tar.bz2
spark-2c5dee0fb8e4d1734ea3a0f22e0b5bfd2f6dba46.zip
Revert "[SPARK-11206] Support SQL UI on the history server"
This reverts commit cc243a079b1c039d6e7f0b410d1654d94a090e14 / PR #9297. I'm reverting this because it broke SQLListenerMemoryLeakSuite in the master Maven builds. See #9991 for a discussion of why this broke the tests.
Diffstat (limited to 'core')
-rw-r--r--core/src/main/java/org/apache/spark/JavaSparkListener.java3
-rw-r--r--core/src/main/java/org/apache/spark/SparkFirehoseListener.java4
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala24
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/ui/SparkUI.scala16
-rw-r--r--core/src/main/scala/org/apache/spark/util/JsonProtocol.scala11
7 files changed, 6 insertions, 57 deletions
diff --git a/core/src/main/java/org/apache/spark/JavaSparkListener.java b/core/src/main/java/org/apache/spark/JavaSparkListener.java
index 23bc9a2e81..fa9acf0a15 100644
--- a/core/src/main/java/org/apache/spark/JavaSparkListener.java
+++ b/core/src/main/java/org/apache/spark/JavaSparkListener.java
@@ -82,7 +82,4 @@ public class JavaSparkListener implements SparkListener {
@Override
public void onBlockUpdated(SparkListenerBlockUpdated blockUpdated) { }
- @Override
- public void onOtherEvent(SparkListenerEvent event) { }
-
}
diff --git a/core/src/main/java/org/apache/spark/SparkFirehoseListener.java b/core/src/main/java/org/apache/spark/SparkFirehoseListener.java
index e6b24afd88..1214d05ba6 100644
--- a/core/src/main/java/org/apache/spark/SparkFirehoseListener.java
+++ b/core/src/main/java/org/apache/spark/SparkFirehoseListener.java
@@ -118,8 +118,4 @@ public class SparkFirehoseListener implements SparkListener {
onEvent(blockUpdated);
}
- @Override
- public void onOtherEvent(SparkListenerEvent event) {
- onEvent(event);
- }
}
diff --git a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
index eaa07acc51..000a021a52 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
@@ -207,10 +207,6 @@ private[spark] class EventLoggingListener(
// No-op because logging every update would be overkill
override def onExecutorMetricsUpdate(event: SparkListenerExecutorMetricsUpdate): Unit = { }
- override def onOtherEvent(event: SparkListenerEvent): Unit = {
- logEvent(event, flushLogger = true)
- }
-
/**
* Stop logging events. The event log file will be renamed so that it loses the
* ".inprogress" suffix.
diff --git a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
index 075a7f1317..896f174333 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
@@ -22,19 +22,15 @@ import java.util.Properties
import scala.collection.Map
import scala.collection.mutable
-import com.fasterxml.jackson.annotation.JsonTypeInfo
-
-import org.apache.spark.{Logging, SparkConf, TaskEndReason}
+import org.apache.spark.{Logging, TaskEndReason}
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.storage.{BlockManagerId, BlockUpdatedInfo}
import org.apache.spark.util.{Distribution, Utils}
-import org.apache.spark.ui.SparkUI
@DeveloperApi
-@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = "Event")
-trait SparkListenerEvent
+sealed trait SparkListenerEvent
@DeveloperApi
case class SparkListenerStageSubmitted(stageInfo: StageInfo, properties: Properties = null)
@@ -135,17 +131,6 @@ case class SparkListenerApplicationEnd(time: Long) extends SparkListenerEvent
private[spark] case class SparkListenerLogStart(sparkVersion: String) extends SparkListenerEvent
/**
- * Interface for creating history listeners defined in other modules like SQL, which are used to
- * rebuild the history UI.
- */
-private[spark] trait SparkHistoryListenerFactory {
- /**
- * Create listeners used to rebuild the history UI.
- */
- def createListeners(conf: SparkConf, sparkUI: SparkUI): Seq[SparkListener]
-}
-
-/**
* :: DeveloperApi ::
* Interface for listening to events from the Spark scheduler. Note that this is an internal
* interface which might change in different Spark releases. Java clients should extend
@@ -238,11 +223,6 @@ trait SparkListener {
* Called when the driver receives a block update info.
*/
def onBlockUpdated(blockUpdated: SparkListenerBlockUpdated) { }
-
- /**
- * Called when other events like SQL-specific events are posted.
- */
- def onOtherEvent(event: SparkListenerEvent) { }
}
/**
diff --git a/core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala b/core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala
index 95722a0714..04afde33f5 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala
@@ -61,7 +61,6 @@ private[spark] trait SparkListenerBus extends ListenerBus[SparkListener, SparkLi
case blockUpdated: SparkListenerBlockUpdated =>
listener.onBlockUpdated(blockUpdated)
case logStart: SparkListenerLogStart => // ignore event log metadata
- case _ => listener.onOtherEvent(event)
}
}
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index 8da6884a38..4608bce202 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -17,13 +17,10 @@
package org.apache.spark.ui
-import java.util.{Date, ServiceLoader}
-
-import scala.collection.JavaConverters._
+import java.util.Date
import org.apache.spark.status.api.v1.{ApiRootResource, ApplicationAttemptInfo, ApplicationInfo,
UIRoot}
-import org.apache.spark.util.Utils
import org.apache.spark.{Logging, SecurityManager, SparkConf, SparkContext}
import org.apache.spark.scheduler._
import org.apache.spark.storage.StorageStatusListener
@@ -157,16 +154,7 @@ private[spark] object SparkUI {
appName: String,
basePath: String,
startTime: Long): SparkUI = {
- val sparkUI = create(
- None, conf, listenerBus, securityManager, appName, basePath, startTime = startTime)
-
- val listenerFactories = ServiceLoader.load(classOf[SparkHistoryListenerFactory],
- Utils.getContextOrSparkClassLoader).asScala
- listenerFactories.foreach { listenerFactory =>
- val listeners = listenerFactory.createListeners(conf, sparkUI)
- listeners.foreach(listenerBus.addListener)
- }
- sparkUI
+ create(None, conf, listenerBus, securityManager, appName, basePath, startTime = startTime)
}
/**
diff --git a/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala b/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala
index 7f5d713ec6..c9beeb25e0 100644
--- a/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala
+++ b/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala
@@ -19,21 +19,19 @@ package org.apache.spark.util
import java.util.{Properties, UUID}
+import org.apache.spark.scheduler.cluster.ExecutorInfo
+
import scala.collection.JavaConverters._
import scala.collection.Map
-import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.json4s.DefaultFormats
import org.json4s.JsonDSL._
import org.json4s.JsonAST._
-import org.json4s.jackson.JsonMethods._
import org.apache.spark._
import org.apache.spark.executor._
import org.apache.spark.rdd.RDDOperationScope
import org.apache.spark.scheduler._
-import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.storage._
/**
@@ -56,8 +54,6 @@ private[spark] object JsonProtocol {
private implicit val format = DefaultFormats
- private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)
-
/** ------------------------------------------------- *
* JSON serialization methods for SparkListenerEvents |
* -------------------------------------------------- */
@@ -100,7 +96,6 @@ private[spark] object JsonProtocol {
executorMetricsUpdateToJson(metricsUpdate)
case blockUpdated: SparkListenerBlockUpdated =>
throw new MatchError(blockUpdated) // TODO(ekl) implement this
- case _ => parse(mapper.writeValueAsString(event))
}
}
@@ -516,8 +511,6 @@ private[spark] object JsonProtocol {
case `executorRemoved` => executorRemovedFromJson(json)
case `logStart` => logStartFromJson(json)
case `metricsUpdate` => executorMetricsUpdateFromJson(json)
- case other => mapper.readValue(compact(render(json)), Utils.classForName(other))
- .asInstanceOf[SparkListenerEvent]
}
}