author    Andrew Or <andrewor14@gmail.com>    2014-07-08 17:35:31 -0700
committer Reynold Xin <rxin@apache.org>       2014-07-08 17:35:31 -0700
commit bf04a390e40d60aa7fcc551501d25f3f9d38377c (patch)
tree   4f929249898687b278f70c0854dceba865701b97
parent e6f7bfcfbf6aff7a9f8cd8e0a2166d0bf62b0912 (diff)
[SPARK-2392] Executors should not start their own HTTP servers
Executors currently start their own unused HTTP file servers. This is because we use the same SparkEnv class for both executors and drivers, and we do not distinguish between the two cases. In the longer term, we should separate out SparkEnv for the driver and SparkEnv for the executors.

Author: Andrew Or <andrewor14@gmail.com>

Closes #1335 from andrewor14/executor-http-server and squashes the following commits:

46ef263 [Andrew Or] Start HTTP server only on the driver
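The shared stop() path now has to tolerate a null httpFileServer on executors. A minimal, runnable sketch of the null-guard idiom the patch uses is below; the names (Server, NullGuardDemo) are illustrative, not Spark's API:

object NullGuardDemo extends App {
  class Server { def stop(): Unit = println("stopped") }

  val started: Server = new Server   // driver-style: resource exists
  val missing: Server = null         // executor-style: never created

  // Option(x) is None when x is null, so .foreach(_.stop()) is a
  // no-op instead of throwing a NullPointerException.
  Option(started).foreach(_.stop())  // prints "stopped"
  Option(missing).foreach(_.stop())  // does nothing
}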
-rw-r--r-- core/src/main/scala/org/apache/spark/SparkEnv.scala | 14 ++++++++++----
1 file changed, 10 insertions(+), 4 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 2b636b085d..8f70744d80 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -79,7 +79,7 @@ class SparkEnv (
   private[spark] def stop() {
     pythonWorkers.foreach { case(key, worker) => worker.stop() }
-    httpFileServer.stop()
+    Option(httpFileServer).foreach(_.stop())
     mapOutputTracker.stop()
     shuffleManager.stop()
     broadcastManager.stop()
@@ -228,9 +228,15 @@ object SparkEnv extends Logging {
     val cacheManager = new CacheManager(blockManager)
 
-    val httpFileServer = new HttpFileServer(securityManager)
-    httpFileServer.initialize()
-    conf.set("spark.fileserver.uri", httpFileServer.serverUri)
+    val httpFileServer =
+      if (isDriver) {
+        val server = new HttpFileServer(securityManager)
+        server.initialize()
+        conf.set("spark.fileserver.uri", server.serverUri)
+        server
+      } else {
+        null
+      }
 
     val metricsSystem = if (isDriver) {
       MetricsSystem.createMetricsSystem("driver", conf, securityManager)
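Taken together, the patch follows a driver-only-initialization pattern: create the resource only in the role that needs it, and make the shared shutdown path null-safe. A self-contained sketch of that pattern under hypothetical names (FileServer, Env; not Spark's real classes):

object DriverOnlyResourceDemo extends App {
  class FileServer {
    def initialize(): Unit = println("file server bound to a port")
    def serverUri: String = "http://driver-host:12345"
    def stop(): Unit = println("file server stopped")
  }

  class Env(isDriver: Boolean) {
    // Only the driver starts the server; executors hold a null
    // reference instead of an unused listening socket.
    val httpFileServer: FileServer =
      if (isDriver) {
        val server = new FileServer
        server.initialize()
        println(s"fileserver.uri -> ${server.serverUri}")
        server
      } else {
        null
      }

    // The shared stop() must tolerate the executor case, hence the guard.
    def stop(): Unit = Option(httpFileServer).foreach(_.stop())
  }

  new Env(isDriver = true).stop()   // binds, reports URI, then stops
  new Env(isDriver = false).stop()  // nothing started, nothing stopped
}

As the commit message notes, the nullable field is an interim fix; the longer-term direction is separate SparkEnv variants for the driver and the executors.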