diff options
author | Ryan Williams <ryan.blake.williams@gmail.com> | 2015-01-25 14:20:02 -0800 |
---|---|---|
committer | Andrew Or <andrew@databricks.com> | 2015-01-25 14:21:21 -0800 |
commit | c573af4f1d3022a4712458a4b3b5061a8017e040 (patch) | |
tree | 809f2aca22bcb978f2c669923866c4f5e666ae34 | |
parent | 1f8b7186813643afdb592131bfd89228ef971e74 (diff) | |
download | spark-c573af4f1d3022a4712458a4b3b5061a8017e040.tar.gz spark-c573af4f1d3022a4712458a4b3b5061a8017e040.tar.bz2 spark-c573af4f1d3022a4712458a4b3b5061a8017e040.zip |
[SPARK-5402] log executor ID at executor-construction time
also rename "slaveHostname" to "executorHostname"
Author: Ryan Williams <ryan.blake.williams@gmail.com>
Closes #4195 from ryan-williams/exec and squashes the following commits:
e60a7bb [Ryan Williams] log executor ID at executor-construction time
(cherry picked from commit aea25482c370fbcf712a464501605bc16ee4ed5d)
Signed-off-by: Andrew Or <andrew@databricks.com>
Conflicts:
core/src/main/scala/org/apache/spark/executor/Executor.scala
-rw-r--r-- | core/src/main/scala/org/apache/spark/executor/Executor.scala | 13 |
1 file changed, 8 insertions, 5 deletions
diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala index da030f231f..eaf0c82d52 100644 --- a/core/src/main/scala/org/apache/spark/executor/Executor.scala +++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala @@ -41,13 +41,16 @@ import org.apache.spark.util.{SparkUncaughtExceptionHandler, AkkaUtils, Utils} */ private[spark] class Executor( executorId: String, - slaveHostname: String, + executorHostname: String, properties: Seq[(String, String)], numCores: Int, isLocal: Boolean = false, actorSystem: ActorSystem = null) extends Logging { + + logInfo(s"Starting executor ID $executorId on host $executorHostname") + // Application dependencies (added through SparkContext) that we've fetched so far on this node. // Each map holds the master's timestamp for the version of that file or JAR we got. private val currentFiles: HashMap[String, Long] = new HashMap[String, Long]() @@ -58,12 +61,12 @@ private[spark] class Executor( @volatile private var isStopped = false // No ip or host:port - just hostname - Utils.checkHost(slaveHostname, "Expected executed slave to be a hostname") + Utils.checkHost(executorHostname, "Expected executed slave to be a hostname") // must not have port specified. 
- assert (0 == Utils.parseHostPort(slaveHostname)._2) + assert (0 == Utils.parseHostPort(executorHostname)._2) // Make sure the local hostname we report matches the cluster scheduler's name for this host - Utils.setCustomHostname(slaveHostname) + Utils.setCustomHostname(executorHostname) // Set spark.* properties from executor arg val conf = new SparkConf(true) @@ -84,7 +87,7 @@ private[spark] class Executor( if (!isLocal) { val port = conf.getInt("spark.executor.port", 0) val _env = SparkEnv.createExecutorEnv( - conf, executorId, slaveHostname, port, numCores, isLocal, actorSystem) + conf, executorId, executorHostname, port, numCores, isLocal, actorSystem) SparkEnv.set(_env) _env.metricsSystem.registerSource(executorSource) _env.blockManager.initialize(conf.getAppId) |