diff options
-rw-r--r-- | docs/running-on-yarn.md | 7 | ||||
-rw-r--r-- | yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala | 3 |
2 files changed, 9 insertions, 1 deletions
diff --git a/docs/running-on-yarn.md b/docs/running-on-yarn.md index b6701b64c2..4fb4a90307 100644 --- a/docs/running-on-yarn.md +++ b/docs/running-on-yarn.md @@ -134,6 +134,13 @@ Most of the configs are the same for Spark on YARN as for other deployment modes </td> </tr> <tr> + <td><code>spark.yarn.am.port</code></td> + <td>(random)</td> + <td> + Port for the YARN Application Master to listen on. In YARN client mode, this is used to communicate between the Spark driver running on a gateway and the Application Master running on YARN. In YARN cluster mode, this is used for the dynamic executor feature, where it handles kill requests from the scheduler backend. + </td> +</tr> +<tr> <td><code>spark.yarn.queue</code></td> <td>default</td> <td> diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala index e1694c1f64..29752969e6 100644 --- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala +++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala @@ -285,7 +285,8 @@ private[spark] class ApplicationMaster( } private def runExecutorLauncher(securityMgr: SecurityManager): Unit = { - rpcEnv = RpcEnv.create("sparkYarnAM", Utils.localHostName, 0, sparkConf, securityMgr) + val port = sparkConf.getInt("spark.yarn.am.port", 0) + rpcEnv = RpcEnv.create("sparkYarnAM", Utils.localHostName, port, sparkConf, securityMgr) waitForSparkDriver() addAmIpFilter() registerAM(sparkConf.get("spark.driver.appUIAddress", ""), securityMgr) |