From f4fd7432fb9cf7b197ccada1378c4f2a6d427522 Mon Sep 17 00:00:00 2001
From: "peng.zhang"
Date: Fri, 24 Jun 2016 08:28:32 +0100
Subject: [SPARK-16125][YARN] Fix YarnClusterSuite to test yarn-cluster mode
 correctly

## What changes were proposed in this pull request?

Since SPARK-13220 (Deprecate "yarn-client" and "yarn-cluster"), the master is set to "yarn" and the deploy mode is carried separately in "spark.submit.deployMode". YarnClusterSuite still checked for a master of "yarn-cluster", so that check never matched and the suite did not verify yarn-cluster mode correctly. This pull request switches the check to the deploy mode and removes the remaining code paths that keyed off the deprecated master strings.
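For reference, the configuration convention the updated code relies on looks like the following minimal sketch (the `SparkConf` calls are Spark's standard API; the surrounding context is illustrative only):

```scala
import org.apache.spark.SparkConf

// Deprecated by SPARK-13220: encoding the deploy mode in the master string,
// e.g. conf.set("spark.master", "yarn-cluster").
// Current convention: set the master and the deploy mode separately.
val conf = new SparkConf()
  .set("spark.master", "yarn")
  .set("spark.submit.deployMode", "cluster") // or "client"

// Code that must distinguish the two modes should branch on the deploy mode,
// not on the master string (this is what the suite fix below does).
if (conf.get("spark.submit.deployMode") == "cluster") {
  // cluster-mode-only checks, e.g. validating driver log links
}
```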
## How was this patch tested?

Unit test.

Author: peng.zhang

Closes #13836 from renozhang/SPARK-16125-test-yarn-cluster-mode.
---
 core/src/test/scala/org/apache/spark/util/UtilsSuite.scala            | 3 ++-
 python/pyspark/context.py                                             | 4 ----
 repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala | 2 --
 .../test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala    | 2 +-
 4 files changed, 3 insertions(+), 8 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index e3a8e83f3e..df279b5a37 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -754,7 +754,8 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
 
   test("isDynamicAllocationEnabled") {
     val conf = new SparkConf()
-    conf.set("spark.master", "yarn-client")
+    conf.set("spark.master", "yarn")
+    conf.set("spark.submit.deployMode", "client")
     assert(Utils.isDynamicAllocationEnabled(conf) === false)
     assert(Utils.isDynamicAllocationEnabled(
       conf.set("spark.dynamicAllocation.enabled", "false")) === false)
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index aec0215b40..7217a9907a 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -155,10 +155,6 @@ class SparkContext(object):
         self.appName = self._conf.get("spark.app.name")
         self.sparkHome = self._conf.get("spark.home", None)
 
-        # Let YARN know it's a pyspark app, so it distributes needed libraries.
-        if self.master == "yarn-client":
-            self._conf.set("spark.yarn.isPython", "true")
-
         for (k, v) in self._conf.getAll():
             if k.startswith("spark.executorEnv."):
                 varName = k[len("spark.executorEnv."):]
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 8fcab386ec..e871004173 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -943,8 +943,6 @@ class SparkILoop(
   })
 
   private def process(settings: Settings): Boolean = savingContextLoader {
-    if (getMaster() == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
-
     this.settings = settings
     createInterpreter()
 
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 4ce33e0e85..6b20dea590 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -312,7 +312,7 @@ private object YarnClusterDriver extends Logging with Matchers {
 
     // If we are running in yarn-cluster mode, verify that the driver log links are present
     // and in the expected format.
-    if (conf.get("spark.master") == "yarn-cluster") {
+    if (conf.get("spark.submit.deployMode") == "cluster") {
       assert(listener.driverLogs.nonEmpty)
       val driverLogs = listener.driverLogs.get
       assert(driverLogs.size === 2)