author    peng.zhang <peng.zhang@xiaomi.com>  2016-06-24 08:28:32 +0100
committer Sean Owen <sowen@cloudera.com>      2016-06-24 08:28:32 +0100
commit    f4fd7432fb9cf7b197ccada1378c4f2a6d427522 (patch)
tree      d63d5af02e5d87ecde98e891b278c467279d8ba6
parent    2d2f607bfae97f2681df24f48bb8b1b483c6b309 (diff)
[SPARK-16125][YARN] Fix YarnClusterSuite to test yarn-cluster mode correctly
## What changes were proposed in this pull request?

Since SPARK-13220 (Deprecate "yarn-client" and "yarn-cluster"), YarnClusterSuite has not tested "yarn cluster" mode correctly: the suite checked the old combined master strings, which no longer appear once master and deploy mode are configured separately. This pull request fixes it.

## How was this patch tested?

Unit test.

Author: peng.zhang <peng.zhang@xiaomi.com>

Closes #13836 from renozhang/SPARK-16125-test-yarn-cluster-mode.
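For readers hitting the deprecation for the first time, here is a minimal before/after sketch of the configuration change the diffs below revolve around. The conf keys come straight from this patch; the surrounding object is illustrative only:

```scala
import org.apache.spark.SparkConf

object YarnModeConfSketch {
  def main(args: Array[String]): Unit = {
    // Deprecated since SPARK-13220: deploy mode baked into the master string.
    //   new SparkConf().set("spark.master", "yarn-client")
    //   new SparkConf().set("spark.master", "yarn-cluster")

    // Current form: master and deploy mode are two separate settings.
    val conf = new SparkConf(loadDefaults = false)
      .set("spark.master", "yarn")
      .set("spark.submit.deployMode", "cluster") // or "client"

    println(conf.get("spark.master"))            // yarn
    println(conf.get("spark.submit.deployMode")) // cluster
  }
}
```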
 core/src/test/scala/org/apache/spark/util/UtilsSuite.scala              | 3 ++-
 python/pyspark/context.py                                               | 4 ----
 repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala   | 2 --
 yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala | 2 +-
 4 files changed, 3 insertions(+), 8 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index e3a8e83f3e..df279b5a37 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -754,7 +754,8 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
 
   test("isDynamicAllocationEnabled") {
     val conf = new SparkConf()
-    conf.set("spark.master", "yarn-client")
+    conf.set("spark.master", "yarn")
+    conf.set("spark.submit.deployMode", "client")
     assert(Utils.isDynamicAllocationEnabled(conf) === false)
     assert(Utils.isDynamicAllocationEnabled(
       conf.set("spark.dynamicAllocation.enabled", "false")) === false)
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index aec0215b40..7217a9907a 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -155,10 +155,6 @@ class SparkContext(object):
         self.appName = self._conf.get("spark.app.name")
         self.sparkHome = self._conf.get("spark.home", None)
 
-        # Let YARN know it's a pyspark app, so it distributes needed libraries.
-        if self.master == "yarn-client":
-            self._conf.set("spark.yarn.isPython", "true")
-
         for (k, v) in self._conf.getAll():
             if k.startswith("spark.executorEnv."):
                 varName = k[len("spark.executorEnv."):]
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 8fcab386ec..e871004173 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -943,8 +943,6 @@ class SparkILoop(
     })
 
   private def process(settings: Settings): Boolean = savingContextLoader {
-    if (getMaster() == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
-
     this.settings = settings
     createInterpreter()
 
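Both deletions above (here and in context.py) remove branches that went dead once SPARK-13220 landed: for YARN apps the master string is now plain "yarn", so an equality test against the old combined form can never match. A trivial illustration, with the master value assumed for the YARN case:

```scala
object DeadBranchSketch {
  def main(args: Array[String]): Unit = {
    val master = "yarn" // what the master string looks like after SPARK-13220
    if (master == "yarn-client") {
      // Unreachable for YARN apps now; this is why the branch was deleted.
      sys.error("never happens")
    }
    println(s"master=$master; the deploy mode is carried in a separate key")
  }
}
```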
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 4ce33e0e85..6b20dea590 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -312,7 +312,7 @@ private object YarnClusterDriver extends Logging with Matchers {
 
       // If we are running in yarn-cluster mode, verify that driver logs links and present and are
       // in the expected format.
-      if (conf.get("spark.master") == "yarn-cluster") {
+      if (conf.get("spark.submit.deployMode") == "cluster") {
         assert(listener.driverLogs.nonEmpty)
         val driverLogs = listener.driverLogs.get
         assert(driverLogs.size === 2)
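The pattern the suite now follows generalizes: code that needs to know whether it runs in cluster mode reads spark.submit.deployMode instead of parsing the master string. A minimal sketch with a hypothetical helper (not part of Spark's API):

```scala
import org.apache.spark.SparkConf

object DeployModeSketch {
  // Hypothetical helper, not part of Spark: after SPARK-13220, cluster-mode
  // detection is a lookup of spark.submit.deployMode, defaulting to "client".
  def isYarnClusterMode(conf: SparkConf): Boolean =
    conf.get("spark.master", "") == "yarn" &&
      conf.get("spark.submit.deployMode", "client") == "cluster"

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(loadDefaults = false)
      .set("spark.master", "yarn")
      .set("spark.submit.deployMode", "cluster")
    assert(isYarnClusterMode(conf))
  }
}
```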