 core/src/test/scala/org/apache/spark/util/UtilsSuite.scala              | 3 ++-
 python/pyspark/context.py                                               | 4 ----
 repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala   | 2 --
 yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala | 2 +-
 4 files changed, 3 insertions(+), 8 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index e3a8e83f3e..df279b5a37 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -754,7 +754,8 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
 
   test("isDynamicAllocationEnabled") {
     val conf = new SparkConf()
-    conf.set("spark.master", "yarn-client")
+    conf.set("spark.master", "yarn")
+    conf.set("spark.submit.deployMode", "client")
     assert(Utils.isDynamicAllocationEnabled(conf) === false)
     assert(Utils.isDynamicAllocationEnabled(
       conf.set("spark.dynamicAllocation.enabled", "false")) === false)
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index aec0215b40..7217a9907a 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -155,10 +155,6 @@ class SparkContext(object):
         self.appName = self._conf.get("spark.app.name")
         self.sparkHome = self._conf.get("spark.home", None)
 
-        # Let YARN know it's a pyspark app, so it distributes needed libraries.
-        if self.master == "yarn-client":
-            self._conf.set("spark.yarn.isPython", "true")
-
         for (k, v) in self._conf.getAll():
             if k.startswith("spark.executorEnv."):
                 varName = k[len("spark.executorEnv."):]
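
The removed block matched the literal "yarn-client" master string, which can
no longer occur once the master and deploy mode are separate settings. A
hedged sketch of the conf-based check that replaces such string matching
(isYarnClientMode is a hypothetical helper, not Spark's API; "client" is
Spark's default deploy mode when the key is unset):

    // Hypothetical helper: detect YARN client mode from the split settings
    // rather than from the retired "yarn-client" master string.
    import org.apache.spark.SparkConf

    def isYarnClientMode(conf: SparkConf): Boolean =
      conf.get("spark.master", "") == "yarn" &&
        conf.get("spark.submit.deployMode", "client") == "client"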
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 8fcab386ec..e871004173 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -943,8 +943,6 @@ class SparkILoop(
   })
 
   private def process(settings: Settings): Boolean = savingContextLoader {
-    if (getMaster() == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
-
     this.settings = settings
     createInterpreter()
 
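
Note that SPARK_YARN_MODE here is a JVM system property, not an environment
variable. For illustration only, a sketch of how such a flag is set and read
in Scala:

    // Setting and reading a JVM system-property flag via sys.props.
    sys.props("SPARK_YARN_MODE") = "true"
    val yarnMode = sys.props.get("SPARK_YARN_MODE").exists(_ == "true")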
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 4ce33e0e85..6b20dea590 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -312,7 +312,7 @@ private object YarnClusterDriver extends Logging with Matchers {
 
     // If we are running in yarn-cluster mode, verify that the driver log links
     // are present and in the expected format.
-    if (conf.get("spark.master") == "yarn-cluster") {
+    if (conf.get("spark.submit.deployMode") == "cluster") {
       assert(listener.driverLogs.nonEmpty)
       val driverLogs = listener.driverLogs.get
       assert(driverLogs.size === 2)
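
A minimal sketch of the branch the test now takes, assuming "client" as the
default when spark.submit.deployMode is unset (Spark's documented default):

    // Branching on the deploy mode rather than on the master string.
    import org.apache.spark.SparkConf

    val conf = new SparkConf().set("spark.submit.deployMode", "cluster")
    conf.get("spark.submit.deployMode", "client") match {
      case "cluster" => println("driver runs inside the YARN application master")
      case _         => println("driver runs in the submitting process")
    }

In cluster mode YARN serves the driver's stdout and stderr, which is why the
assertion above expects exactly two log links.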