author     jerryshao <sshao@hortonworks.com>       2016-03-31 10:27:33 -0700
committer  Marcelo Vanzin <vanzin@cloudera.com>    2016-03-31 10:27:33 -0700
commit     3b3cc76004438a942ecea752db39f3a904a52462 (patch)
tree       7e787aa82c5af237445c383410e010ad70903f07 /yarn/src/test/scala/org
parent     208fff3ac87f200fd4e6f0407d70bf81cf8c556f (diff)
[SPARK-14062][YARN] Fix log4j and upload metrics.properties automatically with distributed cache
## What changes were proposed in this pull request?

1. Currently a log4j configuration shipped through the distributed cache is added only to the AM's classpath, not the executors'. This behaviour was introduced in #9118 and breaks the original intent of that PR, so this change adds the log4j file to the classpath of both the AM and the executors.
2. Automatically upload metrics.properties to the distributed cache, so that it can be used implicitly by the remote driver and executors.

## How was this patch tested?

Unit tests and an integration test.

Author: jerryshao <sshao@hortonworks.com>

Closes #11885 from jerryshao/SPARK-14062.
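To illustrate the second point, here is a minimal, hypothetical sketch (not the actual Client.scala code) of how a local metrics.properties file could be located before being queued for the YARN distributed cache; the object and method names, and the fallback to $SPARK_CONF_DIR, are assumptions made for illustration.

```scala
import java.io.File

import org.apache.spark.SparkConf

// Hypothetical helper, not copied from Spark's Client.scala: locate a local
// metrics.properties so it can be shipped through the YARN distributed cache
// and picked up implicitly by the remote driver and executors.
object MetricsPropertiesSketch {
  def metricsFileToDistribute(sparkConf: SparkConf): Option[String] = {
    sparkConf.getOption("spark.metrics.conf")               // explicit setting wins
      .orElse(sys.env.get("SPARK_CONF_DIR")
        .map(dir => new File(dir, "metrics.properties").getAbsolutePath))
      .filter(path => new File(path).isFile)                // distribute only if it exists locally
  }
}
```

In the actual change, the resolved file would then be added to the resources the client uploads with the application, so it can be used remotely without extra user configuration.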
Diffstat (limited to 'yarn/src/test/scala/org')
-rw-r--r--  yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala  7
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
index e3613a93ed..64723c361c 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
@@ -121,7 +121,7 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
     val env = new MutableHashMap[String, String]()
     val args = new ClientArguments(Array("--jar", USER, "--addJars", ADDED), sparkConf)
-    populateClasspath(args, conf, sparkConf, env, true)
+    populateClasspath(args, conf, sparkConf, env)
     val cp = env("CLASSPATH").split(":|;|<CPS>")
     s"$SPARK,$USER,$ADDED".split(",").foreach({ entry =>
@@ -178,8 +178,7 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
       "/remotePath/1:/remotePath/2")
     val env = new MutableHashMap[String, String]()
-    populateClasspath(null, conf, sparkConf, env, false,
-      extraClassPath = Some("/localPath/my1.jar"))
+    populateClasspath(null, conf, sparkConf, env, extraClassPath = Some("/localPath/my1.jar"))
     val cp = classpath(env)
     cp should contain ("/remotePath/spark.jar")
     cp should contain ("/remotePath/my1.jar")
@@ -356,7 +355,7 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
   private def classpath(client: Client): Array[String] = {
     val env = new MutableHashMap[String, String]()
-    populateClasspath(null, client.hadoopConf, client.sparkConf, env, false)
+    populateClasspath(null, client.hadoopConf, client.sparkConf, env)
     classpath(env)
   }
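For context on the first point, the test updates above imply that the boolean flag distinguishing the AM from the executors was dropped from Client.populateClasspath, so both now go through the same classpath-population path. Below is a rough before/after sketch reconstructed from the call sites in this diff rather than from Client.scala; the method names, the parameter name isAM, and the use of AnyRef in place of ClientArguments are assumptions.

```scala
import scala.collection.mutable.HashMap

import org.apache.hadoop.conf.Configuration
import org.apache.spark.SparkConf

object ClasspathSignatureSketch {

  // Before (reconstructed): a boolean selected AM-only behaviour, which kept
  // the distributed-cache log4j file off the executors' classpath.
  def populateClasspathBefore(
      args: AnyRef,                    // ClientArguments in the real code
      conf: Configuration,
      sparkConf: SparkConf,
      env: HashMap[String, String],
      isAM: Boolean,                   // assumed parameter name
      extraClassPath: Option[String] = None): Unit = ???

  // After (reconstructed): the flag is gone, so the AM and the executors share
  // one code path and both see the cached log4j configuration.
  def populateClasspathAfter(
      args: AnyRef,                    // ClientArguments in the real code
      conf: Configuration,
      sparkConf: SparkConf,
      env: HashMap[String, String],
      extraClassPath: Option[String] = None): Unit = ???
}
```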