aboutsummaryrefslogtreecommitdiff
path: root/yarn/src/test/scala/org
diff options
context:
space:
mode:
authorKishor Patil <kpatil@yahoo-inc.com>2016-11-08 12:13:09 -0600
committerTom Graves <tgraves@yahoo-inc.com>2016-11-08 12:13:09 -0600
commit245e5a2f80e3195b7f8a38b480b29bfc23af66bf (patch)
treeae9a6ac9dd7c6c801a81320576d288e91d45cf46 /yarn/src/test/scala/org
parent9c419698fe110a805570031cac3387a51957d9d1 (diff)
downloadspark-245e5a2f80e3195b7f8a38b480b29bfc23af66bf.tar.gz
spark-245e5a2f80e3195b7f8a38b480b29bfc23af66bf.tar.bz2
spark-245e5a2f80e3195b7f8a38b480b29bfc23af66bf.zip
[SPARK-18357] Fix yarn files/archive broken issue and unit tests
## What changes were proposed in this pull request? PR #15627 broke functionality such that yarn --files and --archives do not accept any files. This patch ensures that --files and --archives accept unique files. ## How was this patch tested? A. I added unit tests. B. Also, manually tested --files with --archives to throw an exception if duplicate files are specified and continue if unique files are specified. Author: Kishor Patil <kpatil@yahoo-inc.com> Closes #15810 from kishorvpatil/SPARK18357.
Diffstat (limited to 'yarn/src/test/scala/org')
-rw-r--r--yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala17
1 files changed, 17 insertions, 0 deletions
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
index 06516c1baf..7deaf0af94 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
@@ -322,6 +322,23 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
intercept[IllegalArgumentException] {
clientArchives.prepareLocalResources(new Path(tempDirForArchives.getAbsolutePath()), Nil)
}
+
+ // Case 4: FILES_TO_DISTRIBUTE can have unique file.
+ val sparkConfFilesUniq = new SparkConfWithEnv(Map("SPARK_HOME" -> libs.getAbsolutePath))
+ .set(FILES_TO_DISTRIBUTE, Seq(testJar.getPath))
+
+ val clientFilesUniq = createClient(sparkConfFilesUniq)
+ val tempDirForFilesUniq = Utils.createTempDir()
+ clientFilesUniq.prepareLocalResources(new Path(tempDirForFilesUniq.getAbsolutePath()), Nil)
+
+ // Case 5: ARCHIVES_TO_DISTRIBUTE can have unique file.
+ val sparkConfArchivesUniq = new SparkConfWithEnv(Map("SPARK_HOME" -> libs.getAbsolutePath))
+ .set(ARCHIVES_TO_DISTRIBUTE, Seq(testJar.getPath))
+
+ val clientArchivesUniq = createClient(sparkConfArchivesUniq)
+ val tempDirArchivesUniq = Utils.createTempDir()
+ clientArchivesUniq.prepareLocalResources(new Path(tempDirArchivesUniq.getAbsolutePath()), Nil)
+
}
test("distribute local spark jars") {