author     Burak Yavuz <brkyvz@gmail.com>     2015-06-29 17:27:02 -0700
committer  Andrew Or <andrew@databricks.com>  2015-06-29 17:27:08 -0700
commit     c0fbd6781eb47e930af0f1009779e36da85a6b65 (patch)
tree       bd2791a21eff28c0c777e82aaf9f42b9bf22e5a7
parent     b2684557fa0d2ec14b7529324443c8154d81c348 (diff)
[SPARK-8410] [SPARK-8475] remove previous ivy resolution when using spark-submit
This PR also re-orders the repositories used when resolving packages: user-provided repositories are now prioritized.

cc andrewor14

Author: Burak Yavuz <brkyvz@gmail.com>

Closes #7089 from brkyvz/delete-prev-ivy-resolution and squashes the following commits:

a21f95a [Burak Yavuz] remove previous ivy resolution when using spark-submit

(cherry picked from commit d7f796da45d9a7c76ee4c29a9e0661ef76d8028a)
Signed-off-by: Andrew Or <andrew@databricks.com>
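For context, a minimal sketch of the ordering behavior this commit changes: Ivy's ChainResolver consults its resolvers in the order they were added, so registering user-supplied repositories before Maven Central makes them win whenever an artifact is published in both places. The repository URL below is hypothetical.

import org.apache.ivy.plugins.resolver.{ChainResolver, IBiblioResolver}

// Resolvers are consulted in registration order within a ChainResolver.
val cr = new ChainResolver
cr.setName("list")

// A user-provided repository, registered first so it takes priority.
val userRepo = new IBiblioResolver
userRepo.setM2compatible(true)
userRepo.setUsepoms(true)
userRepo.setRoot("https://repo.example.com/maven") // hypothetical URL
userRepo.setName("repo-1")
cr.add(userRepo)

// Maven Central (the resolver's default root), registered second, is
// only consulted when the user repository misses.
val central = new IBiblioResolver
central.setM2compatible(true)
central.setUsepoms(true)
central.setName("central")
cr.add(central)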
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala           | 37
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala |  6
2 files changed, 26 insertions(+), 17 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index fe7345927e..43631ee279 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -800,6 +800,20 @@ private[spark] object SparkSubmitUtils {
     val cr = new ChainResolver
     cr.setName("list")

+    val repositoryList = remoteRepos.getOrElse("")
+    // add any other remote repositories other than maven central
+    if (repositoryList.trim.nonEmpty) {
+      repositoryList.split(",").zipWithIndex.foreach { case (repo, i) =>
+        val brr: IBiblioResolver = new IBiblioResolver
+        brr.setM2compatible(true)
+        brr.setUsepoms(true)
+        brr.setRoot(repo)
+        brr.setName(s"repo-${i + 1}")
+        cr.add(brr)
+        printStream.println(s"$repo added as a remote repository with the name: ${brr.getName}")
+      }
+    }
+
     val localM2 = new IBiblioResolver
     localM2.setM2compatible(true)
     localM2.setRoot(m2Path.toURI.toString)
@@ -830,20 +844,6 @@ private[spark] object SparkSubmitUtils {
     sp.setRoot("http://dl.bintray.com/spark-packages/maven")
     sp.setName("spark-packages")
     cr.add(sp)
-
-    val repositoryList = remoteRepos.getOrElse("")
-    // add any other remote repositories other than maven central
-    if (repositoryList.trim.nonEmpty) {
-      repositoryList.split(",").zipWithIndex.foreach { case (repo, i) =>
-        val brr: IBiblioResolver = new IBiblioResolver
-        brr.setM2compatible(true)
-        brr.setUsepoms(true)
-        brr.setRoot(repo)
-        brr.setName(s"repo-${i + 1}")
-        cr.add(brr)
-        printStream.println(s"$repo added as a remote repository with the name: ${brr.getName}")
-      }
-    }
     cr
   }
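Taken together, the two hunks above move the user-repository block from the end of the resolver chain to the front. A hedged sketch of how the resulting order could be inspected; the createRepoResolvers signature and the built-in resolver names are assumptions for this Spark version, and the URLs are hypothetical.

import org.apache.ivy.core.settings.IvySettings
import org.apache.ivy.plugins.resolver.DependencyResolver

// Three user repositories plus the four built-in resolvers should
// produce a chain of seven, with the user repositories first.
val repos = Option("https://a.example.com/m2,https://b.example.com/m2,https://c.example.com/m2")
val chain = SparkSubmitUtils.createRepoResolvers(repos, new IvySettings)
chain.getResolvers.toArray.foreach { r =>
  println(r.asInstanceOf[DependencyResolver].getName)
}
// Expected order: repo-1, repo-2, repo-3, then the built-in resolvers
// (local m2 cache, local ivy cache, central, spark-packages).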
@@ -973,6 +973,15 @@ private[spark] object SparkSubmitUtils {
       // A Module descriptor must be specified. Entries are dummy strings
       val md = getModuleDescriptor
+      // clear ivy resolution from previous launches. The resolution file is usually at
+      // ~/.ivy2/org.apache.spark-spark-submit-parent-default.xml. In between runs, this file
+      // leads to confusion with Ivy when the files can no longer be found at the repository
+      // declared in that file.
+      val mdId = md.getModuleRevisionId
+      val previousResolution = new File(ivySettings.getDefaultCache,
+        s"${mdId.getOrganisation}-${mdId.getName}-$ivyConfName.xml")
+      if (previousResolution.exists) previousResolution.delete
+
       md.setDefaultConf(ivyConfName)

       // Add exclusion rules for Spark and Scala Library
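For illustration, a minimal standalone sketch of the cleanup the hunk above introduces, assuming the default Ivy cache location and the dummy org.apache.spark#spark-submit-parent module with the "default" configuration; in the real code the directory comes from ivySettings.getDefaultCache.

import java.io.File

// Ivy records the previous resolution in <org>-<module>-<conf>.xml under
// its cache directory; a stale copy makes Ivy look for artifacts at
// repositories recorded in an earlier run.
val defaultCache = new File(sys.props("user.home"), ".ivy2/cache") // assumed location
val previousResolution =
  new File(defaultCache, "org.apache.spark-spark-submit-parent-default.xml")

// Remove the stale report so this launch resolves from scratch.
if (previousResolution.exists) previousResolution.delete()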
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
index 8003b71d71..55594708d8 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -77,9 +77,9 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
     assert(resolver2.getResolvers.size() === 7)
     val expected = repos.split(",").map(r => s"$r/")
     resolver2.getResolvers.toArray.zipWithIndex.foreach { case (resolver: AbstractResolver, i) =>
-      if (i > 3) {
-        assert(resolver.getName === s"repo-${i - 3}")
-        assert(resolver.asInstanceOf[IBiblioResolver].getRoot === expected(i - 4))
+      if (i < 3) {
+        assert(resolver.getName === s"repo-${i + 1}")
+        assert(resolver.asInstanceOf[IBiblioResolver].getRoot === expected(i))
       }
     }
   }
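One detail behind the expected values in this test: the raw repository strings are compared with a trailing slash appended because Ivy's IBiblioResolver normalizes its root to end with "/". A minimal sketch (URL hypothetical):

import org.apache.ivy.plugins.resolver.IBiblioResolver

val r = new IBiblioResolver
r.setRoot("https://repo.example.com/maven") // no trailing slash supplied
println(r.getRoot)                          // "https://repo.example.com/maven/"

This is why the suite builds expected as repos.split(",").map(r => s"$r/") instead of comparing the raw strings.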