author    Bryan Cutler <cutlerb@gmail.com>          2016-08-20 13:45:26 -0700
committer Josh Rosen <joshrosen@databricks.com>     2016-08-20 13:45:26 -0700
commit    9f37d4eac28dd179dd523fa7d645be97bb52af9c (patch)
tree      6c929720d12d1c598316bd3c5aa960a23feb7b1b
parent    9560c8d29542a5dcaaa07b7af9ef5ddcdbb5d14d (diff)
[SPARK-12666][CORE] SparkSubmit packages fix for when 'default' conf doesn't exist in dependent module
## What changes were proposed in this pull request?

Adding "(runtime)" to the dependency configuration sets a fallback configuration to be used if the requested one is not found. E.g. with the setting "default(runtime)", Ivy will look for the conf "default" in the module's ivy file and, if it is not found, will fall back to the conf "runtime". This helps in the case of "sbt publishLocal", which does not write a "default" conf into the published ivy.xml file.

## How was this patch tested?

Used spark-submit with the --packages option for a package published locally with no default conf, and for a package resolved from Maven Central.

Author: Bryan Cutler <cutlerb@gmail.com>

Closes #13428 from BryanCutler/fallback-package-conf-SPARK-12666.
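A minimal sketch of Ivy's fallback-configuration syntax in isolation (the coordinates and conf names below are illustrative, not taken from the patch):

```scala
import org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor
import org.apache.ivy.core.module.id.ModuleRevisionId

// Hypothetical coordinates, for illustration only.
val ri = ModuleRevisionId.newInstance("com.example", "demo-lib", "0.1.0")
val dd = new DefaultDependencyDescriptor(ri, /* force = */ false, /* changing = */ false)

// Map the resolve conf "default" to the dependency's "default" conf; the
// parenthesized "(runtime)" tells Ivy to fall back to the dependency's
// "runtime" conf when its published ivy.xml declares no "default" conf
// (as with ivy.xml files written by `sbt publishLocal`).
dd.addDependencyConfiguration("default", "default(runtime)")
```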
 core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 9feafc99ac..7b6d5a394b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -897,9 +897,12 @@ private[spark] object SparkSubmitUtils {
val localIvyRoot = new File(ivySettings.getDefaultIvyUserDir, "local")
localIvy.setLocal(true)
localIvy.setRepository(new FileRepository(localIvyRoot))
- val ivyPattern = Seq("[organisation]", "[module]", "[revision]", "[type]s",
- "[artifact](-[classifier]).[ext]").mkString(File.separator)
- localIvy.addIvyPattern(localIvyRoot.getAbsolutePath + File.separator + ivyPattern)
+ val ivyPattern = Seq(localIvyRoot.getAbsolutePath, "[organisation]", "[module]", "[revision]",
+ "ivys", "ivy.xml").mkString(File.separator)
+ localIvy.addIvyPattern(ivyPattern)
+ val artifactPattern = Seq(localIvyRoot.getAbsolutePath, "[organisation]", "[module]",
+ "[revision]", "[type]s", "[artifact](-[classifier]).[ext]").mkString(File.separator)
+ localIvy.addArtifactPattern(artifactPattern)
localIvy.setName("local-ivy-cache")
cr.add(localIvy)
@@ -944,7 +947,7 @@ private[spark] object SparkSubmitUtils {
artifacts.foreach { mvn =>
val ri = ModuleRevisionId.newInstance(mvn.groupId, mvn.artifactId, mvn.version)
val dd = new DefaultDependencyDescriptor(ri, false, false)
- dd.addDependencyConfiguration(ivyConfName, ivyConfName)
+ dd.addDependencyConfiguration(ivyConfName, ivyConfName + "(runtime)")
// scalastyle:off println
printStream.println(s"${dd.getDependencyId} added as a dependency")
// scalastyle:on println
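For context, a standalone sketch of the resolver setup the first hunk produces; the local root here is an assumption (the patch derives it from ivySettings.getDefaultIvyUserDir):

```scala
import java.io.File
import org.apache.ivy.plugins.repository.file.FileRepository
import org.apache.ivy.plugins.resolver.FileSystemResolver

// Assumed root; the patch uses new File(ivySettings.getDefaultIvyUserDir, "local").
val localIvyRoot = new File(sys.props("user.home"), ".ivy2" + File.separator + "local")

val localIvy = new FileSystemResolver
localIvy.setLocal(true)
localIvy.setRepository(new FileRepository(localIvyRoot))

// `sbt publishLocal` writes module metadata and artifacts to separate subtrees:
//   <root>/[organisation]/[module]/[revision]/ivys/ivy.xml
//   <root>/[organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext]
// so the resolver needs distinct ivy and artifact patterns, as in the patch.
localIvy.addIvyPattern(Seq(localIvyRoot.getAbsolutePath, "[organisation]", "[module]",
  "[revision]", "ivys", "ivy.xml").mkString(File.separator))
localIvy.addArtifactPattern(Seq(localIvyRoot.getAbsolutePath, "[organisation]", "[module]",
  "[revision]", "[type]s", "[artifact](-[classifier]).[ext]").mkString(File.separator))
localIvy.setName("local-ivy-cache")
```

Splitting the patterns matters because, before this change, the resolver registered only an ivy pattern, and that pattern pointed at the artifact subtree, so ivy.xml files published by `sbt publishLocal` could not be located.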