author     Josh Rosen <joshrosen@databricks.com>  2015-11-10 10:14:19 -0800
committer  Michael Armbrust <michael@databricks.com>  2015-11-10 10:14:19 -0800
commit     689386b1c60997e4505749915f7005a52c207de2
tree       f3d1184de25b306519883fc8c00fb08948a3939c /project
parent     a81f47ff7498e7063c855ccf75bba81ab101b43e
[SPARK-7841][BUILD] Stop using retrieveManaged to retrieve dependencies in SBT
This patch modifies Spark's SBT build so that it no longer uses `retrieveManaged` / `lib_managed` to store its dependencies. The motivations for this change are nicely described on the JIRA ticket ([SPARK-7841](https://issues.apache.org/jira/browse/SPARK-7841)); my personal interest in doing this stems from the fact that `lib_managed` has caused me some pain while debugging dependency issues in another PR of mine.

Removing our use of `lib_managed` would be trivial except for one snag: the Datanucleus JARs, required by Spark SQL's Hive integration, cannot be included in assembly JARs due to problems with merging OSGi `plugin.xml` files. As a result, several places in the packaging and deployment pipeline assume that these Datanucleus JARs are copied to `lib_managed/jars`. In the interest of maintaining compatibility, I have chosen to retain the `lib_managed/jars` directory _only_ for these Datanucleus JARs and have added custom code to `SparkBuild.scala` to automatically copy those JARs to that folder as part of the `assembly` task.

`dev/mima` also depended on `lib_managed` in a hacky way in order to set classpaths when generating MiMa excludes; I've updated this to obtain the classpaths directly from SBT instead.

/cc dragos marmbrus pwendell srowen

Author: Josh Rosen <joshrosen@databricks.com>

Closes #9575 from JoshRosen/SPARK-7841.
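For readers unfamiliar with these SBT settings, here is a minimal build.sbt-style sketch of what the two lines removed below actually do (the pattern is the one Spark used; the Guava path is purely illustrative):

```scala
// Sketch of the settings this patch removes. retrieveManaged makes SBT
// copy every resolved dependency out of the Ivy cache into
// <project root>/lib_managed/, laid out according to retrievePattern.
retrieveManaged := true
retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]"
// Under this pattern a dependency such as Guava lands at
// lib_managed/jars/guava-14.0.1.jar. With retrieveManaged left at its
// default (false), the build uses jars directly from ~/.ivy2/cache.
```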
Diffstat (limited to 'project')
-rw-r--r--  project/SparkBuild.scala | 22
1 file changed, 17 insertions(+), 5 deletions(-)
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index b75ed13a78..a9fb741d75 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -16,6 +16,7 @@
  */
 
 import java.io._
+import java.nio.file.Files
 
 import scala.util.Properties
 import scala.collection.JavaConverters._
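An aside on the new `java.nio.file.Files` import (an editorial sketch, not part of the patch): `Files.copy` throws `FileAlreadyExistsException` when the target already exists, which is why the task added further below guards each copy with `dest.exists()`. Overwriting unconditionally would instead require `REPLACE_EXISTING`:

```scala
import java.io.File
import java.nio.file.{Files, StandardCopyOption}

// Alternative to the dest.exists() guard used in this patch:
// REPLACE_EXISTING overwrites a stale jar instead of skipping it.
def copyJar(jar: File, dest: File): Unit =
  Files.copy(jar.toPath, dest.toPath, StandardCopyOption.REPLACE_EXISTING)
```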
@@ -135,8 +136,6 @@ object SparkBuild extends PomBuild {
       .orElse(sys.props.get("java.home").map { p => new File(p).getParentFile().getAbsolutePath() })
       .map(file),
     incOptions := incOptions.value.withNameHashing(true),
-    retrieveManaged := true,
-    retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
 
     publishMavenStyle := true,
     unidocGenjavadocVersion := "0.9-spark0",
@@ -326,8 +325,6 @@ object OldDeps {
   def oldDepsSettings() = Defaults.coreDefaultSettings ++ Seq(
     name := "old-deps",
     scalaVersion := "2.10.5",
-    retrieveManaged := true,
-    retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
     libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
       "spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
       "spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx",
@@ -404,6 +401,8 @@
 
   val hadoopVersion = taskKey[String]("The version of hadoop that spark is compiled against.")
 
+  val deployDatanucleusJars = taskKey[Unit]("Deploy datanucleus jars to the spark/lib_managed/jars directory")
+
   lazy val settings = assemblySettings ++ Seq(
     test in assembly := {},
     hadoopVersion := {
@@ -429,7 +428,20 @@
     case m if m.toLowerCase.startsWith("meta-inf/services/") => MergeStrategy.filterDistinctLines
     case "reference.conf" => MergeStrategy.concat
     case _ => MergeStrategy.first
-    }
+    },
+    deployDatanucleusJars := {
+      val jars: Seq[File] = (fullClasspath in assembly).value.map(_.data)
+        .filter(_.getPath.contains("org.datanucleus"))
+      val libManagedJars = new File(BuildCommons.sparkHome, "lib_managed/jars")
+      libManagedJars.mkdirs()
+      jars.foreach { jar =>
+        val dest = new File(libManagedJars, jar.getName)
+        if (!dest.exists()) {
+          Files.copy(jar.toPath, dest.toPath)
+        }
+      }
+    },
+    assembly <<= assembly.dependsOn(deployDatanucleusJars)
   )
 }
 
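The `dev/mima` change mentioned in the commit message falls outside the `project/` diffstat shown here. As a hedged sketch of the general technique (the task name is illustrative, not the commit's actual code), a build-level task can print a classpath so that a shell script captures it, rather than globbing jars out of `lib_managed/`:

```scala
// Hypothetical sketch: expose a classpath to shell scripts.
// printRuntimeClasspath is an illustrative name, not from this patch.
val printRuntimeClasspath = taskKey[Unit]("Print the runtime classpath, one entry per line")

printRuntimeClasspath := {
  (fullClasspath in Runtime).value
    .map(_.data.getAbsolutePath)
    .foreach(println)
}
```

SBT 0.13 also ships a built-in `export` command (e.g. `build/sbt "export compile:fullClasspath"`) that prints a task's classpath as a single path-separated string, which a script can read from the last line of the output.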