Diffstat (limited to 'project/SparkBuild.scala')
 project/SparkBuild.scala | 22 +++++++++++++++++-----
 1 file changed, 17 insertions(+), 5 deletions(-)
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index b75ed13a78..a9fb741d75 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -16,6 +16,7 @@
*/
import java.io._
+import java.nio.file.Files
import scala.util.Properties
import scala.collection.JavaConverters._
@@ -135,8 +136,6 @@ object SparkBuild extends PomBuild {
.orElse(sys.props.get("java.home").map { p => new File(p).getParentFile().getAbsolutePath() })
.map(file),
incOptions := incOptions.value.withNameHashing(true),
- retrieveManaged := true,
- retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
publishMavenStyle := true,
unidocGenjavadocVersion := "0.9-spark0",
@@ -326,8 +325,6 @@ object OldDeps {
def oldDepsSettings() = Defaults.coreDefaultSettings ++ Seq(
name := "old-deps",
scalaVersion := "2.10.5",
- retrieveManaged := true,
- retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
"spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
"spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx",
@@ -404,6 +401,8 @@ object Assembly {
val hadoopVersion = taskKey[String]("The version of hadoop that spark is compiled against.")
+ val deployDatanucleusJars = taskKey[Unit]("Deploy datanucleus jars to the spark/lib_managed/jars directory")
+
lazy val settings = assemblySettings ++ Seq(
test in assembly := {},
hadoopVersion := {
@@ -429,7 +428,20 @@ object Assembly {
case m if m.toLowerCase.startsWith("meta-inf/services/") => MergeStrategy.filterDistinctLines
case "reference.conf" => MergeStrategy.concat
case _ => MergeStrategy.first
- }
+ },
+ deployDatanucleusJars := {
+   val jars: Seq[File] = (fullClasspath in assembly).value.map(_.data)
+     .filter(_.getPath.contains("org.datanucleus"))
+   var libManagedJars = new File(BuildCommons.sparkHome, "lib_managed/jars")
+   libManagedJars.mkdirs()
+   jars.foreach { jar =>
+     val dest = new File(libManagedJars, jar.getName)
+     if (!dest.exists()) {
+       Files.copy(jar.toPath, dest.toPath)
+     }
+   }
+ },
+ assembly <<= assembly.dependsOn(deployDatanucleusJars)
)
}
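
For context, this change removes sbt's `retrieveManaged`/`retrievePattern` settings, which copied every managed dependency into `lib_managed`, and instead copies only the Datanucleus jars there as a step that runs before `assembly`. Below is a minimal standalone sketch of the task's copy-if-missing logic, assuming the same `lib_managed/jars` layout; `copyDatanucleusJars` and its parameters are illustrative helpers, not part of SparkBuild.scala:

import java.io.File
import java.nio.file.Files

// Illustrative helper (not in the build): copy the datanucleus jars from a
// classpath into <sparkHome>/lib_managed/jars, skipping ones already there.
def copyDatanucleusJars(classpath: Seq[File], sparkHome: File): Unit = {
  // Ivy cache paths include the organization name, which is how the task
  // picks the datanucleus artifacts out of the assembly classpath.
  val jars = classpath.filter(_.getPath.contains("org.datanucleus"))
  val libManagedJars = new File(sparkHome, "lib_managed/jars")
  libManagedJars.mkdirs()
  jars.foreach { jar =>
    val dest = new File(libManagedJars, jar.getName)
    // Files.copy throws FileAlreadyExistsException if the target exists,
    // hence the guard rather than an unconditional copy.
    if (!dest.exists()) {
      Files.copy(jar.toPath, dest.toPath)
    }
  }
}

The final line of the patch uses sbt 0.13's `<<=` operator to rewire `assembly` so that `deployDatanucleusJars` always runs first; in later sbt versions the equivalent wiring would typically be written as `assembly := assembly.dependsOn(deployDatanucleusJars).value`.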