diff options
Diffstat (limited to 'project/SparkBuild.scala')
-rw-r--r-- | project/SparkBuild.scala | 45 |
1 file changed, 22 insertions, 23 deletions
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index 5d62b688b9..b32480b164 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -57,11 +57,12 @@ object BuildCommons { Seq("yarn", "java8-tests", "ganglia-lgpl", "streaming-kinesis-asl", "docker-integration-tests").map(ProjectRef(buildLocation, _)) - val assemblyProjects@Seq(assembly, networkYarn, streamingFlumeAssembly, streamingKafkaAssembly, streamingKinesisAslAssembly) = - Seq("assembly", "network-yarn", "streaming-flume-assembly", "streaming-kafka-assembly", "streaming-kinesis-asl-assembly") + val assemblyProjects@Seq(networkYarn, streamingFlumeAssembly, streamingKafkaAssembly, streamingKinesisAslAssembly) = + Seq("network-yarn", "streaming-flume-assembly", "streaming-kafka-assembly", "streaming-kinesis-asl-assembly") .map(ProjectRef(buildLocation, _)) - val copyJarsProjects@Seq(examples) = Seq("examples").map(ProjectRef(buildLocation, _)) + val copyJarsProjects@Seq(assembly, examples) = Seq("assembly", "examples") + .map(ProjectRef(buildLocation, _)) val tools = ProjectRef(buildLocation, "tools") // Root project. @@ -263,8 +264,14 @@ object SparkBuild extends PomBuild { /* Unsafe settings */ enable(Unsafe.settings)(unsafe) - /* Set up tasks to copy dependencies during packaging. */ - copyJarsProjects.foreach(enable(CopyDependencies.settings)) + /* + * Set up tasks to copy dependencies during packaging. This step can be disabled in the command + * line, so that dev/mima can run without trying to copy these files again and potentially + * causing issues. 
+ */ + if (!"false".equals(System.getProperty("copyDependencies"))) { + copyJarsProjects.foreach(enable(CopyDependencies.settings)) + } /* Enable Assembly for all assembly projects */ assemblyProjects.foreach(enable(Assembly.settings)) @@ -477,8 +484,6 @@ object Assembly { val hadoopVersion = taskKey[String]("The version of hadoop that spark is compiled against.") - val deployDatanucleusJars = taskKey[Unit]("Deploy datanucleus jars to the spark/lib_managed/jars directory") - lazy val settings = assemblySettings ++ Seq( test in assembly := {}, hadoopVersion := { @@ -497,27 +502,13 @@ object Assembly { s"${mName}-test-${v}.jar" }, mergeStrategy in assembly := { - case PathList("org", "datanucleus", xs @ _*) => MergeStrategy.discard case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard case m if m.toLowerCase.matches("meta-inf.*\\.sf$") => MergeStrategy.discard case "log4j.properties" => MergeStrategy.discard case m if m.toLowerCase.startsWith("meta-inf/services/") => MergeStrategy.filterDistinctLines case "reference.conf" => MergeStrategy.concat case _ => MergeStrategy.first - }, - deployDatanucleusJars := { - val jars: Seq[File] = (fullClasspath in assembly).value.map(_.data) - .filter(_.getPath.contains("org.datanucleus")) - var libManagedJars = new File(BuildCommons.sparkHome, "lib_managed/jars") - libManagedJars.mkdirs() - jars.foreach { jar => - val dest = new File(libManagedJars, jar.getName) - if (!dest.exists()) { - Files.copy(jar.toPath, dest.toPath) - } - } - }, - assembly <<= assembly.dependsOn(deployDatanucleusJars) + } ) } @@ -698,6 +689,13 @@ object Java8TestSettings { object TestSettings { import BuildCommons._ + private val scalaBinaryVersion = + if (System.getProperty("scala-2.10") == "true") { + "2.10" + } else { + "2.11" + } + lazy val settings = Seq ( // Fork new JVMs for tests and set Java options for those fork := true, @@ -707,6 +705,7 @@ object TestSettings { "SPARK_DIST_CLASSPATH" -> (fullClasspath in 
Test).value.files.map(_.getAbsolutePath).mkString(":").stripSuffix(":"), "SPARK_PREPEND_CLASSES" -> "1", + "SPARK_SCALA_VERSION" -> scalaBinaryVersion, "SPARK_TESTING" -> "1", "JAVA_HOME" -> sys.env.get("JAVA_HOME").getOrElse(sys.props("java.home"))), javaOptions in Test += s"-Djava.io.tmpdir=$testTempDir", @@ -744,7 +743,7 @@ object TestSettings { // Make sure the test temp directory exists. resourceGenerators in Test <+= resourceManaged in Test map { outDir: File => if (!new File(testTempDir).isDirectory()) { - require(new File(testTempDir).mkdirs()) + require(new File(testTempDir).mkdirs(), s"Error creating temp directory $testTempDir.") } Seq[File]() }, |