author     Josh Rosen <joshrosen@databricks.com>      2015-11-11 11:16:39 -0800
committer  Michael Armbrust <michael@databricks.com>  2015-11-11 11:16:39 -0800
commit     529a1d3380c4c23fed068ad05a6376162c4b76d6 (patch)
tree       3b18b3ab42840c8893b4e5ce9c2fb8d99f2264cf /project
parent     e71ba56586ba64da18f412bc0e0263777c46ac4a (diff)
download   spark-529a1d3380c4c23fed068ad05a6376162c4b76d6.tar.gz
           spark-529a1d3380c4c23fed068ad05a6376162c4b76d6.tar.bz2
           spark-529a1d3380c4c23fed068ad05a6376162c4b76d6.zip
[SPARK-6152] Use shaded ASM5 to support closure cleaning of Java 8 compiled classes
This patch modifies Spark's closure cleaner (and a few other places) to use ASM 5, which is necessary in order to support cleaning of closures that were compiled by Java 8.

To avoid ASM dependency conflicts, Spark excludes ASM from all of its dependencies and uses a shaded version of ASM 4 that comes from `reflectasm` (see [SPARK-782](https://issues.apache.org/jira/browse/SPARK-782) and #232). This patch updates Spark to use a shaded version of ASM 5.0.4 published by the Apache XBean project; the POM used to create the shaded artifact can be found at https://github.com/apache/geronimo-xbean/blob/xbean-4.4/xbean-asm5-shaded/pom.xml. http://movingfulcrum.tumblr.com/post/80826553604/asm-framework-50-the-missing-migration-guide was a useful resource while upgrading the code to the new ASM 5 opcodes.

I also added new regression tests in the `java8-tests` subproject; the existing tests were insufficient to catch this bug, which only affected Scala 2.11 user code compiled to target Java 8.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #9512 from JoshRosen/SPARK-6152.
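For context, here is a minimal sketch (not code from this patch) of what bytecode inspection looks like through the shaded classes: the XBean artifact relocates ASM under the `org.apache.xbean.asm5` package, and passing `Opcodes.ASM5` to the visitor API is what lets ASM parse Java 8 (class file version 52) bytecode, which ASM 4's `ClassReader` rejects with an `IllegalArgumentException`. The object and method names below are illustrative:

```scala
import org.apache.xbean.asm5.{ClassReader, ClassVisitor, MethodVisitor, Opcodes}

// Hypothetical helper, not part of Spark: lists the methods of a class by
// reading its bytecode through the shaded ASM 5 API, roughly the way the
// closure cleaner walks a closure's class file.
object ShadedAsmSketch {
  def listMethods(cls: Class[_]): Unit = {
    val resourceName = cls.getName.replace('.', '/') + ".class"
    val in = cls.getClassLoader.getResourceAsStream(resourceName)
    try {
      // Under the old shaded ASM 4, this constructor throws on Java 8 classes.
      val reader = new ClassReader(in)
      reader.accept(new ClassVisitor(Opcodes.ASM5) {
        override def visitMethod(access: Int, name: String, desc: String,
            signature: String, exceptions: Array[String]): MethodVisitor = {
          println(s"$name$desc")
          super.visitMethod(access, name, desc, signature, exceptions)
        }
      }, ClassReader.SKIP_DEBUG)
    } finally {
      in.close()
    }
  }
}
```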
Diffstat (limited to 'project')
-rw-r--r--  project/SparkBuild.scala  26
1 file changed, 24 insertions, 2 deletions
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index b7c6192243..570c9e50ed 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -57,6 +57,9 @@ object BuildCommons {
   val sparkHome = buildLocation
 
   val testTempDir = s"$sparkHome/target/tmp"
+
+  val javacJVMVersion = settingKey[String]("source and target JVM version for javac")
+  val scalacJVMVersion = settingKey[String]("source and target JVM version for scalac")
 }
 
 object SparkBuild extends PomBuild {
@@ -154,9 +157,17 @@ object SparkBuild extends PomBuild {
       if (major.toInt >= 1 && minor.toInt >= 8) Seq("-Xdoclint:all", "-Xdoclint:-missing") else Seq.empty
     },
 
-    javacOptions in Compile ++= Seq("-encoding", "UTF-8"),
+    javacJVMVersion := "1.7",
+    scalacJVMVersion := "1.7",
+
+    javacOptions in Compile ++= Seq(
+      "-encoding", "UTF-8",
+      "-source", javacJVMVersion.value,
+      "-target", javacJVMVersion.value
+    ),
 
     scalacOptions in Compile ++= Seq(
+      s"-target:jvm-${scalacJVMVersion.value}",
       "-sourcepath", (baseDirectory in ThisBuild).value.getAbsolutePath // Required for relative source links in scaladoc
     ),
 
@@ -241,8 +252,9 @@ object SparkBuild extends PomBuild
 
   enable(Flume.settings)(streamingFlumeSink)
 
-  enable(DockerIntegrationTests.settings)(dockerIntegrationTests)
+  enable(Java8TestSettings.settings)(java8Tests)
 
+  enable(DockerIntegrationTests.settings)(dockerIntegrationTests)
 
   /**
    * Adds the ability to run the spark shell directly from SBT without building an assembly
@@ -591,6 +603,16 @@ object Unidoc {
   )
 }
 
+object Java8TestSettings {
+  import BuildCommons._
+
+  lazy val settings = Seq(
+    javacJVMVersion := "1.8",
+    // Targeting Java 8 bytecode is only supported in Scala 2.11.4 and higher:
+    scalacJVMVersion := (if (System.getProperty("scala-2.11") == "true") "1.8" else "1.7")
+  )
+}
+
 object TestSettings {
   import BuildCommons._
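As a footnote on the build change itself: `javacJVMVersion` and `scalacJVMVersion` are ordinary sbt setting keys, defined once in `BuildCommons` and overridden per project (as `Java8TestSettings` does above). A minimal standalone sketch of the same pattern, with illustrative names that are not part of the Spark build:

```scala
// build.sbt -- a minimal sketch of the settingKey pattern; `jvmVersion`,
// `sharedSettings`, `core`, and `java8Tests` are illustrative names.
val jvmVersion = settingKey[String]("source and target JVM version for javac")

// Shared settings read the key lazily via .value, so a per-project
// override of jvmVersion changes the derived javacOptions as well.
lazy val sharedSettings = Seq(
  jvmVersion := "1.7",
  javacOptions ++= Seq("-source", jvmVersion.value, "-target", jvmVersion.value)
)

lazy val core = (project in file("core"))
  .settings(sharedSettings)

lazy val java8Tests = (project in file("java8-tests"))
  .settings(sharedSettings)
  .settings(jvmVersion := "1.8") // mirrors the Java8TestSettings override above
```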