author     Xiangrui Meng <meng@databricks.com>    2014-04-18 10:03:15 -0700
committer  Reynold Xin <rxin@apache.org>          2014-04-18 10:03:15 -0700
commit     aa17f022c59af02b04b977da9017671ef14d664a (patch)
tree       e6f0a3248a8b7e8d5b880582d909438d2453d5f5
parent     89f47434e2a6c2f8b80c44d08f866d3a8b8e85c3 (diff)
[SPARK-1520] remove fastutil from dependencies
A quick fix for https://issues.apache.org/jira/browse/SPARK-1520.

By excluding fastutil, we bring the number of files in the assembly jar back under 65536, so Java 7 won't create the assembly jar in zip64 format, which cannot be read by Java 6. With this change, the assembly jar now has about 60000 entries (58000 files), tested with both sbt and maven.

Author: Xiangrui Meng <meng@databricks.com>

Closes #437 from mengxr/remove-fastutil and squashes the following commits:

00f9beb [Xiangrui Meng] remove fastutil from dependencies
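Background on the 65536 figure: the classic zip central directory stores the entry count in a 16-bit field, so once a jar reaches 65536 entries the tooling has to switch to zip64, which Java 6's zip reader cannot open. A minimal sketch for re-checking the entry count of an assembly jar with the standard library; the jar path is a placeholder, not something from this commit:

    // Sketch: count entries in an assembly jar to confirm it stays below the
    // 65536-entry threshold that forces zip64 output. Path is a placeholder.
    import java.util.zip.ZipFile

    val jar = new ZipFile("path/to/spark-assembly.jar")
    val entryCount = jar.size()   // number of entries in the zip central directory
    println(s"assembly jar entries: $entryCount (zip64 is required at 65536 or more)")
    jar.close()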
-rw-r--r--  pom.xml                    7
-rw-r--r--  project/SparkBuild.scala   4
2 files changed, 9 insertions(+), 2 deletions(-)
diff --git a/pom.xml b/pom.xml
index cd204376de..4ff18afa22 100644
--- a/pom.xml
+++ b/pom.xml
@@ -263,6 +263,13 @@
<groupId>com.clearspring.analytics</groupId>
<artifactId>stream</artifactId>
<version>2.5.1</version>
+ <exclusions>
+ <!-- Only HyperLogLog is used, which doesn't depend on fastutil -->
+ <exclusion>
+ <groupId>it.unimi.dsi</groupId>
+ <artifactId>fastutil</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<!-- In theory we need not directly depend on protobuf since Spark does not directly
use it. However, when building with Hadoop/YARN 2.2 Maven doesn't correctly bump
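The comment added above is the justification for the exclusion: of stream-lib, Spark only uses HyperLogLog, which does not touch fastutil. A minimal smoke-test sketch under that assumption; the class and method names (HyperLogLog, offer, cardinality) are stream-lib 2.5.1's API as I understand it, not something taken from this commit:

    // Sketch: exercise stream-lib's HyperLogLog with fastutil absent from the
    // classpath. offer() adds a value, cardinality() returns the estimate.
    import com.clearspring.analytics.stream.cardinality.HyperLogLog

    val hll = new HyperLogLog(0.05)   // ~5% relative standard deviation
    (1 to 10000).foreach(i => hll.offer(i.toString))
    println("approximate distinct count: " + hll.cardinality())

If this ever failed with a NoClassDefFoundError for an it.unimi.dsi class, the exclusion would be unsafe; per the comment above, it should not.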
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 4f5a3a224f..33f9d644ca 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -304,7 +304,7 @@ object SparkBuild extends Build {
val excludeHadoop = ExclusionRule(organization = "org.apache.hadoop")
val excludeCurator = ExclusionRule(organization = "org.apache.curator")
val excludePowermock = ExclusionRule(organization = "org.powermock")
-
+ val excludeFastutil = ExclusionRule(organization = "it.unimi.dsi")
def sparkPreviousArtifact(id: String, organization: String = "org.apache.spark",
version: String = "0.9.0-incubating", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
@@ -343,7 +343,7 @@ object SparkBuild extends Build {
"com.twitter" %% "chill" % chillVersion excludeAll(excludeAsm),
"com.twitter" % "chill-java" % chillVersion excludeAll(excludeAsm),
"org.tachyonproject" % "tachyon" % "0.4.1-thrift" excludeAll(excludeHadoop, excludeCurator, excludeEclipseJetty, excludePowermock),
- "com.clearspring.analytics" % "stream" % "2.5.1",
+ "com.clearspring.analytics" % "stream" % "2.5.1" excludeAll(excludeFastutil),
"org.spark-project" % "pyrolite" % "2.0"
),
libraryDependencies ++= maybeAvro
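For reference, sbt can also express the exclusion inline on a single dependency rather than through a shared ExclusionRule val; the commit uses the val form to match the other exclude* rules already defined in SparkBuild.scala. An illustrative alternative, not part of this commit:

    // Equivalent in effect for this one dependency: exclude the fastutil
    // organization/artifact pair directly on the module.
    "com.clearspring.analytics" % "stream" % "2.5.1" exclude("it.unimi.dsi", "fastutil")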