author     Matei Zaharia <matei@eecs.berkeley.edu>  2013-04-24 08:42:30 -0700
committer  Matei Zaharia <matei@eecs.berkeley.edu>  2013-04-24 08:42:30 -0700
commit     118a6c76f56188edc72dba3f436587c124afc2ae (patch)
tree       3bac5a0f807ef29b7fc8e876e20807014386a458 /project
parent     17e076de800ea0d4c55f2bd657348641f6f9c55b (diff)
parent     3b594a4e3b94de49a09dc679a30d857e3f41df69 (diff)
Merge pull request #575 from mridulm/master
Manual merge of yarn branch to trunk
Diffstat (limited to 'project')
-rw-r--r--  project/SparkBuild.scala  46
1 file changed, 40 insertions, 6 deletions
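
In essence, the diff below replaces the single hard-coded Hadoop dependency with a selection keyed on HADOOP_MAJOR_VERSION and HADOOP_YARN. As a minimal standalone sketch (plain Scala, not the actual build definition; module IDs are simplified to "org:artifact:version" strings), the selection logic introduced here amounts to:

    // Illustrative sketch only, restating the dependency selection from this
    // commit as an ordinary function. Names are taken from the diff below.
    object HadoopDepsSketch {
      def hadoopDependencies(hadoopMajorVersion: String,
                             hadoopYarn: Boolean,
                             hadoopVersion: String): Seq[String] = {
        def dep(artifact: String) = "org.apache.hadoop:" + artifact + ":" + hadoopVersion
        if (hadoopMajorVersion == "2") {
          if (hadoopYarn) {
            // YARN builds pull in hadoop-client plus the yarn api/common/client artifacts.
            Seq("hadoop-client", "hadoop-yarn-api", "hadoop-yarn-common", "hadoop-yarn-client").map(dep)
          } else {
            // Hadoop 2 without YARN keeps hadoop-core alongside hadoop-client.
            Seq("hadoop-core", "hadoop-client").map(dep)
          }
        } else {
          // Hadoop 1 builds only need hadoop-core.
          Seq("hadoop-core").map(dep)
        }
      }

      def main(args: Array[String]): Unit =
        hadoopDependencies("2", hadoopYarn = true, "2.0.2-alpha").foreach(println)
    }

The commit also switches the default build to the YARN configuration (HADOOP_VERSION = "2.0.2-alpha", HADOOP_YARN = true) and points the unmanaged source directory at src/hadoop2-yarn/scala when that configuration is active.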
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index f0b371b2cf..b3f410bfa6 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -1,3 +1,4 @@
+
import sbt._
import sbt.Classpaths.publishTask
import Keys._
@@ -10,12 +11,19 @@ import twirl.sbt.TwirlPlugin._
object SparkBuild extends Build {
// Hadoop version to build against. For example, "0.20.2", "0.20.205.0", or
// "1.0.4" for Apache releases, or "0.20.2-cdh3u5" for Cloudera Hadoop.
- val HADOOP_VERSION = "1.0.4"
- val HADOOP_MAJOR_VERSION = "1"
+ //val HADOOP_VERSION = "1.0.4"
+ //val HADOOP_MAJOR_VERSION = "1"
// For Hadoop 2 versions such as "2.0.0-mr1-cdh4.1.1", set the HADOOP_MAJOR_VERSION to "2"
//val HADOOP_VERSION = "2.0.0-mr1-cdh4.1.1"
//val HADOOP_MAJOR_VERSION = "2"
+ //val HADOOP_YARN = false
+
+ // For Hadoop 2 YARN support
+ // val HADOOP_VERSION = "0.23.7"
+ val HADOOP_VERSION = "2.0.2-alpha"
+ val HADOOP_MAJOR_VERSION = "2"
+ val HADOOP_YARN = true
lazy val root = Project("root", file("."), settings = rootSettings) aggregate(core, repl, examples, bagel, streaming)
@@ -40,7 +48,6 @@ object SparkBuild extends Build {
scalacOptions := Seq("-unchecked", "-optimize", "-deprecation"),
unmanagedJars in Compile <<= baseDirectory map { base => (base / "lib" ** "*.jar").classpath },
retrieveManaged := true,
- retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
transitiveClassifiers in Scope.GlobalScope := Seq("sources"),
testListeners <<= target.map(t => Seq(new eu.henkelmann.sbt.JUnitXmlTestsListener(t.getAbsolutePath))),
@@ -99,6 +106,7 @@ object SparkBuild extends Build {
*/
libraryDependencies ++= Seq(
+ "io.netty" % "netty" % "3.5.3.Final",
"org.eclipse.jetty" % "jetty-server" % "7.6.8.v20121106",
"org.scalatest" %% "scalatest" % "1.9.1" % "test",
"org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
@@ -130,12 +138,13 @@ object SparkBuild extends Build {
),
libraryDependencies ++= Seq(
+ "io.netty" % "netty" % "3.5.3.Final",
"com.google.guava" % "guava" % "11.0.1",
"log4j" % "log4j" % "1.2.16",
"org.slf4j" % "slf4j-api" % slf4jVersion,
"org.slf4j" % "slf4j-log4j12" % slf4jVersion,
+ "commons-daemon" % "commons-daemon" % "1.0.10",
"com.ning" % "compress-lzf" % "0.8.4",
- "org.apache.hadoop" % "hadoop-core" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ),
"asm" % "asm-all" % "3.3.1",
"com.google.protobuf" % "protobuf-java" % "2.4.1",
"de.javakaffee" % "kryo-serializers" % "0.22",
@@ -148,8 +157,32 @@ object SparkBuild extends Build {
"cc.spray" % "spray-server" % "1.0-M2.1",
"cc.spray" % "spray-json_2.9.2" % "1.1.1",
"org.apache.mesos" % "mesos" % "0.9.0-incubating"
- ) ++ (if (HADOOP_MAJOR_VERSION == "2") Some("org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION) else None).toSeq,
- unmanagedSourceDirectories in Compile <+= baseDirectory{ _ / ("src/hadoop" + HADOOP_MAJOR_VERSION + "/scala") }
+ ) ++ (
+ if (HADOOP_MAJOR_VERSION == "2") {
+ if (HADOOP_YARN) {
+ Seq(
+ // Exclude rule required for all ?
+ "org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ),
+ "org.apache.hadoop" % "hadoop-yarn-api" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ),
+ "org.apache.hadoop" % "hadoop-yarn-common" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ),
+ "org.apache.hadoop" % "hadoop-yarn-client" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") )
+ )
+ } else {
+ Seq(
+ "org.apache.hadoop" % "hadoop-core" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ),
+ "org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") )
+ )
+ }
+ } else {
+ Seq("org.apache.hadoop" % "hadoop-core" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ) )
+ }),
+ unmanagedSourceDirectories in Compile <+= baseDirectory{ _ /
+ ( if (HADOOP_YARN && HADOOP_MAJOR_VERSION == "2") {
+ "src/hadoop2-yarn/scala"
+ } else {
+ "src/hadoop" + HADOOP_MAJOR_VERSION + "/scala"
+ } )
+ }
) ++ assemblySettings ++ extraAssemblySettings ++ Twirl.settings
def rootSettings = sharedSettings ++ Seq(
@@ -181,6 +214,7 @@ object SparkBuild extends Build {
def extraAssemblySettings() = Seq(test in assembly := {}) ++ Seq(
mergeStrategy in assembly := {
case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
+ case m if m.toLowerCase.matches("meta-inf/.*\\.sf$") => MergeStrategy.discard
case "reference.conf" => MergeStrategy.concat
case _ => MergeStrategy.first
}
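
The final hunk adds a merge rule that discards jar signature files when building the assembly, so that signed upstream jars do not leave stale META-INF/*.SF entries in the fat jar. As a quick standalone illustration (my own example, not part of the commit) of which entries that regex would discard:

    // Standalone check of the pattern used in the added mergeStrategy case.
    object SignatureFilterSketch {
      // Same pattern as the new case in extraAssemblySettings.
      val signaturePattern = "meta-inf/.*\\.sf$"

      def discardedAsSignature(path: String): Boolean =
        path.toLowerCase.matches(signaturePattern)

      def main(args: Array[String]): Unit = {
        Seq("META-INF/SOMEKEY.SF", "META-INF/MANIFEST.MF", "reference.conf").foreach { p =>
          println(p + " -> " + (if (discardedAsSignature(p)) "discard" else "handled by other rules"))
        }
      }
    }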