aboutsummaryrefslogtreecommitdiff
path: root/project/SparkBuild.scala
diff options
context:
space:
mode:
authorJey Kottalam <jey@cs.berkeley.edu>2013-08-23 10:26:37 -0700
committerJey Kottalam <jey@cs.berkeley.edu>2013-08-23 10:26:37 -0700
commitb7f9e6374ae89568b5b7298d89825eaf0b33cc15 (patch)
tree974c2de45496d5cc70dabb5adebd0bf0d424222a /project/SparkBuild.scala
parent5a6ac128406674a76c971a521d0bcec5714559d3 (diff)
downloadspark-b7f9e6374ae89568b5b7298d89825eaf0b33cc15.tar.gz
spark-b7f9e6374ae89568b5b7298d89825eaf0b33cc15.tar.bz2
spark-b7f9e6374ae89568b5b7298d89825eaf0b33cc15.zip
Fix SBT generation of IDE project files
Diffstat (limited to 'project/SparkBuild.scala')
-rw-r--r--  project/SparkBuild.scala | 17
1 file changed, 12 insertions(+), 5 deletions(-)
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 2a80f9c946..fbeae27707 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -57,14 +57,14 @@ object SparkBuild extends Build {
// Allows build configuration to be set through environment variables
lazy val hadoopVersion = scala.util.Properties.envOrElse("SPARK_HADOOP_VERSION", DEFAULT_HADOOP_VERSION)
- lazy val isYarnMode = scala.util.Properties.envOrNone("SPARK_WITH_YARN") match {
+ lazy val isYarnEnabled = scala.util.Properties.envOrNone("SPARK_WITH_YARN") match {
case None => DEFAULT_WITH_YARN
case Some(v) => v.toBoolean
}
// Conditionally include the yarn sub-project
- lazy val maybeYarn = if(isYarnMode) Seq[ClasspathDependency](yarn) else Seq[ClasspathDependency]()
- lazy val maybeYarnRef = if(isYarnMode) Seq[ProjectReference](yarn) else Seq[ProjectReference]()
+ lazy val maybeYarn = if(isYarnEnabled) Seq[ClasspathDependency](yarn) else Seq[ClasspathDependency]()
+ lazy val maybeYarnRef = if(isYarnEnabled) Seq[ProjectReference](yarn) else Seq[ProjectReference]()
lazy val allProjects = Seq[ProjectReference](core, repl, examples, bagel, streaming, mllib, tools) ++ maybeYarnRef
def sharedSettings = Defaults.defaultSettings ++ Seq(
@@ -253,7 +253,14 @@ object SparkBuild extends Build {
) ++ assemblySettings ++ extraAssemblySettings
def yarnSettings = sharedSettings ++ Seq(
- name := "spark-yarn",
+ name := "spark-yarn"
+ ) ++ extraYarnSettings ++ assemblySettings ++ extraAssemblySettings
+
+ // Conditionally include the YARN dependencies because some tools look at all sub-projects and will complain
+ // if we refer to nonexistent dependencies (e.g. hadoop-yarn-api from a Hadoop version without YARN).
+ def extraYarnSettings = if(isYarnEnabled) yarnEnabledSettings else Seq()
+
+ def yarnEnabledSettings = Seq(
libraryDependencies ++= Seq(
// Exclude rule required for all ?
"org.apache.hadoop" % "hadoop-client" % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm),
@@ -261,7 +268,7 @@ object SparkBuild extends Build {
"org.apache.hadoop" % "hadoop-yarn-common" % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm),
"org.apache.hadoop" % "hadoop-yarn-client" % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm)
)
- ) ++ assemblySettings ++ extraAssemblySettings
+ )
def extraAssemblySettings() = Seq(test in assembly := {}) ++ Seq(
mergeStrategy in assembly := {