author     Matei Zaharia <matei.zaharia@gmail.com>   2013-08-23 11:20:20 -0700
committer  Matei Zaharia <matei.zaharia@gmail.com>   2013-08-23 11:20:20 -0700
commit     d282c1ebbbe1aebbd409c06efedf95fb77833c35
tree       76353b182fc52cc88031265e1a1ea5a7754dc4f0
parent     5a6ac128406674a76c971a521d0bcec5714559d3
parent     a9db1b7b6eb030feb7beee017d2eca402b73c67c
Merge pull request #860 from jey/sbt-ide-fixes
Fix IDE project generation under SBT
-rw-r--r--  project/SparkBuild.scala   17
-rw-r--r--  project/plugins.sbt         4
2 files changed, 14 insertions(+), 7 deletions(-)
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 2a80f9c946..fbeae27707 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -57,14 +57,14 @@ object SparkBuild extends Build {
// Allows build configuration to be set through environment variables
lazy val hadoopVersion = scala.util.Properties.envOrElse("SPARK_HADOOP_VERSION", DEFAULT_HADOOP_VERSION)
- lazy val isYarnMode = scala.util.Properties.envOrNone("SPARK_WITH_YARN") match {
+ lazy val isYarnEnabled = scala.util.Properties.envOrNone("SPARK_WITH_YARN") match {
case None => DEFAULT_WITH_YARN
case Some(v) => v.toBoolean
}
// Conditionally include the yarn sub-project
- lazy val maybeYarn = if(isYarnMode) Seq[ClasspathDependency](yarn) else Seq[ClasspathDependency]()
- lazy val maybeYarnRef = if(isYarnMode) Seq[ProjectReference](yarn) else Seq[ProjectReference]()
+ lazy val maybeYarn = if(isYarnEnabled) Seq[ClasspathDependency](yarn) else Seq[ClasspathDependency]()
+ lazy val maybeYarnRef = if(isYarnEnabled) Seq[ProjectReference](yarn) else Seq[ProjectReference]()
lazy val allProjects = Seq[ProjectReference](core, repl, examples, bagel, streaming, mllib, tools) ++ maybeYarnRef
def sharedSettings = Defaults.defaultSettings ++ Seq(
@@ -253,7 +253,14 @@ object SparkBuild extends Build {
) ++ assemblySettings ++ extraAssemblySettings
def yarnSettings = sharedSettings ++ Seq(
- name := "spark-yarn",
+ name := "spark-yarn"
+ ) ++ extraYarnSettings ++ assemblySettings ++ extraAssemblySettings
+
+ // Conditionally include the YARN dependencies because some tools look at all sub-projects and will complain
+ // if we refer to nonexistent dependencies (e.g. hadoop-yarn-api from a Hadoop version without YARN).
+ def extraYarnSettings = if(isYarnEnabled) yarnEnabledSettings else Seq()
+
+ def yarnEnabledSettings = Seq(
libraryDependencies ++= Seq(
// Exclude rule required for all ?
"org.apache.hadoop" % "hadoop-client" % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm),
@@ -261,7 +268,7 @@ object SparkBuild extends Build {
"org.apache.hadoop" % "hadoop-yarn-common" % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm),
"org.apache.hadoop" % "hadoop-yarn-client" % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm)
)
- ) ++ assemblySettings ++ extraAssemblySettings
+ )
def extraAssemblySettings() = Seq(test in assembly := {}) ++ Seq(
mergeStrategy in assembly := {
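
To make the intent of the SparkBuild.scala changes easier to follow outside the diff, here is a minimal standalone sketch of the conditional-wiring pattern the patch settles on. The project names, the root aggregation, and the false default are illustrative stand-ins rather than the actual Spark build definitions.

import sbt._
import Keys._

// Sketch of the pattern introduced above: the YARN sub-project is only wired
// into the build when SPARK_WITH_YARN says so, which keeps IDE project
// generation from trying to resolve YARN artifacts that do not exist for the
// configured Hadoop version. Names and the false default are illustrative.
object ConditionalYarnSketch extends Build {

  // Same environment probe as the build file; false stands in for DEFAULT_WITH_YARN.
  lazy val isYarnEnabled = scala.util.Properties.envOrNone("SPARK_WITH_YARN") match {
    case None    => false
    case Some(v) => v.toBoolean
  }

  lazy val core = Project("core", file("core"))
  lazy val yarn = Project("yarn", file("yarn")) dependsOn(core)

  // Gate both the classpath dependency and the project reference on the flag,
  // exactly as maybeYarn/maybeYarnRef do above.
  lazy val maybeYarn    = if (isYarnEnabled) Seq[ClasspathDependency](yarn) else Seq[ClasspathDependency]()
  lazy val maybeYarnRef = if (isYarnEnabled) Seq[ProjectReference](yarn) else Seq[ProjectReference]()

  lazy val allProjects = Seq[ProjectReference](core) ++ maybeYarnRef

  // A root project that aggregates whatever ended up enabled.
  lazy val root = Project("root", file(".")).aggregate(allProjects: _*).dependsOn(maybeYarn: _*)
}
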
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 1b0f879b94..783b40d4f5 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -6,9 +6,9 @@ resolvers += "Spray Repository" at "http://repo.spray.cc/"
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.8.5")
-addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.1.1")
+addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.2.0")
-addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.2.0")
+addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.5.1")
// For Sonatype publishing
//resolvers += Resolver.url("sbt-plugin-releases", new URL("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns)
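
The extraAssemblySettings context in the first file is cut off by the hunk boundary, so the body of the mergeStrategy setting is not visible above. For orientation, a merge-strategy block of that shape under sbt-assembly 0.8.x typically looks like the sketch below; the individual cases are illustrative, not the ones actually defined in SparkBuild.scala.

import sbt._
import Keys._
import sbtassembly.Plugin._
import AssemblyKeys._

object AssemblySettingsSketch {
  // Illustrative merge-strategy body with the same shape as the truncated
  // extraAssemblySettings context above; the cases shown are examples only.
  def extraAssemblySettings() = Seq(test in assembly := {}) ++ Seq(
    mergeStrategy in assembly := {
      case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
      case "reference.conf"                           => MergeStrategy.concat
      case _                                          => MergeStrategy.first
    }
  )
}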