From 6999910b0c5ef26080f978be1e2bf065f0816ac9 Mon Sep 17 00:00:00 2001 From: Michael Armbrust Date: Sat, 17 Jan 2015 17:03:07 -0800 Subject: [SPARK-5096] Use sbt tasks instead of vals to get hadoop version This makes it possible to compile spark as an external `ProjectRef` whereas now we throw a `FileNotFoundException` Author: Michael Armbrust Closes #3905 from marmbrus/effectivePom and squashes the following commits: fd63aae [Michael Armbrust] Use sbt tasks instead of vals to get hadoop version. --- project/SparkBuild.scala | 25 ++++++------------------- 1 file changed, 6 insertions(+), 19 deletions(-) (limited to 'project') diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index b2c546da21..ded4b5443a 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -114,17 +114,6 @@ object SparkBuild extends PomBuild { override val userPropertiesMap = System.getProperties.toMap - // Handle case where hadoop.version is set via profile. - // Needed only because we read back this property in sbt - // when we create the assembly jar. 
- val pom = loadEffectivePom(new File("pom.xml"), - profiles = profiles, - userProps = userPropertiesMap) - if (System.getProperty("hadoop.version") == null) { - System.setProperty("hadoop.version", - pom.getProperties.get("hadoop.version").asInstanceOf[String]) - } - lazy val MavenCompile = config("m2r") extend(Compile) lazy val publishLocalBoth = TaskKey[Unit]("publish-local", "publish local for m2 and ivy") @@ -303,16 +292,15 @@ object Assembly { import sbtassembly.Plugin._ import AssemblyKeys._ + val hadoopVersion = taskKey[String]("The version of hadoop that spark is compiled against.") + lazy val settings = assemblySettings ++ Seq( test in assembly := {}, - jarName in assembly <<= (version, moduleName) map { (v, mName) => - if (mName.contains("network-yarn")) { - // This must match the same name used in maven (see network/yarn/pom.xml) - "spark-" + v + "-yarn-shuffle.jar" - } else { - mName + "-" + v + "-hadoop" + System.getProperty("hadoop.version") + ".jar" - } + hadoopVersion := { + sys.props.get("hadoop.version") + .getOrElse(SbtPomKeys.effectivePom.value.getProperties.get("hadoop.version").asInstanceOf[String]) }, + jarName in assembly := s"${moduleName.value}-${version.value}-hadoop${hadoopVersion.value}.jar", mergeStrategy in assembly := { case PathList("org", "datanucleus", xs @ _*) => MergeStrategy.discard case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard @@ -323,7 +311,6 @@ object Assembly { case _ => MergeStrategy.first } ) - } object Unidoc { -- cgit v1.2.3