path: root/project
author    Josh Rosen <rosenville@gmail.com>  2012-07-30 18:05:13 -0700
committer Josh Rosen <rosenville@gmail.com>  2012-07-30 18:05:13 -0700
commit    509b721d12c909d9298127637a9391bfef6e91b4 (patch)
tree      4e5f7f51d638a7ba6414ece70a7fcadc0e826377 /project
parent    3ee2530c0c40f7670151f55c05232728a12c23e2 (diff)
Fix Akka configuration in assembly jar.
This resolves an issue where running Spark from the assembly jar would cause a "No configuration setting found for key 'akka.version'" exception. This solution is from the Akka Team Blog: http://letitcrash.com/post/21025950392/
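The exception occurs because Akka looks up the akka.version key in the reference.conf bundled in its jar; if the assembly keeps only one of the several reference.conf files on the classpath, Akka's entry can be lost. A quick way to verify that an assembled jar exposes the key is to query it with the Typesafe Config library. The following is only an illustrative sketch; the CheckAkkaConfig object is hypothetical and not part of this commit:

import com.typesafe.config.ConfigFactory

// Hypothetical verification snippet, not part of this commit.
// ConfigFactory.load() resolves application.conf against every
// reference.conf visible on the classpath, which mirrors what Akka
// does at startup.
object CheckAkkaConfig {
  def main(args: Array[String]): Unit = {
    val config = ConfigFactory.load()
    // Throws ConfigException.Missing ("No configuration setting found for
    // key 'akka.version'") if the assembled reference.conf is incomplete.
    println("akka.version = " + config.getString("akka.version"))
  }
}

Running this on the classpath of the jar produced by the "assembly" task shows whether the key resolves after the fix.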
Diffstat (limited to 'project')
-rw-r--r--  project/SparkBuild.scala  57
 1 file changed, 55 insertions(+), 2 deletions(-)
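For comparison, sbt-assembly also ships a built-in MergeStrategy.concat that concatenates duplicate reference.conf entries, which would make a hand-rolled merge task unnecessary. A rough sketch, assuming an sbt-assembly version that provides MergeStrategy.concat (whether the version pinned by this build already had it is not established here), with sbtassembly.Plugin._ and AssemblyKeys._ in scope as in SparkBuild.scala:

// Sketch under the assumption that MergeStrategy.concat is available:
// concatenate every reference.conf, discard manifests, keep the first
// copy of anything else.
mergeStrategy in assembly := {
  case "reference.conf" => MergeStrategy.concat
  case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
  case _ => MergeStrategy.first
}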
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 726d490738..3603d42a23 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -1,5 +1,7 @@
import sbt._
import Keys._
+import classpath.ClasspathUtilities.isArchive
+import java.io.FileOutputStream
import sbtassembly.Plugin._
import AssemblyKeys._
@@ -69,12 +71,12 @@ object SparkBuild extends Build {
"cc.spray" % "spray-can" % "1.0-M2.1",
"cc.spray" % "spray-server" % "1.0-M2.1"
)
- ) ++ assemblySettings ++ extraAssemblySettings
+ ) ++ assemblySettings ++ extraAssemblySettings ++ mergeSettings
def replSettings = sharedSettings ++ Seq(
name := "spark-repl",
libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)
- ) ++ assemblySettings ++ extraAssemblySettings
+ ) ++ assemblySettings ++ extraAssemblySettings ++ mergeSettings
def examplesSettings = sharedSettings ++ Seq(
name := "spark-examples"
@@ -82,6 +84,57 @@ object SparkBuild extends Build {
def bagelSettings = sharedSettings ++ Seq(name := "spark-bagel")
+ // Fix for "No configuration setting found for key 'akka.version'" exception
+ // when running Spark from the jar generated by the "assembly" task; see
+ // http://letitcrash.com/post/21025950392/howto-sbt-assembly-vs-reference-conf
+ lazy val merge = TaskKey[File]("merge-reference",
+ "merge all reference.conf")
+
+ lazy val mergeSettings: Seq[Project.Setting[_]] = Seq(
+ merge <<= (fullClasspath in assembly) map {
+ c =>
+ // collect from all elements of the full classpath
+ val (libs, dirs) =
+ c map (_.data) partition (isArchive)
+ // goal is to simply concatenate files here
+ val dest = file("reference.conf")
+ val out = new FileOutputStream(dest)
+ val append = IO.transfer(_: File, out)
+ try {
+ // first collect from managed sources
+ (dirs * "reference.conf").get foreach append
+ // then from dependency jars by unzipping and
+ // collecting reference.conf if present
+ for (lib <- libs) {
+ IO withTemporaryDirectory {
+ dir =>
+ IO.unzip(lib, dir, "reference.conf")
+ (dir * "reference.conf").get foreach append
+ }
+ }
+ // return merged file location as task result
+ dest
+ } finally {
+ out.close()
+ }
+ },
+
+ // get rid of the individual files from jars
+ excludedFiles in assembly <<=
+ (excludedFiles in assembly) {
+ (old) => (bases) =>
+ old(bases) ++ (bases flatMap (base =>
+ (base / "reference.conf").get))
+ },
+
+ // tell sbt-assembly to include our merged file
+ assembledMappings in assembly <<=
+ (assembledMappings in assembly, merge) map {
+ (old, merged) => (f) =>
+ old(f) :+(merged, "reference.conf")
+ }
+ )
+
def extraAssemblySettings() = Seq(test in assembly := {}) ++ Seq(
mergeStrategy in assembly := {
case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard