-rwxr-xr-x  bin/compute-classpath.sh | 6 ++++++
-rw-r--r--  project/SparkBuild.scala | 8 +++++++-
2 files changed, 13 insertions(+), 1 deletion(-)
diff --git a/bin/compute-classpath.sh b/bin/compute-classpath.sh
index c7819d4932..72955370d2 100755
--- a/bin/compute-classpath.sh
+++ b/bin/compute-classpath.sh
@@ -39,6 +39,12 @@ else
 fi
 CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
 
+CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"
+CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/classes"
+CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/classes"
+CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/classes"
+CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/classes"
+
 # Add test classes if we're running from SBT or Maven with SPARK_TESTING set to 1
 if [[ $SPARK_TESTING == 1 ]]; then
   CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/test-classes"
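A quick way to sanity-check this change, as a hedged sketch: it assumes compute-classpath.sh still ends by echoing the assembled classpath to stdout, and the grep pattern is only illustrative.

# Print the computed classpath one entry per line and confirm the per-module
# classes directories (core, repl, mllib, bagel, streaming) now appear
# alongside the assembly jar.
./bin/compute-classpath.sh | tr ':' '\n' | grep "target/scala-"
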
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d038a4f479..6d6a8ef992 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -56,6 +56,8 @@ object SparkBuild extends Build {
   lazy val assemblyProj = Project("assembly", file("assembly"), settings = assemblyProjSettings)
     .dependsOn(core, bagel, mllib, repl, streaming) dependsOn(maybeYarn: _*)
 
+  lazy val spark = TaskKey[Unit]("spark", "Build assembly of dependencies and spark packages")
+
   // A configuration to set an alternative publishLocalConfiguration
   lazy val MavenCompile = config("m2r") extend(Compile)
   lazy val publishLocalBoth = TaskKey[Unit]("publish-local", "publish local for m2 and ivy")
@@ -73,6 +75,8 @@ object SparkBuild extends Build {
   lazy val allProjects = Seq[ProjectReference](
     core, repl, examples, bagel, streaming, mllib, tools, assemblyProj) ++ maybeYarnRef
 
+  lazy val packageProjects = Seq[ProjectReference](core, repl, bagel, streaming, mllib, tools) ++ maybeYarnRef
+
   def sharedSettings = Defaults.defaultSettings ++ Seq(
     organization := "org.apache.spark",
     version := "0.8.0-SNAPSHOT",
@@ -288,7 +292,9 @@ object SparkBuild extends Build {
 
   def assemblyProjSettings = sharedSettings ++ Seq(
     name := "spark-assembly",
-    jarName in assembly <<= version map { v => "spark-assembly-" + v + "-hadoop" + hadoopVersion + ".jar" }
+    spark in Compile <<= (packageProjects.map(packageBin in Compile in _) ++ Seq(packageDependency in Compile)).dependOn,
+    jarName in assembly <<= version map { v => "spark-assembly-" + v + "-hadoop" + hadoopVersion + ".jar" },
+    jarName in packageDependency <<= version map { v => "spark-assembly-" + v + "-hadoop" + hadoopVersion + "-deps.jar" }
   ) ++ assemblySettings ++ extraAssemblySettings
 
   def extraAssemblySettings() = Seq(
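
A hedged sketch of the workflow these changes enable. The exact sbt invocation below is an assumption: the new "spark" task is scoped to Compile inside the assembly project, so it may need a "compile:" prefix, and packageDependency is the sbt-assembly plugin task referenced in assemblyProjSettings above.

# One-time: build the dependency-only assembly plus the per-module package jars
# via the new "spark" task (task path is hypothetical; possibly assembly/compile:spark).
sbt/sbt assembly/spark
# Illustrative output, based on the jarName settings in this diff:
#   assembly/target/scala-<scala-version>/spark-assembly-0.8.0-SNAPSHOT-hadoop<hadoop-version>-deps.jar
# Afterwards, recompile only the module being changed; the new entries in
# compute-classpath.sh pick up the fresh classes directories without rebuilding
# the full assembly jar.
sbt/sbt core/compile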