author | Patrick Wendell <pwendell@gmail.com> | 2014-06-12 15:43:32 -0700
committer | Patrick Wendell <pwendell@gmail.com> | 2014-06-12 15:43:32 -0700
commit | 1c04652c8f18566baafb13dbae355f8ad2ad8d37 (patch)
tree | ea88c5147a602b2dfb8cfaa4de6c436716fac508 /project
parent | ecde5b837534b11d365fcab78089820990b815cf (diff)
SPARK-1843: Replace assemble-deps with env variable.
(This change is actually small; I moved some logic into
compute-classpath that was previously in spark-class.)
The assemble-deps task has existed for a while to let developers
run local code with new changes quickly. When I'm developing, I
typically use a simpler approach that just prepends the Spark
classes to the classpath ahead of the assembly jar. This behavior is
well defined in the JVM, and the Spark classes take precedence over
those in the assembly.
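To make that precedence rule concrete, here is a minimal Scala sketch (the paths are hypothetical stand-ins for the compiled classes and the assembly jar) showing that the JVM resolves a class from the first classpath entry that contains it:
```scala
import java.net.{URL, URLClassLoader}

// Sketch of the JVM's first-entry-wins classpath rule; the paths below
// are hypothetical stand-ins for build output and the assembly jar.
object ClasspathPrecedence {
  def main(args: Array[String]): Unit = {
    val entries = Array(
      new URL("file:core/target/scala-2.10/classes/"),   // prepended: wins
      new URL("file:assembly/target/spark-assembly.jar") // searched second
    )
    // A null parent keeps resolution inside `entries`, so lookup order is
    // exactly the order of the array -- the same rule `java -cp a:b` uses
    // for a flat classpath string.
    val loader = new URLClassLoader(entries, null)
    val clazz = loader.loadClass("org.apache.spark.SparkContext")
    println("Loaded from: " + clazz.getProtectionDomain.getCodeSource.getLocation)
  }
}
```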
This approach is portable across both builds (sbt and Maven), which is the
main reason I'd like to switch to it. It's also a bit easier to toggle on
and off quickly. You use it as follows:
```
$ ./bin/spark-shell # Use spark with the normal assembly
$ export SPARK_PREPEND_CLASSES=true
$ ./bin/spark-shell # Now it's using compiled classes
$ unset SPARK_PREPEND_CLASSES
$ ./bin/spark-shell # Back to normal
```
Author: Patrick Wendell <pwendell@gmail.com>
Closes #877 from pwendell/assemble-deps and squashes the following commits:
8a11345 [Patrick Wendell] Merge remote-tracking branch 'apache/master' into assemble-deps
faa3168 [Patrick Wendell] Adding a warning for compatibility
3f151a7 [Patrick Wendell] Small fix
bbfb73c [Patrick Wendell] Review feedback
328e9f8 [Patrick Wendell] SPARK-1843: Replace assemble-deps with env variable.
Diffstat (limited to 'project')
-rw-r--r-- | project/SparkBuild.scala | 16
1 file changed, 12 insertions, 4 deletions
```diff
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index ecd9d70680..8b4885d3bb 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -90,7 +90,16 @@ object SparkBuild extends Build {
   lazy val assemblyProj = Project("assembly", file("assembly"), settings = assemblyProjSettings)
     .dependsOn(core, graphx, bagel, mllib, streaming, repl, sql) dependsOn(maybeYarn: _*) dependsOn(maybeHive: _*) dependsOn(maybeGanglia: _*)
 
-  lazy val assembleDeps = TaskKey[Unit]("assemble-deps", "Build assembly of dependencies and packages Spark projects")
+  lazy val assembleDepsTask = TaskKey[Unit]("assemble-deps")
+  lazy val assembleDeps = assembleDepsTask := {
+    println()
+    println("**** NOTE ****")
+    println("'sbt/sbt assemble-deps' is no longer supported.")
+    println("Instead create a normal assembly and:")
+    println("  export SPARK_PREPEND_CLASSES=1 (toggle on)")
+    println("  unset SPARK_PREPEND_CLASSES (toggle off)")
+    println()
+  }
 
   // A configuration to set an alternative publishLocalConfiguration
   lazy val MavenCompile = config("m2r") extend(Compile)
@@ -373,6 +382,7 @@ object SparkBuild extends Build {
       "net.sf.py4j" % "py4j" % "0.8.1"
     ),
     libraryDependencies ++= maybeAvro,
+    assembleDeps,
     previousArtifact := sparkPreviousArtifact("spark-core")
   )
 
@@ -584,9 +594,7 @@ object SparkBuild extends Build {
 
   def assemblyProjSettings = sharedSettings ++ Seq(
     name := "spark-assembly",
-    assembleDeps in Compile <<= (packageProjects.map(packageBin in Compile in _) ++ Seq(packageDependency in Compile)).dependOn,
-    jarName in assembly <<= version map { v => "spark-assembly-" + v + "-hadoop" + hadoopVersion + ".jar" },
-    jarName in packageDependency <<= version map { v => "spark-assembly-" + v + "-hadoop" + hadoopVersion + "-deps.jar" }
+    jarName in assembly <<= version map { v => "spark-assembly-" + v + "-hadoop" + hadoopVersion + ".jar" }
   ) ++ assemblySettings ++ extraAssemblySettings
 
   def extraAssemblySettings() = Seq(
```
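For reference, the stub-task pattern the diff applies here, keeping the old task name registered but binding it to a printed warning, looks like this in isolation. This is a minimal sketch against the sbt 0.13-era `Build`/`TaskKey` API; the project name and message text are illustrative:
```scala
// project/Build.scala -- sketch of a deprecation stub for an sbt task.
import sbt._
import Keys._

object ExampleBuild extends Build {
  // Keep the old task name registered so `sbt assemble-deps` still resolves...
  lazy val assembleDepsTask = TaskKey[Unit]("assemble-deps")

  // ...but bind it to a body that only prints migration guidance, so stale
  // scripts and habits fail loudly instead of silently building the old way.
  lazy val assembleDeps = assembleDepsTask := {
    println("'sbt/sbt assemble-deps' is no longer supported.")
    println("Create a normal assembly and toggle SPARK_PREPEND_CLASSES instead.")
  }

  // Mixing the setting into a project's settings list registers the task
  // on that project, which is what `assembleDeps,` does in the diff above.
  lazy val core = Project("core", file("core"),
    settings = Defaults.defaultSettings ++ Seq(assembleDeps))
}
```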