author	Michael Armbrust <michael@databricks.com>	2016-01-19 14:28:00 -0800
committer	Reynold Xin <rxin@databricks.com>	2016-01-19 14:28:00 -0800
commit	efd7eed3222799d66d4fcb68785142dc570c8150 (patch)
tree	51065c80e2393a1698e40c5a9ea8ee0dea67cf6b /project
parent	c6f971b4aeca7265ab374fa46c5c452461d9b6a7 (diff)
[BUILD] Runner for spark packages
This is a convenience method added to the SBT build for developers, though if people think it's useful we could consider adding an official script that runs using the assembly instead of compiling on demand. It simply compiles Spark (without requiring an assembly) and invokes Spark Submit to download and run the package.

Example usage:

```
$ build/sbt
> sparkPackage com.databricks:spark-sql-perf_2.10:0.2.4 com.databricks.spark.sql.perf.RunBenchmark --help
```

Author: Michael Armbrust <michael@databricks.com>

Closes #10834 from marmbrus/sparkPackageRunner.
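For illustration, here is a minimal Scala sketch of the SparkSubmit argument list the task assembles, based on the diff below. All values are hypothetical placeholders; in the real task the jar path comes from the core project's package task:

```scala
// Sketch only: mirrors the argument list built in SparkBuild.scala below.
object SparkPackageArgsExample extends App {
  val packages  = "com.databricks:spark-sql-perf_2.10:0.2.4"   // <group:artifact:version>
  val className = "com.databricks.spark.sql.perf.RunBenchmark" // <MainClass>
  val otherArgs = Seq("--help")                                // [args]
  // Placeholder path; the task itself uses (Keys.`package` in Compile in "core").value.
  val coreJar   = "/path/to/spark-core.jar"

  // This Seq is handed to org.apache.spark.deploy.SparkSubmit via sbt's runner.
  val args = Seq("--packages", packages, "--class", className, coreJar) ++ otherArgs
  println(args.mkString(" "))
}
```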
Diffstat (limited to 'project')
-rw-r--r--	project/SparkBuild.scala	15
1 file changed, 15 insertions, 0 deletions
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 4c34c888cf..06e561ae0d 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -274,6 +274,11 @@ object SparkBuild extends PomBuild {
    * Usage: `build/sbt sparkShell`
    */
   val sparkShell = taskKey[Unit]("start a spark-shell.")
+  val sparkPackage = inputKey[Unit](
+    s"""
+      |Download and run a spark package.
+      |Usage: `build/sbt "sparkPackage <group:artifact:version> <MainClass> [args]"`
+    """.stripMargin)
   val sparkSql = taskKey[Unit]("starts the spark sql CLI.")
 
   enable(Seq(
@@ -287,6 +292,16 @@ object SparkBuild extends PomBuild {
       (runMain in Compile).toTask(" org.apache.spark.repl.Main -usejavacp").value
     },
 
+    sparkPackage := {
+      import complete.DefaultParsers._
+      val packages :: className :: otherArgs = spaceDelimited("<group:artifact:version> <MainClass> [args]").parsed.toList
+      val scalaRun = (runner in run).value
+      val classpath = (fullClasspath in Runtime).value
+      val args = Seq("--packages", packages, "--class", className, (Keys.`package` in Compile in "core").value.getCanonicalPath) ++ otherArgs
+      println(args)
+      scalaRun.run("org.apache.spark.deploy.SparkSubmit", classpath.map(_.data), args, streams.value.log)
+    },
+
     javaOptions in Compile += "-Dspark.master=local",
 
     sparkSql := {
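One subtlety in the task body above: the `val packages :: className :: otherArgs = ...` pattern binding requires at least two space-delimited arguments and fails with a `scala.MatchError` otherwise. A self-contained sketch of that parsing step, with hypothetical input standing in for sbt's `spaceDelimited(...).parsed`:

```scala
// Plain-Scala sketch of the destructuring used by the sparkPackage task.
object SparkPackageParseSketch extends App {
  // Stand-in for spaceDelimited("<group:artifact:version> <MainClass> [args]").parsed.toList
  val parsed = List(
    "com.databricks:spark-sql-perf_2.10:0.2.4",
    "com.databricks.spark.sql.perf.RunBenchmark",
    "--help")

  // Partial match: with fewer than two elements this throws
  // scala.MatchError at runtime, aborting the task.
  val packages :: className :: otherArgs = parsed

  println(s"packages=$packages, class=$className, extra=$otherArgs")
}
```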