From efd7eed3222799d66d4fcb68785142dc570c8150 Mon Sep 17 00:00:00 2001 From: Michael Armbrust Date: Tue, 19 Jan 2016 14:28:00 -0800 Subject: [BUILD] Runner for spark packages This is a convenience method added to the SBT build for developers, though if people think it's useful we could consider adding an official script that runs using the assembly instead of compiling on demand. It simply compiles spark (without requiring an assembly), and invokes Spark Submit to download / run the package. Example Usage: ``` $ build/sbt > sparkPackage com.databricks:spark-sql-perf_2.10:0.2.4 com.databricks.spark.sql.perf.RunBenchmark --help ``` Author: Michael Armbrust Closes #10834 from marmbrus/sparkPackageRunner. --- project/SparkBuild.scala | 15 +++++++++++++++ 1 file changed, 15 insertions(+) (limited to 'project') diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index 4c34c888cf..06e561ae0d 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -274,6 +274,11 @@ object SparkBuild extends PomBuild { * Usage: `build/sbt sparkShell` */ val sparkShell = taskKey[Unit]("start a spark-shell.") + val sparkPackage = inputKey[Unit]( + s""" + |Download and run a spark package.
+ |Usage `build/sbt "sparkPackage <group:artifact:version> <MainClass> [args]"` + """.stripMargin) val sparkSql = taskKey[Unit]("starts the spark sql CLI.") enable(Seq( @@ -287,6 +292,16 @@ object SparkBuild extends PomBuild { (runMain in Compile).toTask(" org.apache.spark.repl.Main -usejavacp").value }, + sparkPackage := { + import complete.DefaultParsers._ + val packages :: className :: otherArgs = spaceDelimited("<group:artifact:version> <MainClass> [args]").parsed.toList + val scalaRun = (runner in run).value + val classpath = (fullClasspath in Runtime).value + val args = Seq("--packages", packages, "--class", className, (Keys.`package` in Compile in "core").value.getCanonicalPath) ++ otherArgs + println(args) + scalaRun.run("org.apache.spark.deploy.SparkSubmit", classpath.map(_.data), args, streams.value.log) + }, + javaOptions in Compile += "-Dspark.master=local", sparkSql := { -- cgit v1.2.3