From 5d7b591cfe14177f083814fe3e81745c5d279810 Mon Sep 17 00:00:00 2001
From: Matei Zaharia
Date: Mon, 25 Feb 2013 19:34:32 -0800
Subject: Pass a code JAR to SparkContext in our examples. Fixes SPARK-594.

---
 run2.cmd | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/run2.cmd b/run2.cmd
index 705a4d1ff6..f34869f1b1 100644
--- a/run2.cmd
+++ b/run2.cmd
@@ -62,6 +62,16 @@ set CLASSPATH=%CLASSPATH%;%FWDIR%repl\lib\*
 set CLASSPATH=%CLASSPATH%;%FWDIR%python\lib\*
 set CLASSPATH=%CLASSPATH%;%BAGEL_DIR%\target\scala-%SCALA_VERSION%\classes
 
+rem Figure out the JAR file that our examples were packaged into.
+rem First search in the build path from SBT:
+for /D %%d in ("%EXAMPLES_DIR%/target/scala-%SCALA_VERSION%/spark-examples*.jar") do (
+  set SPARK_EXAMPLES_JAR=%%d
+)
+rem Then search in the build path from Maven:
+for /D %%d in ("%EXAMPLES_DIR%/target/spark-examples*hadoop*.jar") do (
+  set SPARK_EXAMPLES_JAR=%%d
+)
+
 rem Figure out whether to run our class with java or with the scala launcher.
 rem In most cases, we'd prefer to execute our process with java because scala
 rem creates a shell script as the parent of its Java process, which makes it
--
cgit v1.2.3
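
Note on the Subject line above: once run2.cmd exports SPARK_EXAMPLES_JAR, an example program can hand that JAR to SparkContext so it gets shipped to the workers. The Scala sketch below is illustrative only and is not part of this patch; it assumes the Spark 0.7-era constructor SparkContext(master, jobName, sparkHome, jars), the old package name "spark", and a hypothetical ExampleApp object.

// Illustrative sketch (not part of this patch): read the SPARK_EXAMPLES_JAR
// variable that run2.cmd sets and pass it to SparkContext, assuming the
// Spark 0.7-era constructor SparkContext(master, jobName, sparkHome, jars).
import spark.SparkContext

object ExampleApp {  // hypothetical example program, for illustration only
  def main(args: Array[String]) {
    val master = args(0)  // e.g. "local" or a spark:// URL
    // run2.cmd sets SPARK_EXAMPLES_JAR in the environment of the launched JVM;
    // listing it here makes Spark ship the examples JAR to worker classpaths.
    val exampleJar = System.getenv("SPARK_EXAMPLES_JAR")
    val jars = if (exampleJar != null) List(exampleJar) else Nil
    val sc = new SparkContext(master, "ExampleApp", System.getenv("SPARK_HOME"), jars)
    println("Count: " + sc.parallelize(1 to 100).count())
    sc.stop()
  }
}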