author     Matei Zaharia <matei@databricks.com>         2014-05-19 15:02:35 -0700
committer  Tathagata Das <tathagata.das1565@gmail.com>  2014-05-19 15:02:35 -0700
commit     7b70a7071894dd90ea1d0091542b3e13e7ef8d3a
tree       e24b0a208b0c2290e6f1b6a6beda520f36ed1fa3 /bin/run-example2.cmd
parent     df0aa8353ab6d3b19d838c6fa95a93a64948309f
[SPARK-1876] Windows fixes to deal with latest distribution layout changes
- Look for JARs in the right place
- Launch examples the same way as on Unix
- Load datanucleus JARs if they exist
- Don't attempt to parse local paths as URIs in SparkSubmit, since paths with C:\ are not valid URIs
- Also fixed POM exclusion rules for datanucleus (it wasn't properly excluding it, whereas SBT was)

Author: Matei Zaharia <matei@databricks.com>

Closes #819 from mateiz/win-fixes and squashes the following commits:

d558f96 [Matei Zaharia] Fix comment
228577b [Matei Zaharia] Review comments
d3b71c7 [Matei Zaharia] Properly exclude datanucleus files in Maven assembly
144af84 [Matei Zaharia] Update Windows scripts to match latest binary package layout
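As a quick illustration of the behavior described above, a hypothetical Windows session (not part of this change, and assuming a built distribution so the examples assembly JAR can be found) would look like:

rem Hypothetical session: pick a master via the MASTER variable and use an
rem abbreviated example class name, as documented in the new usage text below.
set MASTER=local[4]
bin\run-example SparkPi 10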
Diffstat (limited to 'bin/run-example2.cmd')
-rw-r--r--  bin/run-example2.cmd | 51
1 file changed, 39 insertions(+), 12 deletions(-)
diff --git a/bin/run-example2.cmd b/bin/run-example2.cmd
index 40abb9af74..eadedd7fa6 100644
--- a/bin/run-example2.cmd
+++ b/bin/run-example2.cmd
@@ -30,7 +30,9 @@ if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
rem Test that an argument was given
if not "x%1"=="x" goto arg_given
- echo Usage: run-example ^<example-class^> [^<args^>]
+ echo Usage: run-example ^<example-class^> [example-args]
+ echo - set MASTER=XX to use a specific master
+ echo - can use abbreviated example class name (e.g. SparkPi, mllib.LinearRegression)
goto exit
:arg_given
@@ -38,8 +40,14 @@ set EXAMPLES_DIR=%FWDIR%examples
rem Figure out the JAR file that our examples were packaged into.
set SPARK_EXAMPLES_JAR=
-for %%d in ("%EXAMPLES_DIR%\target\scala-%SCALA_VERSION%\spark-examples*assembly*.jar") do (
- set SPARK_EXAMPLES_JAR=%%d
+if exist "%FWDIR%RELEASE" (
+ for %%d in ("%FWDIR%lib\spark-examples*.jar") do (
+ set SPARK_EXAMPLES_JAR=%%d
+ )
+) else (
+ for %%d in ("%EXAMPLES_DIR%\target\scala-%SCALA_VERSION%\spark-examples*.jar") do (
+ set SPARK_EXAMPLES_JAR=%%d
+ )
)
if "x%SPARK_EXAMPLES_JAR%"=="x" (
echo Failed to find Spark examples assembly JAR.
@@ -47,15 +55,34 @@ if "x%SPARK_EXAMPLES_JAR%"=="x" (
goto exit
)
-rem Compute Spark classpath using external script
-set DONT_PRINT_CLASSPATH=1
-call "%FWDIR%bin\compute-classpath.cmd"
-set DONT_PRINT_CLASSPATH=0
-set CLASSPATH=%SPARK_EXAMPLES_JAR%;%CLASSPATH%
+rem Set master from MASTER environment variable if given
+if "x%MASTER%"=="x" (
+ set EXAMPLE_MASTER=local[*]
+) else (
+ set EXAMPLE_MASTER=%MASTER%
+)
+
+rem If the EXAMPLE_CLASS does not start with org.apache.spark.examples, add that
+set EXAMPLE_CLASS=%1
+set PREFIX=%EXAMPLE_CLASS:~0,25%
+if not %PREFIX%==org.apache.spark.examples (
+ set EXAMPLE_CLASS=org.apache.spark.examples.%EXAMPLE_CLASS%
+)
+
+rem Get the tail of the argument list, to skip the first one. This is surprisingly
+rem complicated on Windows.
+set "ARGS="
+:top
+shift
+if "%~1" neq "" (
+ set ARGS=%ARGS% "%~1"
+ goto :top
+)
+if defined ARGS set ARGS=%ARGS:~1%
-rem Figure out where java is.
-set RUNNER=java
-if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
+call "%FWDIR%bin\spark-submit.cmd" ^
+ --master %EXAMPLE_MASTER% ^
+ --class %EXAMPLE_CLASS% ^
+ "%SPARK_EXAMPLES_JAR%" %ARGS%
-"%RUNNER%" -cp "%CLASSPATH%" %JAVA_OPTS% %*
:exit
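The class-prefix check and the argument-tail loop added above rely on two batch idioms that are easy to miss: %VAR:~0,25% extracts the first 25 characters (exactly the length of "org.apache.spark.examples"), and batch has no built-in way to reference "all arguments after the first", so the script shifts through them in a goto loop. A minimal standalone sketch of the same two patterns (hypothetical file name demo.cmd, not part of the Spark change itself):

@echo off
rem Minimal standalone sketch (hypothetical demo.cmd) of the two batch idioms
rem used in the hunk above.

if "x%1"=="x" (
  echo Usage: demo ^<example-class^> [example-args]
  exit /b 1
)

rem Idiom 1: %VAR:~0,25% takes the first 25 characters, the exact length of
rem "org.apache.spark.examples", so abbreviated names get the package prefix.
set EXAMPLE_CLASS=%1
set PREFIX=%EXAMPLE_CLASS:~0,25%
if not %PREFIX%==org.apache.spark.examples (
  set EXAMPLE_CLASS=org.apache.spark.examples.%EXAMPLE_CLASS%
)

rem Idiom 2: there is no %2..%N slice, so shift through the arguments and
rem re-quote each one to build the tail of the list.
set "ARGS="
:collect
shift
if "%~1" neq "" (
  set ARGS=%ARGS% "%~1"
  goto :collect
)
if defined ARGS set ARGS=%ARGS:~1%

echo Would submit class %EXAMPLE_CLASS% with args: %ARGS%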