diff options
author | Marcelo Vanzin <vanzin@cloudera.com> | 2016-03-14 11:13:26 -0700 |
---|---|---|
committer | Josh Rosen <joshrosen@databricks.com> | 2016-03-14 11:13:26 -0700 |
commit | 45f8053be5c635b50c7b4ef5a0dc75d30f411291 (patch) | |
tree | 0626d57c95478d55dacad9df51cf68b005501301 /bin/spark-class2.cmd | |
parent | 9a87afd7d1dbcee993d58e648c74aa683d91f07e (diff) | |
download | spark-45f8053be5c635b50c7b4ef5a0dc75d30f411291.tar.gz spark-45f8053be5c635b50c7b4ef5a0dc75d30f411291.tar.bz2 spark-45f8053be5c635b50c7b4ef5a0dc75d30f411291.zip |
[SPARK-13578][CORE] Modify launch scripts to not use assemblies.
Instead of looking for a specially-named assembly, the scripts now will
blindly add all jars under the libs directory to the classpath. This
libs directory is still currently the old assembly dir, so things should
keep working the same way as before until we make more packaging changes.
The only lost feature is the detection of multiple assemblies; I consider
that a minor nicety that only really affects a few developers, so it's probably
ok.
Tested locally by running spark-shell; also did some minor Win32 testing
(just made sure spark-shell started).
Author: Marcelo Vanzin <vanzin@cloudera.com>
Closes #11591 from vanzin/SPARK-13578.
Diffstat (limited to 'bin/spark-class2.cmd')
-rw-r--r-- | bin/spark-class2.cmd | 18 |
1 file changed, 6 insertions, 12 deletions
diff --git a/bin/spark-class2.cmd b/bin/spark-class2.cmd index c4fadb8223..565b87c102 100644 --- a/bin/spark-class2.cmd +++ b/bin/spark-class2.cmd @@ -28,33 +28,27 @@ if "x%1"=="x" ( exit /b 1 ) -rem Find assembly jar -set SPARK_ASSEMBLY_JAR=0 - +rem Find Spark jars. +rem TODO: change the directory name when Spark jars move from "lib". if exist "%SPARK_HOME%\RELEASE" ( - set ASSEMBLY_DIR="%SPARK_HOME%\lib" + set SPARK_JARS_DIR="%SPARK_HOME%\lib" ) else ( - set ASSEMBLY_DIR="%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%" + set SPARK_JARS_DIR="%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%" ) -for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do ( - set SPARK_ASSEMBLY_JAR=%%d -) -if "%SPARK_ASSEMBLY_JAR%"=="0" ( +if not exist "%SPARK_JARS_DIR%"\ ( echo Failed to find Spark assembly JAR. echo You need to build Spark before running this program. exit /b 1 ) -set LAUNCH_CLASSPATH=%SPARK_ASSEMBLY_JAR% +set LAUNCH_CLASSPATH=%SPARK_JARS_DIR%\* rem Add the launcher build dir to the classpath if requested. if not "x%SPARK_PREPEND_CLASSES%"=="x" ( set LAUNCH_CLASSPATH="%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%" ) -set _SPARK_ASSEMBLY=%SPARK_ASSEMBLY_JAR% - rem Figure out where java is. set RUNNER=java if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java |