diff options
author | Josh Rosen <joshrosen@eecs.berkeley.edu> | 2012-12-27 22:47:37 -0800 |
---|---|---|
committer | Josh Rosen <joshrosen@eecs.berkeley.edu> | 2012-12-27 22:47:37 -0800 |
commit | 665466dfff4f89196627a0777eabd3d3894cd296 (patch) | |
tree | 7fa580209756c5fdbb0a52930f30959bbbbc2ba3 /run2.cmd | |
parent | ac32447cd38beac8f6bc7a90be9fd24666bb46ad (diff) | |
download | spark-665466dfff4f89196627a0777eabd3d3894cd296.tar.gz spark-665466dfff4f89196627a0777eabd3d3894cd296.tar.bz2 spark-665466dfff4f89196627a0777eabd3d3894cd296.zip |
Simplify PySpark installation.
- Bundles Py4J binaries, since it's hard to install
- Uses Spark's `run` script to launch the Py4J
gateway, inheriting the settings in spark-env.sh
With these changes, (hopefully) nothing more than
running `sbt/sbt package` will be necessary to run
PySpark.
Diffstat (limited to 'run2.cmd')
-rw-r--r-- | run2.cmd | 2 |
1 file changed, 2 insertions, 0 deletions
@@ -34,6 +34,7 @@ set CORE_DIR=%FWDIR%core
 set REPL_DIR=%FWDIR%repl
 set EXAMPLES_DIR=%FWDIR%examples
 set BAGEL_DIR=%FWDIR%bagel
+set PYSPARK_DIR=%FWDIR%pyspark

 rem Build up classpath
 set CLASSPATH=%SPARK_CLASSPATH%;%MESOS_CLASSPATH%;%FWDIR%conf;%CORE_DIR%\target\scala-%SCALA_VERSION%\classes
@@ -42,6 +43,7 @@ set CLASSPATH=%CLASSPATH%;%REPL_DIR%\target\scala-%SCALA_VERSION%\classes;%EXAMP
 for /R "%FWDIR%\lib_managed\jars" %%j in (*.jar) do set CLASSPATH=!CLASSPATH!;%%j
 for /R "%FWDIR%\lib_managed\bundles" %%j in (*.jar) do set CLASSPATH=!CLASSPATH!;%%j
 for /R "%REPL_DIR%\lib" %%j in (*.jar) do set CLASSPATH=!CLASSPATH!;%%j
+for /R "%PYSPARK_DIR%\lib" %%j in (*.jar) do set CLASSPATH=!CLASSPATH!;%%j
 set CLASSPATH=%CLASSPATH%;%BAGEL_DIR%\target\scala-%SCALA_VERSION%\classes

 rem Figure out whether to run our class with java or with the scala launcher.