author     Josh Rosen <joshrosen@eecs.berkeley.edu>  2012-12-27 22:47:37 -0800
committer  Josh Rosen <joshrosen@eecs.berkeley.edu>  2012-12-27 22:47:37 -0800
commit     665466dfff4f89196627a0777eabd3d3894cd296 (patch)
tree       7fa580209756c5fdbb0a52930f30959bbbbc2ba3 /run2.cmd
parent     ac32447cd38beac8f6bc7a90be9fd24666bb46ad (diff)
Simplify PySpark installation.

- Bundle the Py4J binaries, since Py4J is hard to install
- Use Spark's `run` script to launch the Py4J gateway, inheriting the settings in spark-env.sh

With these changes, (hopefully) nothing more than running `sbt/sbt package` will be necessary to run PySpark.
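To illustrate the second bullet, a Python-side launcher along the following lines can start the gateway JVM through `run` so that it inherits spark-env.sh. This is a minimal sketch, assuming Py4J's `GatewayServer` entry point and its print-the-port-on-stdout handshake; it is not the literal PySpark source.

import os
from subprocess import PIPE, Popen

from py4j.java_gateway import GatewayClient, JavaGateway

SPARK_HOME = os.environ["SPARK_HOME"]


def launch_gateway():
    # Start the gateway JVM through Spark's `run` script so that
    # conf/spark-env.sh is sourced and its settings are inherited.
    # Port 0 asks GatewayServer to bind an ephemeral port.
    command = [os.path.join(SPARK_HOME, "run"), "py4j.GatewayServer",
               "--die-on-broken-pipe", "0"]
    proc = Popen(command, stdout=PIPE, stdin=PIPE)
    # GatewayServer's main() prints the bound port on stdout.
    port = int(proc.stdout.readline())
    # py4j 0.7-era constructor; newer py4j takes GatewayParameters.
    return JavaGateway(GatewayClient(port=port))

Launching through `run` rather than invoking `java` directly keeps classpath and memory configuration in one place for both the gateway and the rest of Spark.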
Diffstat (limited to 'run2.cmd')
-rw-r--r--  run2.cmd  2
1 file changed, 2 insertions, 0 deletions
diff --git a/run2.cmd b/run2.cmd
index 097718b526..6024740726 100644
--- a/run2.cmd
+++ b/run2.cmd
@@ -34,6 +34,7 @@ set CORE_DIR=%FWDIR%core
 set REPL_DIR=%FWDIR%repl
 set EXAMPLES_DIR=%FWDIR%examples
 set BAGEL_DIR=%FWDIR%bagel
+set PYSPARK_DIR=%FWDIR%pyspark
 
 rem Build up classpath
 set CLASSPATH=%SPARK_CLASSPATH%;%MESOS_CLASSPATH%;%FWDIR%conf;%CORE_DIR%\target\scala-%SCALA_VERSION%\classes
@@ -42,6 +43,7 @@ set CLASSPATH=%CLASSPATH%;%REPL_DIR%\target\scala-%SCALA_VERSION%\classes;%EXAMP
 for /R "%FWDIR%\lib_managed\jars" %%j in (*.jar) do set CLASSPATH=!CLASSPATH!;%%j
 for /R "%FWDIR%\lib_managed\bundles" %%j in (*.jar) do set CLASSPATH=!CLASSPATH!;%%j
 for /R "%REPL_DIR%\lib" %%j in (*.jar) do set CLASSPATH=!CLASSPATH!;%%j
+for /R "%PYSPARK_DIR%\lib" %%j in (*.jar) do set CLASSPATH=!CLASSPATH!;%%j
 set CLASSPATH=%CLASSPATH%;%BAGEL_DIR%\target\scala-%SCALA_VERSION%\classes
 
 rem Figure out whether to run our class with java or with the scala launcher.
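The added `for /R` line covers the JVM half of the bundling, putting any jars under `pyspark\lib` (Py4J's Java side) on the classpath. On the Python side, a bundled pure-Python Py4J can be used without a separate install by putting it on `sys.path`. The sketch below is only an illustration of that idea, and the `py4j0.7.egg` filename is hypothetical.

import os
import sys

SPARK_HOME = os.environ["SPARK_HOME"]

# Hypothetical location of a bundled Py4J egg; pure-Python eggs are
# zip-importable, so adding the file to sys.path makes it importable.
PY4J_EGG = os.path.join(SPARK_HOME, "pyspark", "lib", "py4j0.7.egg")

if os.path.exists(PY4J_EGG):
    sys.path.insert(0, PY4J_EGG)

from py4j.java_gateway import JavaGateway  # now resolvable from the egg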