author     Matei Zaharia <matei@eecs.berkeley.edu>  2013-09-02 18:38:12 -0700
committer  Matei Zaharia <matei@eecs.berkeley.edu>  2013-09-02 18:38:12 -0700
commit     a106ed8b97e707b36818c11d1d7211fa28636178 (patch)
tree       5ce12b04c710bd8e776c31bc3c8cef63f3313622 /pyspark2.cmd
parent     2ce200bf7f7a38afbcacf3303ca2418e49bdbe2a (diff)
parent     59218bdd4996a13116009e3669b1b875be23a694 (diff)
Merge remote-tracking branch 'old/master'
Diffstat (limited to 'pyspark2.cmd')
-rw-r--r--  pyspark2.cmd  55
1 file changed, 55 insertions, 0 deletions
diff --git a/pyspark2.cmd b/pyspark2.cmd
new file mode 100644
index 0000000000..f58e349643
--- /dev/null
+++ b/pyspark2.cmd
@@ -0,0 +1,55 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+set SCALA_VERSION=2.9.3
+
+rem Figure out where the Spark framework is installed
+set FWDIR=%~dp0
+
+rem Export this as SPARK_HOME
+set SPARK_HOME=%FWDIR%
+
+rem Test whether the user has built Spark
+if exist "%FWDIR%RELEASE" goto skip_build_test
+set FOUND_JAR=0
+for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
+ set FOUND_JAR=1
+)
+if "%FOUND_JAR%"=="0" (
+ echo Failed to find Spark assembly JAR.
+ echo You need to build Spark with sbt\sbt assembly before running this program.
+ goto exit
+)
+:skip_build_test
+
+rem Load environment variables from conf\spark-env.cmd, if it exists
+if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
+
+rem Figure out which Python to use.
+if "x%PYSPARK_PYTHON%"=="x" set PYSPARK_PYTHON=python
+
+set PYTHONPATH=%FWDIR%python;%PYTHONPATH%
+
+set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
+set PYTHONSTARTUP=%FWDIR%python\pyspark\shell.py
+
+echo Running %PYSPARK_PYTHON% with PYTHONPATH=%PYTHONPATH%
+
+"%PYSPARK_PYTHON%" %*
+:exit
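
For context, this "2" script is not meant to be run directly: because every set above would otherwise leak into the caller's shell, it is launched from a thin entry-point wrapper in a fresh cmd instance. A minimal sketch of such a wrapper is below; the file name pyspark.cmd and the comment wording are assumptions based on the usual Spark launcher layout, not part of this commit.

@echo off

rem Sketch: Windows entry point for PySpark. To avoid polluting the
rem caller's environment, launch a new cmd instance to do the real work.
rem /V enables delayed variable expansion and /E enables command
rem extensions for the child script. %~dp0 expands to the directory
rem containing this wrapper, so pyspark2.cmd is assumed to sit alongside it.
cmd /V /E /C %~dp0pyspark2.cmd %*

Because pyspark2.cmd falls back to plain "python" only when PYSPARK_PYTHON is unset, a specific interpreter can be selected by exporting that variable before invoking the wrapper, e.g. (path hypothetical):

set PYSPARK_PYTHON=C:\Python27\python.exe
pyspark.cmd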