about summary refs log tree commit diff
path: root/bin/pyspark2.cmd
diff options
context:
space:
mode:
Diffstat (limited to 'bin/pyspark2.cmd')
-rw-r--r--  bin/pyspark2.cmd  57
1 file changed, 10 insertions, 47 deletions
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index a542ec80b4..4f5eb5e206 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -17,59 +17,22 @@ rem See the License for the specific language governing permissions and
rem limitations under the License.
rem
-set SCALA_VERSION=2.10
-
rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0..\
-
-rem Export this as SPARK_HOME
-set SPARK_HOME=%FWDIR%
-
-rem Test whether the user has built Spark
-if exist "%FWDIR%RELEASE" goto skip_build_test
-set FOUND_JAR=0
-for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
- set FOUND_JAR=1
-)
-if [%FOUND_JAR%] == [0] (
- echo Failed to find Spark assembly JAR.
- echo You need to build Spark before running this program.
- goto exit
-)
-:skip_build_test
+set SPARK_HOME=%~dp0..
rem Load environment variables from conf\spark-env.cmd, if it exists
-if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
+if exist "%SPARK_HOME%\conf\spark-env.cmd" call "%SPARK_HOME%\conf\spark-env.cmd"
rem Figure out which Python to use.
-if [%PYSPARK_PYTHON%] == [] set PYSPARK_PYTHON=python
+if "x%PYSPARK_DRIVER_PYTHON%"=="x" (
+ set PYSPARK_DRIVER_PYTHON=python
+ if not [%PYSPARK_PYTHON%] == [] set PYSPARK_DRIVER_PYTHON=%PYSPARK_PYTHON%
+)
-set PYTHONPATH=%FWDIR%python;%PYTHONPATH%
-set PYTHONPATH=%FWDIR%python\lib\py4j-0.8.2.1-src.zip;%PYTHONPATH%
+set PYTHONPATH=%SPARK_HOME%\python;%PYTHONPATH%
+set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.8.2.1-src.zip;%PYTHONPATH%
set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
-set PYTHONSTARTUP=%FWDIR%python\pyspark\shell.py
-set PYSPARK_SUBMIT_ARGS=%*
-
-echo Running %PYSPARK_PYTHON% with PYTHONPATH=%PYTHONPATH%
-
-rem Check whether the argument is a file
-for /f %%i in ('echo %1^| findstr /R "\.py"') do (
- set PYTHON_FILE=%%i
-)
-
-if [%PYTHON_FILE%] == [] (
- if [%IPYTHON%] == [1] (
- ipython %IPYTHON_OPTS%
- ) else (
- %PYSPARK_PYTHON%
- )
-) else (
- echo.
- echo WARNING: Running python applications through ./bin/pyspark.cmd is deprecated as of Spark 1.0.
- echo Use ./bin/spark-submit ^<python file^>
- echo.
- "%FWDIR%\bin\spark-submit.cmd" %PYSPARK_SUBMIT_ARGS%
-)
+set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py
-:exit
+call %SPARK_HOME%\bin\spark-submit2.cmd pyspark-shell-main %*