about summary refs log tree commit diff
path: root/bin/spark-submit2.cmd
diff options
context:
space:
mode:
authorMasayoshi TSUZUKI <tsudukim@oss.nttdata.co.jp>2014-10-14 18:50:14 -0700
committerAndrew Or <andrewor14@gmail.com>2014-10-14 18:50:14 -0700
commit66af8e2508bfe9c9d4aecc17a19f297c98e9661d (patch)
tree11918e2e8a293fb20ed6427613b5c134f47f90c0 /bin/spark-submit2.cmd
parent7b4f39f647da1f7b1b57e38827a8639243c661cb (diff)
downloadspark-66af8e2508bfe9c9d4aecc17a19f297c98e9661d.tar.gz
spark-66af8e2508bfe9c9d4aecc17a19f297c98e9661d.tar.bz2
spark-66af8e2508bfe9c9d4aecc17a19f297c98e9661d.zip
[SPARK-3943] Some scripts bin\*.cmd pollutes environment variables in Windows
Modified not to pollute environment variables. Just moved the main logic into `XXX2.cmd` from `XXX.cmd`, and call `XXX2.cmd` with cmd command in `XXX.cmd`. `pyspark.cmd` and `spark-class.cmd` are already using the same way, but `spark-shell.cmd`, `spark-submit.cmd` and `/python/docs/make.bat` are not. Author: Masayoshi TSUZUKI <tsudukim@oss.nttdata.co.jp> Closes #2797 from tsudukim/feature/SPARK-3943 and squashes the following commits: b397a7d [Masayoshi TSUZUKI] [SPARK-3943] Some scripts bin\*.cmd pollutes environment variables in Windows
Diffstat (limited to 'bin/spark-submit2.cmd')
-rw-r--r-- bin/spark-submit2.cmd | 68
1 files changed, 68 insertions, 0 deletions
diff --git a/bin/spark-submit2.cmd b/bin/spark-submit2.cmd
new file mode 100644
index 0000000000..cf6046d154
--- /dev/null
+++ b/bin/spark-submit2.cmd
@@ -0,0 +1,68 @@
@echo off

rem
rem Licensed to the Apache Software Foundation (ASF) under one or more
rem contributor license agreements. See the NOTICE file distributed with
rem this work for additional information regarding copyright ownership.
rem The ASF licenses this file to You under the Apache License, Version 2.0
rem (the "License"); you may not use this file except in compliance with
rem the License. You may obtain a copy of the License at
rem
rem http://www.apache.org/licenses/LICENSE-2.0
rem
rem Unless required by applicable law or agreed to in writing, software
rem distributed under the License is distributed on an "AS IS" BASIS,
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
rem See the License for the specific language governing permissions and
rem limitations under the License.
rem

rem NOTE: Any changes in this file must be reflected in SparkSubmitDriverBootstrapper.scala!

rem %~dp0 is the directory of this script (bin\), so SPARK_HOME is its parent.
set SPARK_HOME=%~dp0..
rem Keep the full, untouched argument list; the parse loop below consumes %1/%2 via shift.
set ORIG_ARGS=%*

rem Reset the values of all variables used so stale values from the caller's
rem environment cannot leak into this run.
set SPARK_SUBMIT_DEPLOY_MODE=client
set SPARK_SUBMIT_PROPERTIES_FILE=%SPARK_HOME%\conf\spark-defaults.conf
set SPARK_SUBMIT_DRIVER_MEMORY=
set SPARK_SUBMIT_LIBRARY_PATH=
set SPARK_SUBMIT_CLASSPATH=
set SPARK_SUBMIT_OPTS=
set SPARK_SUBMIT_BOOTSTRAP_DRIVER=

rem Scan the arguments for the driver options that must be known before the
rem JVM is launched. Each option's value is the following argument (%2).
:loop
if [%1] == [] goto continue
  if [%1] == [--deploy-mode] (
    set SPARK_SUBMIT_DEPLOY_MODE=%2
  ) else if [%1] == [--properties-file] (
    rem %~2 strips surrounding quotes so the path can be safely re-quoted below.
    set SPARK_SUBMIT_PROPERTIES_FILE=%~2
  ) else if [%1] == [--driver-memory] (
    set SPARK_SUBMIT_DRIVER_MEMORY=%2
  ) else if [%1] == [--driver-library-path] (
    set SPARK_SUBMIT_LIBRARY_PATH=%2
  ) else if [%1] == [--driver-class-path] (
    set SPARK_SUBMIT_CLASSPATH=%2
  ) else if [%1] == [--driver-java-options] (
    set SPARK_SUBMIT_OPTS=%2
  )
  shift
goto loop
:continue

rem For client mode, the driver will be launched in the same JVM that launches
rem SparkSubmit, so we may need to read the properties file for any extra class
rem paths, library paths, java options and memory early on. Otherwise, it will
rem be too late by the time the driver JVM has started.

if [%SPARK_SUBMIT_DEPLOY_MODE%] == [client] (
  rem Quote the path: SPARK_HOME (or a user-supplied file) may contain spaces.
  if exist "%SPARK_SUBMIT_PROPERTIES_FILE%" (
    rem Parse the properties file only if the special configs exist
    for /f %%i in ('findstr /r /c:"^[\t ]*spark.driver.memory" /c:"^[\t ]*spark.driver.extra" ^
     "%SPARK_SUBMIT_PROPERTIES_FILE%"') do (
      set SPARK_SUBMIT_BOOTSTRAP_DRIVER=1
    )
  )
)

rem The outer quotes around the whole /C payload plus the inner quotes around the
rem script path let cmd.exe handle a SPARK_HOME containing spaces correctly.
cmd /V /E /C ""%SPARK_HOME%\bin\spark-class.cmd" org.apache.spark.deploy.SparkSubmit %ORIG_ARGS%"