about summary refs log tree commit diff
path: root/bin
diff options
context:
space:
mode:
author Aaron Davidson <aaron@databricks.com> 2014-03-24 22:24:21 -0700
committer Aaron Davidson <aaron@databricks.com> 2014-03-24 22:24:21 -0700
commit 007a733434aa39cdb137ab9795434ae2af70fe0b (patch)
tree 431f3cfd674c8e825c51bd6a0ecda8d3cc19ab1f /bin
parent b637f2d91ab4d3d5bf13e8d959c919ebd776f6af (diff)
download spark-007a733434aa39cdb137ab9795434ae2af70fe0b.tar.gz
spark-007a733434aa39cdb137ab9795434ae2af70fe0b.tar.bz2
spark-007a733434aa39cdb137ab9795434ae2af70fe0b.zip
SPARK-1286: Make usage of spark-env.sh idempotent
Various spark scripts load spark-env.sh. This can cause growth of any variables that may be appended to (SPARK_CLASSPATH, SPARK_REPL_OPTS) and it makes the precedence order for options specified in spark-env.sh less clear. One use-case for the latter is that we want to set options from the command-line of spark-shell, but these options will be overridden by subsequent loading of spark-env.sh. If we were to load the spark-env.sh first and then set our command-line options, we could guarantee correct precedence order. Note that we use SPARK_CONF_DIR if available to support the sbin/ scripts, which always set this variable from sbin/spark-config.sh. Otherwise, we default to the ../conf/ as usual. Author: Aaron Davidson <aaron@databricks.com> Closes #184 from aarondav/idem and squashes the following commits: e291f91 [Aaron Davidson] Use "private" variables in load-spark-env.sh 8da8360 [Aaron Davidson] Add .sh extension to load-spark-env.sh 93a2471 [Aaron Davidson] SPARK-1286: Make usage of spark-env.sh idempotent
Diffstat (limited to 'bin')
-rwxr-xr-x bin/compute-classpath.sh 5
-rw-r--r-- bin/load-spark-env.sh 35
-rwxr-xr-x bin/pyspark 5
-rwxr-xr-x bin/run-example 5
-rwxr-xr-x bin/spark-class 5
-rwxr-xr-x bin/spark-shell 4
6 files changed, 40 insertions, 19 deletions
diff --git a/bin/compute-classpath.sh b/bin/compute-classpath.sh
index 5f54391418..d6f1ff9084 100755
--- a/bin/compute-classpath.sh
+++ b/bin/compute-classpath.sh
@@ -25,10 +25,7 @@ SCALA_VERSION=2.10
# Figure out where Spark is installed
FWDIR="$(cd `dirname $0`/..; pwd)"
-# Load environment variables from conf/spark-env.sh, if it exists
-if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
- . $FWDIR/conf/spark-env.sh
-fi
+. $FWDIR/bin/load-spark-env.sh
# Build up classpath
CLASSPATH="$SPARK_CLASSPATH:$FWDIR/conf"
diff --git a/bin/load-spark-env.sh b/bin/load-spark-env.sh
new file mode 100644
index 0000000000..476dd82655
--- /dev/null
+++ b/bin/load-spark-env.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This script loads spark-env.sh if it exists, and ensures it is only loaded once.
+# spark-env.sh is loaded from SPARK_CONF_DIR if set, or within the current directory's
+# conf/ subdirectory.
+
+if [ -z "$SPARK_ENV_LOADED" ]; then
+ export SPARK_ENV_LOADED=1
+
+ # Returns the parent of the directory this script lives in.
+ parent_dir="$(cd `dirname $0`/..; pwd)"
+
+ use_conf_dir=${SPARK_CONF_DIR:-"$parent_dir/conf"}
+
+ if [ -f "${use_conf_dir}/spark-env.sh" ]; then
+ . "${use_conf_dir}/spark-env.sh"
+ fi
+fi
diff --git a/bin/pyspark b/bin/pyspark
index ed6f8da730..67e1f61eeb 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -36,10 +36,7 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
fi
fi
-# Load environment variables from conf/spark-env.sh, if it exists
-if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
- . $FWDIR/conf/spark-env.sh
-fi
+. $FWDIR/bin/load-spark-env.sh
# Figure out which Python executable to use
if [ -z "$PYSPARK_PYTHON" ] ; then
diff --git a/bin/run-example b/bin/run-example
index adba7dd97a..5af95a08c6 100755
--- a/bin/run-example
+++ b/bin/run-example
@@ -30,10 +30,7 @@ FWDIR="$(cd `dirname $0`/..; pwd)"
# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"
-# Load environment variables from conf/spark-env.sh, if it exists
-if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
- . $FWDIR/conf/spark-env.sh
-fi
+. $FWDIR/bin/load-spark-env.sh
if [ -z "$1" ]; then
echo "Usage: run-example <example-class> [<args>]" >&2
diff --git a/bin/spark-class b/bin/spark-class
index a3efa2ff98..0dcf0e156c 100755
--- a/bin/spark-class
+++ b/bin/spark-class
@@ -30,10 +30,7 @@ FWDIR="$(cd `dirname $0`/..; pwd)"
# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"
-# Load environment variables from conf/spark-env.sh, if it exists
-if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
- . $FWDIR/conf/spark-env.sh
-fi
+. $FWDIR/bin/load-spark-env.sh
if [ -z "$1" ]; then
echo "Usage: spark-class <class> [<args>]" >&2
diff --git a/bin/spark-shell b/bin/spark-shell
index 7d3fe3aca7..861ab60654 100755
--- a/bin/spark-shell
+++ b/bin/spark-shell
@@ -81,9 +81,7 @@ done
# Set MASTER from spark-env if possible
DEFAULT_SPARK_MASTER_PORT=7077
if [ -z "$MASTER" ]; then
- if [ -e "$FWDIR/conf/spark-env.sh" ]; then
- . "$FWDIR/conf/spark-env.sh"
- fi
+ . $FWDIR/bin/load-spark-env.sh
if [ "x" != "x$SPARK_MASTER_IP" ]; then
if [ "y" != "y$SPARK_MASTER_PORT" ]; then
SPARK_MASTER_PORT="${SPARK_MASTER_PORT}"