aboutsummaryrefslogtreecommitdiff
path: root/run
diff options
context:
space:
mode:
authorMatei Zaharia <matei@eecs.berkeley.edu>2012-07-08 14:00:04 -0700
committerMatei Zaharia <matei@eecs.berkeley.edu>2012-07-08 14:00:04 -0700
commit0a472840030e4e7e84fe748f7bfa49f1ece599c5 (patch)
tree23631f3b930a22c37fb5106ef8f40fe9710a8708 /run
parent1aa63f775b9ecfa5225449de5cac8427c0e90d54 (diff)
downloadspark-0a472840030e4e7e84fe748f7bfa49f1ece599c5.tar.gz
spark-0a472840030e4e7e84fe748f7bfa49f1ece599c5.tar.bz2
spark-0a472840030e4e7e84fe748f7bfa49f1ece599c5.zip
More work to allow Spark to run on the standalone deploy cluster.
Diffstat (limited to 'run')
-rwxr-xr-xrun33
1 files changed, 26 insertions, 7 deletions
diff --git a/run b/run
index 5ba94b3243..d386892b95 100755
--- a/run
+++ b/run
@@ -13,15 +13,21 @@ if [ -e $FWDIR/conf/spark-env.sh ] ; then
. $FWDIR/conf/spark-env.sh
fi
+# Check that SCALA_HOME has been specified
+if [ -z "$SCALA_HOME" ]; then
+ echo "SCALA_HOME is not set" >&2
+ exit 1
+fi
+
# If the user specifies a Mesos JAR, put it before our included one on the classpath
MESOS_CLASSPATH=""
-if [ "x$MESOS_JAR" != "x" ] ; then
+if [ -n "$MESOS_JAR" ] ; then
MESOS_CLASSPATH="$MESOS_JAR"
fi
# Figure out how much memory to use per executor and set it as an environment
# variable so that our process sees it and can report it to Mesos
-if [ "x$SPARK_MEM" == "x" ] ; then
+if [ -z "$SPARK_MEM" ] ; then
SPARK_MEM="512m"
fi
export SPARK_MEM
@@ -61,13 +67,26 @@ done
for jar in `find $REPL_DIR/lib -name '*jar'`; do
CLASSPATH+=":$jar"
done
-CLASSPATH+=:$BAGEL_DIR/target/scala-$SCALA_VERSION/classes
+CLASSPATH+=":$BAGEL_DIR/target/scala-$SCALA_VERSION/classes"
export CLASSPATH # Needed for spark-shell
-if [ -n "$SCALA_HOME" ]; then
- SCALA="${SCALA_HOME}/bin/scala"
+# Figure out whether to run our class with java or with the scala launcher.
+# In most cases, we'd prefer to execute our process with java because scala
+# creates a shell script as the parent of its Java process, which makes it
+# hard to kill the child with stuff like Process.destroy(). However, for
+# the Spark shell, the wrapper is necessary to properly reset the terminal
+# when we exit, so we allow it to set a variable to launch with scala.
+if [ "$SPARK_LAUNCH_WITH_SCALA" == "1" ]; then
+ RUNNER="${SCALA_HOME}/bin/scala"
else
- SCALA=scala
+ CLASSPATH+=":$SCALA_HOME/lib/scala-library.jar"
+ CLASSPATH+=":$SCALA_HOME/lib/scala-compiler.jar"
+ CLASSPATH+=":$SCALA_HOME/lib/jline.jar"
+ if [ -n "$JAVA_HOME" ]; then
+ RUNNER="${JAVA_HOME}/bin/java"
+ else
+ RUNNER=java
+ fi
fi
-exec "$SCALA" -cp "$CLASSPATH" "$@"
+exec "$RUNNER" -cp "$CLASSPATH" "$@"