aboutsummaryrefslogtreecommitdiff
path: root/bin/spark-daemon.sh
diff options
context:
space:
mode:
authorDenny <dennybritz@gmail.com>2012-08-01 13:17:31 -0700
committerDenny <dennybritz@gmail.com>2012-08-01 20:38:52 -0700
commit0ee44c225e38abbf3382be6e9555ab9a35424a54 (patch)
tree9fc106518bf55fc27083463843e9ee591c63ee59 /bin/spark-daemon.sh
parent545165e8153bb516d4364f9b3df1440c6b44c01b (diff)
downloadspark-0ee44c225e38abbf3382be6e9555ab9a35424a54.tar.gz
spark-0ee44c225e38abbf3382be6e9555ab9a35424a54.tar.bz2
spark-0ee44c225e38abbf3382be6e9555ab9a35424a54.zip
Spark standalone mode cluster scripts.
Heavily inspired by Hadoop cluster scripts ;-)
Diffstat (limited to 'bin/spark-daemon.sh')
-rwxr-xr-xbin/spark-daemon.sh135
1 file changed, 135 insertions, 0 deletions
diff --git a/bin/spark-daemon.sh b/bin/spark-daemon.sh
new file mode 100755
index 0000000000..b5ecd9c1a2
--- /dev/null
+++ b/bin/spark-daemon.sh
@@ -0,0 +1,135 @@
#!/usr/bin/env bash

# Runs a Spark command as a daemon.
#
# Environment Variables
#
#   SPARK_CONF_DIR     Alternate conf dir. Default is ${SPARK_PREFIX}/conf.
#   SPARK_LOG_DIR      Where log files are stored. PWD by default.
#   SPARK_MASTER       host:path where spark code should be rsync'd from
#   SPARK_PID_DIR      The pid files are stored. /tmp by default.
#   SPARK_IDENT_STRING A string representing this instance of spark. $USER by default
#   SPARK_NICENESS     The scheduling priority for daemons. Defaults to 0.
##

usage="Usage: spark-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) <spark-command> <args...>"

# A daemon invocation needs at least an action and a command class;
# anything less prints the usage text and fails.
if [ $# -le 1 ]; then
  # Quote the expansion so the usage line is printed verbatim: unquoted
  # it would be word-split and re-joined, and any glob characters in it
  # would be expanded against the current directory.
  echo "$usage"
  exit 1
fi
+
# Locate the directory holding this script so sibling files can be
# found regardless of the caller's working directory.
bin=$(dirname "$0")
bin=$(cd "$bin"; pwd)

# Shared environment setup (SPARK_HOME, SPARK_PREFIX, ...).
. "$bin/spark-config.sh"

# First argument selects the action (start|stop), the second names the
# Spark command to run; everything after is handed to the daemon as-is.
startStop=$1
shift
command=$1
shift
+
# Rotate a log file, keeping a bounded number of old generations.
#
#   $1 - path of the live log file to rotate
#   $2 - number of generations to keep (optional, default 5)
#
# Shifts log.N-1 -> log.N from the highest slot downwards, then moves
# the live log into log.1, so log.1 is always the newest rotated copy
# and anything beyond the retention count falls off the end.
spark_rotate_log ()
{
  # 'local' keeps the rotation bookkeeping out of the caller's scope
  # (the original leaked log/num/prev as globals).
  local log="$1"
  local num="${2:-5}"
  local prev
  if [ -f "$log" ]; then # rotate logs
    while [ "$num" -gt 1 ]; do
      # Arithmetic expansion instead of forking `expr` every iteration.
      prev=$(( num - 1 ))
      [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
      num=$prev
    done
    mv "$log" "$log.$num"
  fi
}
+
# Load per-site environment overrides, if a spark-env.sh is present.
if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
  . "${SPARK_CONF_DIR}/spark-env.sh"
fi

# The identity string distinguishes daemons run by different users on
# the same host; it is baked into log and pid file names below.
if [ "$SPARK_IDENT_STRING" = "" ]; then
  export SPARK_IDENT_STRING="$USER"
fi

# get log directory
if [ "$SPARK_LOG_DIR" = "" ]; then
  export SPARK_LOG_DIR="$SPARK_HOME/logs"
fi
mkdir -p "$SPARK_LOG_DIR"
# Probe writability with a scratch file; all expansions quoted so a
# log dir containing spaces does not split into multiple arguments.
touch "$SPARK_LOG_DIR/.spark_test" > /dev/null 2>&1
TEST_LOG_DIR=$?
if [ "${TEST_LOG_DIR}" = "0" ]; then
  rm -f "$SPARK_LOG_DIR/.spark_test"
else
  # Not writable: best-effort attempt to claim ownership of the dir.
  chown "$SPARK_IDENT_STRING" "$SPARK_LOG_DIR"
fi

if [ "$SPARK_PID_DIR" = "" ]; then
  SPARK_PID_DIR=/tmp
fi
+
# Derived names for this instance's log and pid files.
export SPARK_LOGFILE="spark-${SPARK_IDENT_STRING}-${command}-${HOSTNAME}.log"
export SPARK_ROOT_LOGGER="INFO,DRFA"
log="${SPARK_LOG_DIR}/spark-${SPARK_IDENT_STRING}-${command}-${HOSTNAME}.out"
pid="${SPARK_PID_DIR}/spark-${SPARK_IDENT_STRING}-${command}.pid"

# Default scheduling priority when the caller supplied none.
export SPARK_NICENESS="${SPARK_NICENESS:-0}"
+
+
# Dispatch on the requested action. All $pid/$log expansions are quoted
# so identity strings or directories containing spaces cannot split the
# path into multiple arguments.
case "$startStop" in

  (start)

    mkdir -p "$SPARK_PID_DIR"

    # Refuse to start a second copy if a live process (kill -0 probes
    # existence without signalling) already owns the pid file.
    if [ -f "$pid" ]; then
      if kill -0 "$(cat "$pid")" > /dev/null 2>&1; then
        echo "$command running as process $(cat "$pid"). Stop it first."
        exit 1
      fi
    fi

    # Optionally refresh the local Spark tree from a master copy first.
    if [ "$SPARK_MASTER" != "" ]; then
      echo "rsync from $SPARK_MASTER"
      rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' "$SPARK_MASTER/" "$SPARK_HOME"
    fi

    spark_rotate_log "$log"
    echo "starting $command, logging to $log"
    # Bail out rather than launching the daemon from the wrong directory
    # if the install prefix is missing.
    cd "$SPARK_PREFIX" || exit 1
    nohup nice -n "$SPARK_NICENESS" "$SPARK_PREFIX"/run "$command" "$@" > "$log" 2>&1 < /dev/null &
    echo $! > "$pid"
    # Give the daemon a moment to come up, then surface its first log lines.
    sleep 1; head "$log"
    ;;

  (stop)

    if [ -f "$pid" ]; then
      if kill -0 "$(cat "$pid")" > /dev/null 2>&1; then
        echo "stopping $command"
        kill "$(cat "$pid")"
      else
        echo "no $command to stop"
      fi
    else
      echo "no $command to stop"
    fi
    ;;

  (*)
    echo "$usage"
    exit 1
    ;;

esac
+
+