From f08db010d3ba4eff132f5c06a087b0c7c2e58576 Mon Sep 17 00:00:00 2001
From: kalpit
Date: Wed, 6 Mar 2013 18:06:32 -0800
Subject: added SPARK_WORKER_INSTANCES : allows spawning multiple worker
 instances/processes on every slave machine

---
 bin/spark-daemon.sh | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

(limited to 'bin/spark-daemon.sh')

diff --git a/bin/spark-daemon.sh b/bin/spark-daemon.sh
index 0c584055c7..d7dc62ab08 100755
--- a/bin/spark-daemon.sh
+++ b/bin/spark-daemon.sh
@@ -30,7 +30,7 @@
 #   SPARK_NICENESS The scheduling priority for daemons. Defaults to 0.
 ##
 
-usage="Usage: spark-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) <spark-command> <args...>"
+usage="Usage: spark-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) <spark-command> <spark-instance-number> <args...>"
 
 # if no args specified, show usage
 if [ $# -le 1 ]; then
@@ -48,6 +48,8 @@ startStop=$1
 shift
 command=$1
 shift
+instance=$1
+shift
 
 spark_rotate_log ()
 {
@@ -95,7 +97,7 @@ fi
 export SPARK_LOGFILE=spark-$SPARK_IDENT_STRING-$command-$HOSTNAME.log
 export SPARK_ROOT_LOGGER="INFO,DRFA"
 log=$SPARK_LOG_DIR/spark-$SPARK_IDENT_STRING-$command-$HOSTNAME.out
-pid=$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command.pid
+pid=$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid
 
 # Set default scheduling priority
 if [ "$SPARK_NICENESS" = "" ]; then
--
cgit v1.2.3
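
For context, a launcher script could consume the new instance argument by looping over SPARK_WORKER_INSTANCES and starting one worker per iteration, each with its own instance number (and therefore its own pid file, per the change above). This is a minimal sketch, not part of the commit; the worker class name, the --webui-port arithmetic, and the master-URL variables are assumptions modeled on the Spark start scripts of that era.

    # Hypothetical launcher fragment (not from this patch): start
    # SPARK_WORKER_INSTANCES workers on the local machine.
    SPARK_HOME="${SPARK_HOME:-/opt/spark}"                  # assumed install location
    SPARK_WORKER_INSTANCES="${SPARK_WORKER_INSTANCES:-1}"   # default to a single worker
    MASTER_URL="spark://${SPARK_MASTER_IP:-localhost}:${SPARK_MASTER_PORT:-7077}"

    for (( i = 1; i <= SPARK_WORKER_INSTANCES; i++ )); do
      # The third argument is the new <spark-instance-number>; spark-daemon.sh
      # appends it to the pid file name, so each worker can be stopped
      # independently. Each instance also needs a distinct web UI port.
      "$SPARK_HOME/bin/spark-daemon.sh" start spark.deploy.worker.Worker "$i" \
          --webui-port $(( 8081 + i - 1 )) "$MASTER_URL"
    done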