From fcfe4f920484b64b01e4e22219d59c78ffd17054 Mon Sep 17 00:00:00 2001 From: shane-huang Date: Mon, 23 Sep 2013 12:42:34 +0800 Subject: add admin scripts to sbin Signed-off-by: shane-huang --- bin/compute-classpath.cmd | 69 ------------------- bin/compute-classpath.sh | 61 ----------------- bin/slaves.sh | 74 -------------------- bin/spark-config.sh | 36 ---------- bin/spark-daemon.sh | 164 --------------------------------------------- bin/spark-daemons.sh | 35 ---------- bin/start-all.sh | 34 ---------- bin/start-master.sh | 52 -------------- bin/start-slave.sh | 35 ---------- bin/start-slaves.sh | 48 ------------- bin/stop-all.sh | 32 --------- bin/stop-master.sh | 27 -------- bin/stop-slaves.sh | 37 ---------- docs/spark-standalone.md | 12 ++-- sbin/compute-classpath.cmd | 69 +++++++++++++++++++ sbin/compute-classpath.sh | 61 +++++++++++++++++ sbin/slaves.sh | 74 ++++++++++++++++++++ sbin/spark-config.sh | 36 ++++++++++ sbin/spark-daemon.sh | 164 +++++++++++++++++++++++++++++++++++++++++++++ sbin/spark-daemons.sh | 35 ++++++++++ sbin/start-all.sh | 34 ++++++++++ sbin/start-master.sh | 52 ++++++++++++++ sbin/start-slave.sh | 35 ++++++++++ sbin/start-slaves.sh | 48 +++++++++++++ sbin/stop-all.sh | 32 +++++++++ sbin/stop-master.sh | 27 ++++++++ sbin/stop-slaves.sh | 37 ++++++++++ 27 files changed, 710 insertions(+), 710 deletions(-) delete mode 100644 bin/compute-classpath.cmd delete mode 100755 bin/compute-classpath.sh delete mode 100755 bin/slaves.sh delete mode 100755 bin/spark-config.sh delete mode 100755 bin/spark-daemon.sh delete mode 100755 bin/spark-daemons.sh delete mode 100755 bin/start-all.sh delete mode 100755 bin/start-master.sh delete mode 100755 bin/start-slave.sh delete mode 100755 bin/start-slaves.sh delete mode 100755 bin/stop-all.sh delete mode 100755 bin/stop-master.sh delete mode 100755 bin/stop-slaves.sh create mode 100644 sbin/compute-classpath.cmd create mode 100755 sbin/compute-classpath.sh create mode 100755 sbin/slaves.sh create mode 100755 sbin/spark-config.sh create mode 100755 sbin/spark-daemon.sh create mode 100755 sbin/spark-daemons.sh create mode 100755 sbin/start-all.sh create mode 100755 sbin/start-master.sh create mode 100755 sbin/start-slave.sh create mode 100755 sbin/start-slaves.sh create mode 100755 sbin/stop-all.sh create mode 100755 sbin/stop-master.sh create mode 100755 sbin/stop-slaves.sh diff --git a/bin/compute-classpath.cmd b/bin/compute-classpath.cmd deleted file mode 100644 index cf38188c4b..0000000000 --- a/bin/compute-classpath.cmd +++ /dev/null @@ -1,69 +0,0 @@ -@echo off - -rem -rem Licensed to the Apache Software Foundation (ASF) under one or more -rem contributor license agreements. See the NOTICE file distributed with -rem this work for additional information regarding copyright ownership. -rem The ASF licenses this file to You under the Apache License, Version 2.0 -rem (the "License"); you may not use this file except in compliance with -rem the License. You may obtain a copy of the License at -rem -rem http://www.apache.org/licenses/LICENSE-2.0 -rem -rem Unless required by applicable law or agreed to in writing, software -rem distributed under the License is distributed on an "AS IS" BASIS, -rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -rem See the License for the specific language governing permissions and -rem limitations under the License. 
-rem - -rem This script computes Spark's classpath and prints it to stdout; it's used by both the "run" -rem script and the ExecutorRunner in standalone cluster mode. - -set SCALA_VERSION=2.9.3 - -rem Figure out where the Spark framework is installed -set FWDIR=%~dp0..\ - -rem Load environment variables from conf\spark-env.cmd, if it exists -if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd" - -rem Build up classpath -set CLASSPATH=%SPARK_CLASSPATH%;%FWDIR%conf -if exist "%FWDIR%RELEASE" ( - for %%d in ("%FWDIR%jars\spark-assembly*.jar") do ( - set ASSEMBLY_JAR=%%d - ) -) else ( - for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do ( - set ASSEMBLY_JAR=%%d - ) -) -set CLASSPATH=%CLASSPATH%;%ASSEMBLY_JAR% - -if "x%SPARK_TESTING%"=="x1" ( - rem Add test classes to path - set CLASSPATH=%CLASSPATH%;%FWDIR%core\target\scala-%SCALA_VERSION%\test-classes - set CLASSPATH=%CLASSPATH%;%FWDIR%repl\target\scala-%SCALA_VERSION%\test-classes - set CLASSPATH=%CLASSPATH%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\test-classes - set CLASSPATH=%CLASSPATH%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\test-classes - set CLASSPATH=%CLASSPATH%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\test-classes -) - -rem Add hadoop conf dir - else FileSystem.*, etc fail -rem Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts -rem the configuration files. -if "x%HADOOP_CONF_DIR%"=="x" goto no_hadoop_conf_dir - set CLASSPATH=%CLASSPATH%;%HADOOP_CONF_DIR% -:no_hadoop_conf_dir - -if "x%YARN_CONF_DIR%"=="x" goto no_yarn_conf_dir - set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR% -:no_yarn_conf_dir - -rem A bit of a hack to allow calling this script within run2.cmd without seeing output -if "%DONT_PRINT_CLASSPATH%"=="1" goto exit - -echo %CLASSPATH% - -:exit diff --git a/bin/compute-classpath.sh b/bin/compute-classpath.sh deleted file mode 100755 index c7819d4932..0000000000 --- a/bin/compute-classpath.sh +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env bash - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# This script computes Spark's classpath and prints it to stdout; it's used by both the "run" -# script and the ExecutorRunner in standalone cluster mode. - -SCALA_VERSION=2.9.3 - -# Figure out where Spark is installed -FWDIR="$(cd `dirname $0`/..; pwd)" - -# Load environment variables from conf/spark-env.sh, if it exists -if [ -e $FWDIR/conf/spark-env.sh ] ; then - .
$FWDIR/conf/spark-env.sh -fi - -# Build up classpath -CLASSPATH="$SPARK_CLASSPATH:$FWDIR/conf" -if [ -f "$FWDIR/RELEASE" ]; then - ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar` -else - ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar` -fi -CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR" - -# Add test classes if we're running from SBT or Maven with SPARK_TESTING set to 1 -if [[ $SPARK_TESTING == 1 ]]; then - CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/test-classes" - CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/test-classes" - CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/test-classes" - CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/test-classes" - CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/test-classes" -fi - -# Add hadoop conf dir if given -- otherwise FileSystem.*, etc fail ! -# Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts -# the configuration files. -if [ "x" != "x$HADOOP_CONF_DIR" ]; then - CLASSPATH="$CLASSPATH:$HADOOP_CONF_DIR" -fi -if [ "x" != "x$YARN_CONF_DIR" ]; then - CLASSPATH="$CLASSPATH:$YARN_CONF_DIR" -fi - -echo "$CLASSPATH" diff --git a/bin/slaves.sh b/bin/slaves.sh deleted file mode 100755 index 752565b759..0000000000 --- a/bin/slaves.sh +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env bash - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Run a shell command on all slave hosts. -# -# Environment Variables -# -# SPARK_SLAVES File naming remote hosts. -# Default is ${SPARK_CONF_DIR}/slaves. -# SPARK_CONF_DIR Alternate conf dir. Default is ${SPARK_HOME}/conf. -# SPARK_SLAVE_SLEEP Seconds to sleep between spawning remote commands. -# SPARK_SSH_OPTS Options passed to ssh when running remote commands. -## - -usage="Usage: slaves.sh [--config confdir] command..." - -# if no args specified, show usage -if [ $# -le 0 ]; then - echo $usage - exit 1 -fi - -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` - -. "$bin/spark-config.sh" - -# If the slaves file is specified in the command line, -# then it takes precedence over the definition in -# spark-env.sh. Save it here. -HOSTLIST=$SPARK_SLAVES - -if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then - .
"${SPARK_CONF_DIR}/spark-env.sh" -fi - -if [ "$HOSTLIST" = "" ]; then - if [ "$SPARK_SLAVES" = "" ]; then - export HOSTLIST="${SPARK_CONF_DIR}/slaves" - else - export HOSTLIST="${SPARK_SLAVES}" - fi -fi - -# By default disable strict host key checking -if [ "$SPARK_SSH_OPTS" = "" ]; then - SPARK_SSH_OPTS="-o StrictHostKeyChecking=no" -fi - -for slave in `cat "$HOSTLIST"|sed "s/#.*$//;/^$/d"`; do - ssh $SPARK_SSH_OPTS $slave $"${@// /\\ }" \ - 2>&1 | sed "s/^/$slave: /" & - if [ "$SPARK_SLAVE_SLEEP" != "" ]; then - sleep $SPARK_SLAVE_SLEEP - fi -done - -wait diff --git a/bin/spark-config.sh b/bin/spark-config.sh deleted file mode 100755 index cd2c7b7b0d..0000000000 --- a/bin/spark-config.sh +++ /dev/null @@ -1,36 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# included in all the spark scripts with source command -# should not be executable directly -# also should not be passed any arguments, since we need original $* - -# resolve links - $0 may be a softlink -this="${BASH_SOURCE-$0}" -common_bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P) -script="$(basename -- "$this")" -this="$common_bin/$script" - -# convert relative path to absolute path -config_bin=`dirname "$this"` -script=`basename "$this"` -config_bin=`cd "$config_bin"; pwd` -this="$config_bin/$script" - -export SPARK_PREFIX=`dirname "$this"`/.. -export SPARK_HOME=${SPARK_PREFIX} -export SPARK_CONF_DIR="$SPARK_HOME/conf" diff --git a/bin/spark-daemon.sh b/bin/spark-daemon.sh deleted file mode 100755 index 6baee0c33b..0000000000 --- a/bin/spark-daemon.sh +++ /dev/null @@ -1,164 +0,0 @@ -#!/usr/bin/env bash - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Runs a Spark command as a daemon. -# -# Environment Variables -# -# SPARK_CONF_DIR Alternate conf dir. Default is ${SPARK_PREFIX}/conf. -# SPARK_LOG_DIR Where log files are stored. PWD by default. -# SPARK_MASTER host:path where spark code should be rsync'd from -# SPARK_PID_DIR The pid files are stored. /tmp by default. -# SPARK_IDENT_STRING A string representing this instance of spark. 
$USER by default -# SPARK_NICENESS The scheduling priority for daemons. Defaults to 0. -## - -usage="Usage: spark-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) <spark-command> <spark-instance-number> <args...>" - -# if no args specified, show usage -if [ $# -le 1 ]; then - echo $usage - exit 1 -fi - -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` - -. "$bin/spark-config.sh" - -# get arguments -startStop=$1 -shift -command=$1 -shift -instance=$1 -shift - -spark_rotate_log () -{ - log=$1; - num=5; - if [ -n "$2" ]; then - num=$2 - fi - if [ -f "$log" ]; then # rotate logs - while [ $num -gt 1 ]; do - prev=`expr $num - 1` - [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num" - num=$prev - done - mv "$log" "$log.$num"; - fi -} - -if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then - . "${SPARK_CONF_DIR}/spark-env.sh" -fi - -if [ "$SPARK_IDENT_STRING" = "" ]; then - export SPARK_IDENT_STRING="$USER" -fi - - -export SPARK_PRINT_LAUNCH_COMMAND="1" - -# get log directory -if [ "$SPARK_LOG_DIR" = "" ]; then - export SPARK_LOG_DIR="$SPARK_HOME/logs" -fi -mkdir -p "$SPARK_LOG_DIR" -touch $SPARK_LOG_DIR/.spark_test > /dev/null 2>&1 -TEST_LOG_DIR=$? -if [ "${TEST_LOG_DIR}" = "0" ]; then - rm -f $SPARK_LOG_DIR/.spark_test -else - chown $SPARK_IDENT_STRING $SPARK_LOG_DIR -fi - -if [ "$SPARK_PID_DIR" = "" ]; then - SPARK_PID_DIR=/tmp -fi - -# some variables -export SPARK_LOGFILE=spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.log -export SPARK_ROOT_LOGGER="INFO,DRFA" -log=$SPARK_LOG_DIR/spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.out -pid=$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid - -# Set default scheduling priority -if [ "$SPARK_NICENESS" = "" ]; then - export SPARK_NICENESS=0 -fi - - -case $startStop in - - (start) - - mkdir -p "$SPARK_PID_DIR" - - if [ -f $pid ]; then - if kill -0 `cat $pid` > /dev/null 2>&1; then - echo $command running as process `cat $pid`. Stop it first. - exit 1 - fi - fi - - if [ "$SPARK_MASTER" != "" ]; then - echo rsync from $SPARK_MASTER - rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $SPARK_MASTER/ "$SPARK_HOME" - fi - - spark_rotate_log "$log" - echo starting $command, logging to $log - cd "$SPARK_PREFIX" - nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/sbin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null & - newpid=$! - echo $newpid > $pid - sleep 2 - # Check if the process has died; in that case we'll tail the log so the user can see - if ! kill -0 $newpid >/dev/null 2>&1; then - echo "failed to launch $command:" - tail -2 "$log" | sed 's/^/ /' - echo "full log in $log" - fi - ;; - - (stop) - - if [ -f $pid ]; then - if kill -0 `cat $pid` > /dev/null 2>&1; then - echo stopping $command - kill `cat $pid` - else - echo no $command to stop - fi - else - echo no $command to stop - fi - ;; - - (*) - echo $usage - exit 1 - ;; - -esac - - diff --git a/bin/spark-daemons.sh b/bin/spark-daemons.sh deleted file mode 100755 index 354eb905a1..0000000000 --- a/bin/spark-daemons.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License.
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Run a Spark command on all slave hosts. - -usage="Usage: spark-daemons.sh [--config confdir] [--hosts hostlistfile] [start|stop] command instance-number args..." - -# if no args specified, show usage -if [ $# -le 1 ]; then - echo $usage - exit 1 -fi - -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` - -. "$bin/spark-config.sh" - -exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/spark-daemon.sh" "$@" diff --git a/bin/start-all.sh b/bin/start-all.sh deleted file mode 100755 index 0182f1ab24..0000000000 --- a/bin/start-all.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env bash - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Start all spark daemons. -# Starts the master on this node. -# Starts a worker on each node specified in conf/slaves - -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` - -# Load the Spark configuration -. "$bin/spark-config.sh" - -# Start Master -"$bin"/start-master.sh - -# Start Workers -"$bin"/start-slaves.sh diff --git a/bin/start-master.sh b/bin/start-master.sh deleted file mode 100755 index 648c7ae75f..0000000000 --- a/bin/start-master.sh +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env bash - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Starts the master on the machine this script is executed on. - -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` - -. "$bin/spark-config.sh" - -if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then - . 
"${SPARK_CONF_DIR}/spark-env.sh" -fi - -if [ "$SPARK_MASTER_PORT" = "" ]; then - SPARK_MASTER_PORT=7077 -fi - -if [ "$SPARK_MASTER_IP" = "" ]; then - SPARK_MASTER_IP=`hostname` -fi - -if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then - SPARK_MASTER_WEBUI_PORT=8080 -fi - -# Set SPARK_PUBLIC_DNS so the master report the correct webUI address to the slaves -if [ "$SPARK_PUBLIC_DNS" = "" ]; then - # If we appear to be running on EC2, use the public address by default: - # NOTE: ec2-metadata is installed on Amazon Linux AMI. Check based on that and hostname - if command -v ec2-metadata > /dev/null || [[ `hostname` == *ec2.internal ]]; then - export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname` - fi -fi - -"$bin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT diff --git a/bin/start-slave.sh b/bin/start-slave.sh deleted file mode 100755 index 4eefa20944..0000000000 --- a/bin/start-slave.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Usage: start-slave.sh -# where is like "spark://localhost:7077" - -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` - -# Set SPARK_PUBLIC_DNS so slaves can be linked in master web UI -if [ "$SPARK_PUBLIC_DNS" = "" ]; then - # If we appear to be running on EC2, use the public address by default: - # NOTE: ec2-metadata is installed on Amazon Linux AMI. Check based on that and hostname - if command -v ec2-metadata > /dev/null || [[ `hostname` == *ec2.internal ]]; then - export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname` - fi -fi - -"$bin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@" diff --git a/bin/start-slaves.sh b/bin/start-slaves.sh deleted file mode 100755 index 00dc4888b2..0000000000 --- a/bin/start-slaves.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env bash - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` - -. "$bin/spark-config.sh" - -if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then - . "${SPARK_CONF_DIR}/spark-env.sh" -fi - -# Find the port number for the master -if [ "$SPARK_MASTER_PORT" = "" ]; then - SPARK_MASTER_PORT=7077 -fi - -if [ "$SPARK_MASTER_IP" = "" ]; then - SPARK_MASTER_IP=`hostname` -fi - -# Launch the slaves -if [ "$SPARK_WORKER_INSTANCES" = "" ]; then - exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT -else - if [ "$SPARK_WORKER_WEBUI_PORT" = "" ]; then - SPARK_WORKER_WEBUI_PORT=8081 - fi - for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do - "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i )) - done -fi diff --git a/bin/stop-all.sh b/bin/stop-all.sh deleted file mode 100755 index b6c83a7ba4..0000000000 --- a/bin/stop-all.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env bash - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Stop all spark daemons. -# Run this on the master node - - -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` - -# Load the Spark configuration -. "$bin/spark-config.sh" - -# Stop the slaves, then the master -"$bin"/stop-slaves.sh -"$bin"/stop-master.sh diff --git a/bin/stop-master.sh b/bin/stop-master.sh deleted file mode 100755 index 310e33bedc..0000000000 --- a/bin/stop-master.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env bash - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Stops the master on the machine this script is executed on. - -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` - -.
"$bin/spark-config.sh" - -"$bin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1 diff --git a/bin/stop-slaves.sh b/bin/stop-slaves.sh deleted file mode 100755 index 03e416a132..0000000000 --- a/bin/stop-slaves.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env bash - -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Starts the master on the machine this script is executed on. - -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` - -. "$bin/spark-config.sh" - -if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then - . "${SPARK_CONF_DIR}/spark-env.sh" -fi - -if [ "$SPARK_WORKER_INSTANCES" = "" ]; then - "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1 -else - for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do - "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 )) - done -fi diff --git a/docs/spark-standalone.md b/docs/spark-standalone.md index 9d4ad1ec8d..b3f9160673 100644 --- a/docs/spark-standalone.md +++ b/docs/spark-standalone.md @@ -67,12 +67,12 @@ To launch a Spark standalone cluster with the launch scripts, you need to create Once you've set up this file, you can launch or stop your cluster with the following shell scripts, based on Hadoop's deploy scripts, and available in `SPARK_HOME/bin`: -- `bin/start-master.sh` - Starts a master instance on the machine the script is executed on. -- `bin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file. -- `bin/start-all.sh` - Starts both a master and a number of slaves as described above. -- `bin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script. -- `bin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`. -- `bin/stop-all.sh` - Stops both the master and the slaves as described above. +- `sbin/start-master.sh` - Starts a master instance on the machine the script is executed on. +- `sbin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file. +- `sbin/start-all.sh` - Starts both a master and a number of slaves as described above. +- `sbin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script. +- `sbin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`. +- `sbin/stop-all.sh` - Stops both the master and the slaves as described above. Note that these scripts must be executed on the machine you want to run the Spark master on, not your local machine. 
diff --git a/sbin/compute-classpath.cmd b/sbin/compute-classpath.cmd new file mode 100644 index 0000000000..cf38188c4b --- /dev/null +++ b/sbin/compute-classpath.cmd @@ -0,0 +1,69 @@ +@echo off + +rem +rem Licensed to the Apache Software Foundation (ASF) under one or more +rem contributor license agreements. See the NOTICE file distributed with +rem this work for additional information regarding copyright ownership. +rem The ASF licenses this file to You under the Apache License, Version 2.0 +rem (the "License"); you may not use this file except in compliance with +rem the License. You may obtain a copy of the License at +rem +rem http://www.apache.org/licenses/LICENSE-2.0 +rem +rem Unless required by applicable law or agreed to in writing, software +rem distributed under the License is distributed on an "AS IS" BASIS, +rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +rem See the License for the specific language governing permissions and +rem limitations under the License. +rem + +rem This script computes Spark's classpath and prints it to stdout; it's used by both the "run" +rem script and the ExecutorRunner in standalone cluster mode. + +set SCALA_VERSION=2.9.3 + +rem Figure out where the Spark framework is installed +set FWDIR=%~dp0..\ + +rem Load environment variables from conf\spark-env.cmd, if it exists +if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd" + +rem Build up classpath +set CLASSPATH=%SPARK_CLASSPATH%;%FWDIR%conf +if exist "%FWDIR%RELEASE" ( + for %%d in ("%FWDIR%jars\spark-assembly*.jar") do ( + set ASSEMBLY_JAR=%%d + ) +) else ( + for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do ( + set ASSEMBLY_JAR=%%d + ) +) +set CLASSPATH=%CLASSPATH%;%ASSEMBLY_JAR% + +if "x%SPARK_TESTING%"=="x1" ( + rem Add test classes to path + set CLASSPATH=%CLASSPATH%;%FWDIR%core\target\scala-%SCALA_VERSION%\test-classes + set CLASSPATH=%CLASSPATH%;%FWDIR%repl\target\scala-%SCALA_VERSION%\test-classes + set CLASSPATH=%CLASSPATH%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\test-classes + set CLASSPATH=%CLASSPATH%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\test-classes + set CLASSPATH=%CLASSPATH%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\test-classes +) + +rem Add hadoop conf dir - else FileSystem.*, etc fail +rem Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts +rem the configuration files. +if "x%HADOOP_CONF_DIR%"=="x" goto no_hadoop_conf_dir + set CLASSPATH=%CLASSPATH%;%HADOOP_CONF_DIR% +:no_hadoop_conf_dir + +if "x%YARN_CONF_DIR%"=="x" goto no_yarn_conf_dir + set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR% +:no_yarn_conf_dir + +rem A bit of a hack to allow calling this script within run2.cmd without seeing output +if "%DONT_PRINT_CLASSPATH%"=="1" goto exit + +echo %CLASSPATH% + +:exit diff --git a/sbin/compute-classpath.sh b/sbin/compute-classpath.sh new file mode 100755 index 0000000000..c7819d4932 --- /dev/null +++ b/sbin/compute-classpath.sh @@ -0,0 +1,61 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# This script computes Spark's classpath and prints it to stdout; it's used by both the "run" +# script and the ExecutorRunner in standalone cluster mode. + +SCALA_VERSION=2.9.3 + +# Figure out where Spark is installed +FWDIR="$(cd `dirname $0`/..; pwd)" + +# Load environment variables from conf/spark-env.sh, if it exists +if [ -e $FWDIR/conf/spark-env.sh ] ; then + . $FWDIR/conf/spark-env.sh +fi + +# Build up classpath +CLASSPATH="$SPARK_CLASSPATH:$FWDIR/conf" +if [ -f "$FWDIR/RELEASE" ]; then + ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar` +else + ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar` +fi +CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR" + +# Add test classes if we're running from SBT or Maven with SPARK_TESTING set to 1 +if [[ $SPARK_TESTING == 1 ]]; then + CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/test-classes" + CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/test-classes" + CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/test-classes" + CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/test-classes" + CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/test-classes" +fi + +# Add hadoop conf dir if given -- otherwise FileSystem.*, etc fail ! +# Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts +# the configuration files. +if [ "x" != "x$HADOOP_CONF_DIR" ]; then + CLASSPATH="$CLASSPATH:$HADOOP_CONF_DIR" +fi +if [ "x" != "x$YARN_CONF_DIR" ]; then + CLASSPATH="$CLASSPATH:$YARN_CONF_DIR" +fi + +echo "$CLASSPATH" diff --git a/sbin/slaves.sh b/sbin/slaves.sh new file mode 100755 index 0000000000..68408bcad8 --- /dev/null +++ b/sbin/slaves.sh @@ -0,0 +1,74 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Run a shell command on all slave hosts. +# +# Environment Variables +# +# SPARK_SLAVES File naming remote hosts. +# Default is ${SPARK_CONF_DIR}/slaves. +# SPARK_CONF_DIR Alternate conf dir. Default is ${SPARK_HOME}/conf. +# SPARK_SLAVE_SLEEP Seconds to sleep between spawning remote commands. +# SPARK_SSH_OPTS Options passed to ssh when running remote commands. +## + +usage="Usage: slaves.sh [--config confdir] command..." + +# if no args specified, show usage +if [ $# -le 0 ]; then + echo $usage + exit 1 +fi + +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` + +.
"$sbin/spark-config.sh" + +# If the slaves file is specified in the command line, +# then it takes precedence over the definition in +# spark-env.sh. Save it here. +HOSTLIST=$SPARK_SLAVES + +if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then + . "${SPARK_CONF_DIR}/spark-env.sh" +fi + +if [ "$HOSTLIST" = "" ]; then + if [ "$SPARK_SLAVES" = "" ]; then + export HOSTLIST="${SPARK_CONF_DIR}/slaves" + else + export HOSTLIST="${SPARK_SLAVES}" + fi +fi + +# By default disable strict host key checking +if [ "$SPARK_SSH_OPTS" = "" ]; then + SPARK_SSH_OPTS="-o StrictHostKeyChecking=no" +fi + +for slave in `cat "$HOSTLIST"|sed "s/#.*$//;/^$/d"`; do + ssh $SPARK_SSH_OPTS $slave $"${@// /\\ }" \ + 2>&1 | sed "s/^/$slave: /" & + if [ "$SPARK_SLAVE_SLEEP" != "" ]; then + sleep $SPARK_SLAVE_SLEEP + fi +done + +wait diff --git a/sbin/spark-config.sh b/sbin/spark-config.sh new file mode 100755 index 0000000000..cd2c7b7b0d --- /dev/null +++ b/sbin/spark-config.sh @@ -0,0 +1,36 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# included in all the spark scripts with source command +# should not be executable directly +# also should not be passed any arguments, since we need original $* + +# resolve links - $0 may be a softlink +this="${BASH_SOURCE-$0}" +common_bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P) +script="$(basename -- "$this")" +this="$common_bin/$script" + +# convert relative path to absolute path +config_bin=`dirname "$this"` +script=`basename "$this"` +config_bin=`cd "$config_bin"; pwd` +this="$config_bin/$script" + +export SPARK_PREFIX=`dirname "$this"`/.. +export SPARK_HOME=${SPARK_PREFIX} +export SPARK_CONF_DIR="$SPARK_HOME/conf" diff --git a/sbin/spark-daemon.sh b/sbin/spark-daemon.sh new file mode 100755 index 0000000000..ae82349cc6 --- /dev/null +++ b/sbin/spark-daemon.sh @@ -0,0 +1,164 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Runs a Spark command as a daemon. +# +# Environment Variables +# +# SPARK_CONF_DIR Alternate conf dir. Default is ${SPARK_PREFIX}/conf. 
+# SPARK_LOG_DIR Where log files are stored. PWD by default. +# SPARK_MASTER host:path where spark code should be rsync'd from +# SPARK_PID_DIR The pid files are stored. /tmp by default. +# SPARK_IDENT_STRING A string representing this instance of spark. $USER by default +# SPARK_NICENESS The scheduling priority for daemons. Defaults to 0. +## + +usage="Usage: spark-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) <spark-command> <spark-instance-number> <args...>" + +# if no args specified, show usage +if [ $# -le 1 ]; then + echo $usage + exit 1 +fi + +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` + +. "$sbin/spark-config.sh" + +# get arguments +startStop=$1 +shift +command=$1 +shift +instance=$1 +shift + +spark_rotate_log () +{ + log=$1; + num=5; + if [ -n "$2" ]; then + num=$2 + fi + if [ -f "$log" ]; then # rotate logs + while [ $num -gt 1 ]; do + prev=`expr $num - 1` + [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num" + num=$prev + done + mv "$log" "$log.$num"; + fi +} + +if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then + . "${SPARK_CONF_DIR}/spark-env.sh" +fi + +if [ "$SPARK_IDENT_STRING" = "" ]; then + export SPARK_IDENT_STRING="$USER" +fi + + +export SPARK_PRINT_LAUNCH_COMMAND="1" + +# get log directory +if [ "$SPARK_LOG_DIR" = "" ]; then + export SPARK_LOG_DIR="$SPARK_HOME/logs" +fi +mkdir -p "$SPARK_LOG_DIR" +touch $SPARK_LOG_DIR/.spark_test > /dev/null 2>&1 +TEST_LOG_DIR=$? +if [ "${TEST_LOG_DIR}" = "0" ]; then + rm -f $SPARK_LOG_DIR/.spark_test +else + chown $SPARK_IDENT_STRING $SPARK_LOG_DIR +fi + +if [ "$SPARK_PID_DIR" = "" ]; then + SPARK_PID_DIR=/tmp +fi + +# some variables +export SPARK_LOGFILE=spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.log +export SPARK_ROOT_LOGGER="INFO,DRFA" +log=$SPARK_LOG_DIR/spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.out +pid=$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid + +# Set default scheduling priority +if [ "$SPARK_NICENESS" = "" ]; then + export SPARK_NICENESS=0 +fi + + +case $startStop in + + (start) + + mkdir -p "$SPARK_PID_DIR" + + if [ -f $pid ]; then + if kill -0 `cat $pid` > /dev/null 2>&1; then + echo $command running as process `cat $pid`. Stop it first. + exit 1 + fi + fi + + if [ "$SPARK_MASTER" != "" ]; then + echo rsync from $SPARK_MASTER + rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $SPARK_MASTER/ "$SPARK_HOME" + fi + + spark_rotate_log "$log" + echo starting $command, logging to $log + cd "$SPARK_PREFIX" + nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/sbin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null & + newpid=$! + echo $newpid > $pid + sleep 2 + # Check if the process has died; in that case we'll tail the log so the user can see + if ! kill -0 $newpid >/dev/null 2>&1; then + echo "failed to launch $command:" + tail -2 "$log" | sed 's/^/ /' + echo "full log in $log" + fi + ;; + + (stop) + + if [ -f $pid ]; then + if kill -0 `cat $pid` > /dev/null 2>&1; then + echo stopping $command + kill `cat $pid` + else + echo no $command to stop + fi + else + echo no $command to stop + fi + ;; + + (*) + echo $usage + exit 1 + ;; + +esac + + diff --git a/sbin/spark-daemons.sh b/sbin/spark-daemons.sh new file mode 100755 index 0000000000..d91254b690 --- /dev/null +++ b/sbin/spark-daemons.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Run a Spark command on all slave hosts. + +usage="Usage: spark-daemons.sh [--config confdir] [--hosts hostlistfile] [start|stop] command instance-number args..." + +# if no args specified, show usage +if [ $# -le 1 ]; then + echo $usage + exit 1 +fi + +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` + +. "$sbin/spark-config.sh" + +exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/spark-daemon.sh" "$@" diff --git a/sbin/start-all.sh b/sbin/start-all.sh new file mode 100755 index 0000000000..2daf49db35 --- /dev/null +++ b/sbin/start-all.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Start all spark daemons. +# Starts the master on this node. +# Starts a worker on each node specified in conf/slaves + +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` + +# Load the Spark configuration +. "$sbin/spark-config.sh" + +# Start Master +"$sbin"/start-master.sh + +# Start Workers +"$sbin"/start-slaves.sh diff --git a/sbin/start-master.sh b/sbin/start-master.sh new file mode 100755 index 0000000000..3dcf7cc348 --- /dev/null +++ b/sbin/start-master.sh @@ -0,0 +1,52 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Starts the master on the machine this script is executed on. + +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` + +. "$sbin/spark-config.sh" + +if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then + . 
"${SPARK_CONF_DIR}/spark-env.sh" +fi + +if [ "$SPARK_MASTER_PORT" = "" ]; then + SPARK_MASTER_PORT=7077 +fi + +if [ "$SPARK_MASTER_IP" = "" ]; then + SPARK_MASTER_IP=`hostname` +fi + +if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then + SPARK_MASTER_WEBUI_PORT=8080 +fi + +# Set SPARK_PUBLIC_DNS so the master report the correct webUI address to the slaves +if [ "$SPARK_PUBLIC_DNS" = "" ]; then + # If we appear to be running on EC2, use the public address by default: + # NOTE: ec2-metadata is installed on Amazon Linux AMI. Check based on that and hostname + if command -v ec2-metadata > /dev/null || [[ `hostname` == *ec2.internal ]]; then + export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname` + fi +fi + +"$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT diff --git a/sbin/start-slave.sh b/sbin/start-slave.sh new file mode 100755 index 0000000000..524be38c62 --- /dev/null +++ b/sbin/start-slave.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Usage: start-slave.sh +# where is like "spark://localhost:7077" + +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` + +# Set SPARK_PUBLIC_DNS so slaves can be linked in master web UI +if [ "$SPARK_PUBLIC_DNS" = "" ]; then + # If we appear to be running on EC2, use the public address by default: + # NOTE: ec2-metadata is installed on Amazon Linux AMI. Check based on that and hostname + if command -v ec2-metadata > /dev/null || [[ `hostname` == *ec2.internal ]]; then + export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname` + fi +fi + +"$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@" diff --git a/sbin/start-slaves.sh b/sbin/start-slaves.sh new file mode 100755 index 0000000000..fd5cdeb1e6 --- /dev/null +++ b/sbin/start-slaves.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` + +. "$sbin/spark-config.sh" + +if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then + . "${SPARK_CONF_DIR}/spark-env.sh" +fi + +# Find the port number for the master +if [ "$SPARK_MASTER_PORT" = "" ]; then + SPARK_MASTER_PORT=7077 +fi + +if [ "$SPARK_MASTER_IP" = "" ]; then + SPARK_MASTER_IP=`hostname` +fi + +# Launch the slaves +if [ "$SPARK_WORKER_INSTANCES" = "" ]; then + exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT +else + if [ "$SPARK_WORKER_WEBUI_PORT" = "" ]; then + SPARK_WORKER_WEBUI_PORT=8081 + fi + for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do + "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i )) + done +fi diff --git a/sbin/stop-all.sh b/sbin/stop-all.sh new file mode 100755 index 0000000000..60b358d374 --- /dev/null +++ b/sbin/stop-all.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Stop all spark daemons. +# Run this on the master node + + +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` + +# Load the Spark configuration +. "$sbin/spark-config.sh" + +# Stop the slaves, then the master +"$sbin"/stop-slaves.sh +"$sbin"/stop-master.sh diff --git a/sbin/stop-master.sh b/sbin/stop-master.sh new file mode 100755 index 0000000000..2adabd4265 --- /dev/null +++ b/sbin/stop-master.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Stops the master on the machine this script is executed on. + +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` + +.
"$sbin/spark-config.sh" + +"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1 diff --git a/sbin/stop-slaves.sh b/sbin/stop-slaves.sh new file mode 100755 index 0000000000..63802e6df5 --- /dev/null +++ b/sbin/stop-slaves.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Starts the master on the machine this script is executed on. + +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` + +. "$sbin/spark-config.sh" + +if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then + . "${SPARK_CONF_DIR}/spark-env.sh" +fi + +if [ "$SPARK_WORKER_INSTANCES" = "" ]; then + "$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1 +else + for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do + "$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 )) + done +fi -- cgit v1.2.3