author     Prashant Sharma <prashant.s@imaginea.com>  2014-01-02 17:55:21 +0530
committer  Prashant Sharma <prashant.s@imaginea.com>  2014-01-02 17:55:21 +0530
commit     980afd280a331103ce7391adaf484dd497218741 (patch)
tree       a3bef12d6c1296aa42930b8b91a07d8775831e6c /bin
parent     3713f8129a618a633a7aca8c944960c3e7ac9d3b (diff)
parent     52ccf4f859d92ed9e86d3720a983ac2c4a1c23bf (diff)
Merge branch 'scripts-reorg' of github.com:shane-huang/incubator-spark into spark-915-segregate-scripts
Conflicts:
    bin/spark-shell
    core/pom.xml
    core/src/main/scala/org/apache/spark/SparkContext.scala
    core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
    core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
    core/src/test/scala/org/apache/spark/DriverSuite.scala
    python/run-tests
    sbin/compute-classpath.sh
    sbin/spark-class
    sbin/stop-slaves.sh
Diffstat (limited to 'bin')
-rw-r--r--  bin/compute-classpath.cmd    69
-rwxr-xr-x  bin/compute-classpath.sh     75
-rwxr-xr-x  bin/pyspark                  70
-rw-r--r--  bin/pyspark.cmd              23
-rw-r--r--  bin/pyspark2.cmd             55
-rwxr-xr-x  bin/run-example              91
-rw-r--r--  bin/run-example.cmd          23
-rw-r--r--  bin/run-example2.cmd         61
-rwxr-xr-x  bin/slaves.sh                91
-rwxr-xr-x  bin/spark-config.sh          36
-rwxr-xr-x  bin/spark-daemon.sh         183
-rwxr-xr-x  bin/spark-daemons.sh         35
-rwxr-xr-x  bin/spark-shell             102
-rw-r--r--  bin/spark-shell.cmd          23
-rwxr-xr-x  bin/start-all.sh             34
-rwxr-xr-x  bin/start-master.sh          52
-rwxr-xr-x  bin/start-slave.sh           35
-rwxr-xr-x  bin/start-slaves.sh          48
-rwxr-xr-x  bin/stop-all.sh              32
-rwxr-xr-x  bin/stop-master.sh           27
-rwxr-xr-x  bin/stop-slaves.sh           35
21 files changed, 448 insertions(+), 752 deletions(-)
diff --git a/bin/compute-classpath.cmd b/bin/compute-classpath.cmd
deleted file mode 100644
index 9e3e10ecaa..0000000000
--- a/bin/compute-classpath.cmd
+++ /dev/null
@@ -1,69 +0,0 @@
-@echo off
-
-rem
-rem Licensed to the Apache Software Foundation (ASF) under one or more
-rem contributor license agreements. See the NOTICE file distributed with
-rem this work for additional information regarding copyright ownership.
-rem The ASF licenses this file to You under the Apache License, Version 2.0
-rem (the "License"); you may not use this file except in compliance with
-rem the License. You may obtain a copy of the License at
-rem
-rem http://www.apache.org/licenses/LICENSE-2.0
-rem
-rem Unless required by applicable law or agreed to in writing, software
-rem distributed under the License is distributed on an "AS IS" BASIS,
-rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-rem See the License for the specific language governing permissions and
-rem limitations under the License.
-rem
-
-rem This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
-rem script and the ExecutorRunner in standalone cluster mode.
-
-set SCALA_VERSION=2.10
-
-rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0..\
-
-rem Load environment variables from conf\spark-env.cmd, if it exists
-if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
-
-rem Build up classpath
-set CLASSPATH=%SPARK_CLASSPATH%;%FWDIR%conf
-if exist "%FWDIR%RELEASE" (
- for %%d in ("%FWDIR%jars\spark-assembly*.jar") do (
- set ASSEMBLY_JAR=%%d
- )
-) else (
- for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
- set ASSEMBLY_JAR=%%d
- )
-)
-set CLASSPATH=%CLASSPATH%;%ASSEMBLY_JAR%
-
-if "x%SPARK_TESTING%"=="x1" (
- rem Add test classes to path
- set CLASSPATH=%CLASSPATH%;%FWDIR%core\target\scala-%SCALA_VERSION%\test-classes
- set CLASSPATH=%CLASSPATH%;%FWDIR%repl\target\scala-%SCALA_VERSION%\test-classes
- set CLASSPATH=%CLASSPATH%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\test-classes
- set CLASSPATH=%CLASSPATH%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\test-classes
- set CLASSPATH=%CLASSPATH%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\test-classes
-)
-
-rem Add hadoop conf dir - else FileSystem.*, etc fail
-rem Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts
-rem the configuration files.
-if "x%HADOOP_CONF_DIR%"=="x" goto no_hadoop_conf_dir
- set CLASSPATH=%CLASSPATH%;%HADOOP_CONF_DIR%
-:no_hadoop_conf_dir
-
-if "x%YARN_CONF_DIR%"=="x" goto no_yarn_conf_dir
- set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%
-:no_yarn_conf_dir
-
-rem A bit of a hack to allow calling this script within run2.cmd without seeing output
-if "%DONT_PRINT_CLASSPATH%"=="1" goto exit
-
-echo %CLASSPATH%
-
-:exit
diff --git a/bin/compute-classpath.sh b/bin/compute-classpath.sh
deleted file mode 100755
index 40555089fc..0000000000
--- a/bin/compute-classpath.sh
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
-# script and the ExecutorRunner in standalone cluster mode.
-
-SCALA_VERSION=2.10
-
-# Figure out where Spark is installed
-FWDIR="$(cd `dirname $0`/..; pwd)"
-
-# Load environment variables from conf/spark-env.sh, if it exists
-if [ -e $FWDIR/conf/spark-env.sh ] ; then
- . $FWDIR/conf/spark-env.sh
-fi
-
-# Build up classpath
-CLASSPATH="$SPARK_CLASSPATH:$FWDIR/conf"
-
-# First check if we have a dependencies jar. If so, include binary classes with the deps jar
-if [ -f "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*-deps.jar ]; then
- CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"
- CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/classes"
- CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/classes"
- CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/classes"
- CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/classes"
-
- DEPS_ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*-deps.jar`
- CLASSPATH="$CLASSPATH:$DEPS_ASSEMBLY_JAR"
-else
- # Else use spark-assembly jar from either RELEASE or assembly directory
- if [ -f "$FWDIR/RELEASE" ]; then
- ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar`
- else
- ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar`
- fi
- CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
-fi
-
-# Add test classes if we're running from SBT or Maven with SPARK_TESTING set to 1
-if [[ $SPARK_TESTING == 1 ]]; then
- CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/test-classes"
- CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/test-classes"
- CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/test-classes"
- CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/test-classes"
- CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/test-classes"
-fi
-
-# Add hadoop conf dir if given -- otherwise FileSystem.*, etc fail !
-# Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts
-# the configuration files.
-if [ "x" != "x$HADOOP_CONF_DIR" ]; then
- CLASSPATH="$CLASSPATH:$HADOOP_CONF_DIR"
-fi
-if [ "x" != "x$YARN_CONF_DIR" ]; then
- CLASSPATH="$CLASSPATH:$YARN_CONF_DIR"
-fi
-
-echo "$CLASSPATH"
diff --git a/bin/pyspark b/bin/pyspark
new file mode 100755
index 0000000000..d6810f4686
--- /dev/null
+++ b/bin/pyspark
@@ -0,0 +1,70 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Figure out where the Scala framework is installed
+FWDIR="$(cd `dirname $0`/..; pwd)"
+
+# Export this as SPARK_HOME
+export SPARK_HOME="$FWDIR"
+
+SCALA_VERSION=2.10
+
+# Exit if the user hasn't compiled Spark
+if [ ! -f "$FWDIR/RELEASE" ]; then
+ # Exit if the user hasn't compiled Spark
+ ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar >& /dev/null
+ if [[ $? != 0 ]]; then
+ echo "Failed to find Spark assembly in $FWDIR/assembly/target" >&2
+ echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
+ exit 1
+ fi
+fi
+
+# Load environment variables from conf/spark-env.sh, if it exists
+if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
+ . $FWDIR/conf/spark-env.sh
+fi
+
+# Figure out which Python executable to use
+if [ -z "$PYSPARK_PYTHON" ] ; then
+ PYSPARK_PYTHON="python"
+fi
+export PYSPARK_PYTHON
+
+# Add the PySpark classes to the Python path:
+export PYTHONPATH=$SPARK_HOME/python/:$PYTHONPATH
+
+# Load the PySpark shell.py script when ./pyspark is used interactively:
+export OLD_PYTHONSTARTUP=$PYTHONSTARTUP
+export PYTHONSTARTUP=$FWDIR/python/pyspark/shell.py
+
+if [ -n "$IPYTHON_OPTS" ]; then
+ IPYTHON=1
+fi
+
+if [[ "$IPYTHON" = "1" ]] ; then
+ # IPython <1.0.0 doesn't honor PYTHONSTARTUP, while 1.0.0+ does.
+ # Hence we clear PYTHONSTARTUP and use the -c "%run $IPYTHONSTARTUP" command which works on all versions
+ # We also force interactive mode with "-i"
+ IPYTHONSTARTUP=$PYTHONSTARTUP
+ PYTHONSTARTUP=
+ exec ipython "$IPYTHON_OPTS" -i -c "%run $IPYTHONSTARTUP"
+else
+ exec "$PYSPARK_PYTHON" "$@"
+fi
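The interesting technique in the new bin/pyspark is steering an interactive interpreter through PYTHONSTARTUP, with a workaround for IPython versions that ignore the variable. Reduced to a sketch (init.py is a hypothetical startup file, not part of Spark):

    #!/usr/bin/env bash
    # Hand an interactive Python a startup script; for IPython, clear
    # PYTHONSTARTUP and %run the file instead, since IPython releases
    # before 1.0.0 ignore the variable. init.py is a hypothetical file.
    export PYTHONSTARTUP="$PWD/init.py"
    if [ "$IPYTHON" = "1" ]; then
      STARTUP="$PYTHONSTARTUP"
      PYTHONSTARTUP=
      exec ipython -i -c "%run $STARTUP"
    else
      exec python "$@"
    fi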
diff --git a/bin/pyspark.cmd b/bin/pyspark.cmd
new file mode 100644
index 0000000000..7c26fbbac2
--- /dev/null
+++ b/bin/pyspark.cmd
@@ -0,0 +1,23 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This is the entry point for running PySpark. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.
+
+cmd /V /E /C %~dp0pyspark2.cmd %*
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
new file mode 100644
index 0000000000..95791095ec
--- /dev/null
+++ b/bin/pyspark2.cmd
@@ -0,0 +1,55 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+set SCALA_VERSION=2.10
+
+rem Figure out where the Spark framework is installed
+set FWDIR=%~dp0..\
+
+rem Export this as SPARK_HOME
+set SPARK_HOME=%FWDIR%
+
+rem Test whether the user has built Spark
+if exist "%FWDIR%RELEASE" goto skip_build_test
+set FOUND_JAR=0
+for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
+ set FOUND_JAR=1
+)
+if "%FOUND_JAR%"=="0" (
+ echo Failed to find Spark assembly JAR.
+ echo You need to build Spark with sbt\sbt assembly before running this program.
+ goto exit
+)
+:skip_build_test
+
+rem Load environment variables from conf\spark-env.cmd, if it exists
+if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
+
+rem Figure out which Python to use.
+if "x%PYSPARK_PYTHON%"=="x" set PYSPARK_PYTHON=python
+
+set PYTHONPATH=%FWDIR%python;%PYTHONPATH%
+
+set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
+set PYTHONSTARTUP=%FWDIR%python\pyspark\shell.py
+
+echo Running %PYSPARK_PYTHON% with PYTHONPATH=%PYTHONPATH%
+
+"%PYSPARK_PYTHON%" %*
+:exit
diff --git a/bin/run-example b/bin/run-example
new file mode 100755
index 0000000000..f2699c38a9
--- /dev/null
+++ b/bin/run-example
@@ -0,0 +1,91 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+cygwin=false
+case "`uname`" in
+ CYGWIN*) cygwin=true;;
+esac
+
+SCALA_VERSION=2.10
+
+# Figure out where the Scala framework is installed
+FWDIR="$(cd `dirname $0`/..; pwd)"
+
+# Export this as SPARK_HOME
+export SPARK_HOME="$FWDIR"
+
+# Load environment variables from conf/spark-env.sh, if it exists
+if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
+ . $FWDIR/conf/spark-env.sh
+fi
+
+if [ -z "$1" ]; then
+ echo "Usage: run-example <example-class> [<args>]" >&2
+ exit 1
+fi
+
+# Figure out the JAR file that our examples were packaged into. This includes a bit of a hack
+# to avoid the -sources and -doc packages that are built by publish-local.
+EXAMPLES_DIR="$FWDIR"/examples
+SPARK_EXAMPLES_JAR=""
+if [ -e "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
+ # Use the JAR from the SBT build
+ export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
+fi
+if [ -e "$EXAMPLES_DIR"/target/spark-examples*[0-9Tg].jar ]; then
+ # Use the JAR from the Maven build
+ # TODO: this also needs to become an assembly!
+ export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR"/target/spark-examples*[0-9Tg].jar`
+fi
+if [[ -z $SPARK_EXAMPLES_JAR ]]; then
+ echo "Failed to find Spark examples assembly in $FWDIR/examples/target" >&2
+ echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
+ exit 1
+fi
+
+# Since the examples JAR ideally shouldn't include spark-core (that dependency should be
+# "provided"), also add our standard Spark classpath, built using compute-classpath.sh.
+CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`
+CLASSPATH="$SPARK_EXAMPLES_JAR:$CLASSPATH"
+
+if $cygwin; then
+ CLASSPATH=`cygpath -wp $CLASSPATH`
+ export SPARK_EXAMPLES_JAR=`cygpath -w $SPARK_EXAMPLES_JAR`
+fi
+
+# Find java binary
+if [ -n "${JAVA_HOME}" ]; then
+ RUNNER="${JAVA_HOME}/bin/java"
+else
+ if [ `command -v java` ]; then
+ RUNNER="java"
+ else
+ echo "JAVA_HOME is not set" >&2
+ exit 1
+ fi
+fi
+
+if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
+ echo -n "Spark Command: "
+ echo "$RUNNER" -cp "$CLASSPATH" "$@"
+ echo "========================================"
+ echo
+fi
+
+exec "$RUNNER" -cp "$CLASSPATH" "$@"
diff --git a/bin/run-example.cmd b/bin/run-example.cmd
new file mode 100644
index 0000000000..5b2d048d6e
--- /dev/null
+++ b/bin/run-example.cmd
@@ -0,0 +1,23 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This is the entry point for running a Spark example. To avoid polluting
+rem the environment, it just launches a new cmd to do the real work.
+
+cmd /V /E /C %~dp0run-example2.cmd %*
diff --git a/bin/run-example2.cmd b/bin/run-example2.cmd
new file mode 100644
index 0000000000..6861334cb0
--- /dev/null
+++ b/bin/run-example2.cmd
@@ -0,0 +1,61 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+set SCALA_VERSION=2.10
+
+rem Figure out where the Spark framework is installed
+set FWDIR=%~dp0..\
+
+rem Export this as SPARK_HOME
+set SPARK_HOME=%FWDIR%
+
+rem Load environment variables from conf\spark-env.cmd, if it exists
+if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
+
+rem Test that an argument was given
+if not "x%1"=="x" goto arg_given
+ echo Usage: run-example ^<example-class^> [^<args^>]
+ goto exit
+:arg_given
+
+set EXAMPLES_DIR=%FWDIR%examples
+
+rem Figure out the JAR file that our examples were packaged into.
+set SPARK_EXAMPLES_JAR=
+for %%d in ("%EXAMPLES_DIR%\target\scala-%SCALA_VERSION%\spark-examples*assembly*.jar") do (
+ set SPARK_EXAMPLES_JAR=%%d
+)
+if "x%SPARK_EXAMPLES_JAR%"=="x" (
+ echo Failed to find Spark examples assembly JAR.
+ echo You need to build Spark with sbt\sbt assembly before running this program.
+ goto exit
+)
+
+rem Compute Spark classpath using external script
+set DONT_PRINT_CLASSPATH=1
+call "%FWDIR%sbin\compute-classpath.cmd"
+set DONT_PRINT_CLASSPATH=0
+set CLASSPATH=%SPARK_EXAMPLES_JAR%;%CLASSPATH%
+
+rem Figure out where java is.
+set RUNNER=java
+if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
+
+"%RUNNER%" -cp "%CLASSPATH%" %JAVA_OPTS% %*
+:exit
diff --git a/bin/slaves.sh b/bin/slaves.sh
deleted file mode 100755
index c367c2fd8e..0000000000
--- a/bin/slaves.sh
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Run a shell command on all slave hosts.
-#
-# Environment Variables
-#
-# SPARK_SLAVES File naming remote hosts.
-# Default is ${SPARK_CONF_DIR}/slaves.
-# SPARK_CONF_DIR Alternate conf dir. Default is ${SPARK_HOME}/conf.
-# SPARK_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
-# SPARK_SSH_OPTS Options passed to ssh when running remote commands.
-##
-
-usage="Usage: slaves.sh [--config <conf-dir>] command..."
-
-# if no args specified, show usage
-if [ $# -le 0 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin/spark-config.sh"
-
-# If the slaves file is specified in the command line,
-# then it takes precedence over the definition in
-# spark-env.sh. Save it here.
-HOSTLIST=$SPARK_SLAVES
-
-# Check if --config is passed as an argument. It is an optional parameter.
-# Exit if the argument is not a directory.
-if [ "$1" == "--config" ]
-then
- shift
- conf_dir=$1
- if [ ! -d "$conf_dir" ]
- then
- echo "ERROR : $conf_dir is not a directory"
- echo $usage
- exit 1
- else
- export SPARK_CONF_DIR=$conf_dir
- fi
- shift
-fi
-
-if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
- . "${SPARK_CONF_DIR}/spark-env.sh"
-fi
-
-if [ "$HOSTLIST" = "" ]; then
- if [ "$SPARK_SLAVES" = "" ]; then
- export HOSTLIST="${SPARK_CONF_DIR}/slaves"
- else
- export HOSTLIST="${SPARK_SLAVES}"
- fi
-fi
-
-# By default disable strict host key checking
-if [ "$SPARK_SSH_OPTS" = "" ]; then
- SPARK_SSH_OPTS="-o StrictHostKeyChecking=no"
-fi
-
-for slave in `cat "$HOSTLIST"|sed "s/#.*$//;/^$/d"`; do
- ssh $SPARK_SSH_OPTS $slave $"${@// /\\ }" \
- 2>&1 | sed "s/^/$slave: /" &
- if [ "$SPARK_SLAVE_SLEEP" != "" ]; then
- sleep $SPARK_SLAVE_SLEEP
- fi
-done
-
-wait
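slaves.sh is an ssh fan-out: strip comments and blank lines from a host list, launch each remote command in the background with the hostname prefixed to its output, then wait for all of them. A standalone sketch of that core loop (hosts.txt is an illustrative host file):

    #!/usr/bin/env bash
    # Fan a command out to every host in hosts.txt, in parallel.
    HOSTLIST="hosts.txt"                      # one hostname per line
    SPARK_SSH_OPTS="-o StrictHostKeyChecking=no"
    for slave in `sed "s/#.*$//;/^$/d" "$HOSTLIST"`; do   # drop comments/blanks
      ssh $SPARK_SSH_OPTS "$slave" "$@" 2>&1 | sed "s/^/$slave: /" &
    done
    wait                                      # block until every host finishes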
diff --git a/bin/spark-config.sh b/bin/spark-config.sh
deleted file mode 100755
index cd2c7b7b0d..0000000000
--- a/bin/spark-config.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# included in all the spark scripts with source command
-# should not be executable directly
-# also should not be passed any arguments, since we need original $*
-
-# resolve links - $0 may be a softlink
-this="${BASH_SOURCE-$0}"
-common_bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P)
-script="$(basename -- "$this")"
-this="$common_bin/$script"
-
-# convert relative path to absolute path
-config_bin=`dirname "$this"`
-script=`basename "$this"`
-config_bin=`cd "$config_bin"; pwd`
-this="$config_bin/$script"
-
-export SPARK_PREFIX=`dirname "$this"`/..
-export SPARK_HOME=${SPARK_PREFIX}
-export SPARK_CONF_DIR="$SPARK_HOME/conf"
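The notable idiom in spark-config.sh is locating a *sourced* file: $0 would point at the sourcing script, so it uses BASH_SOURCE plus a physical cd to resolve symlinks. The core of it, as a sketch:

    #!/usr/bin/env bash
    # Find the directory containing this file even when it is sourced
    # (not executed) and even when it is reached through a symlink.
    this="${BASH_SOURCE-$0}"
    common_bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P)
    echo "this file lives in: $common_bin"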
diff --git a/bin/spark-daemon.sh b/bin/spark-daemon.sh
deleted file mode 100755
index a0c0d44b58..0000000000
--- a/bin/spark-daemon.sh
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Runs a Spark command as a daemon.
-#
-# Environment Variables
-#
-# SPARK_CONF_DIR Alternate conf dir. Default is ${SPARK_PREFIX}/conf.
-# SPARK_LOG_DIR Where log files are stored. PWD by default.
-# SPARK_MASTER host:path where spark code should be rsync'd from
-# SPARK_PID_DIR The pid files are stored. /tmp by default.
-# SPARK_IDENT_STRING A string representing this instance of spark. $USER by default
-# SPARK_NICENESS The scheduling priority for daemons. Defaults to 0.
-##
-
-usage="Usage: spark-daemon.sh [--config <conf-dir>] (start|stop) <spark-command> <spark-instance-number> <args...>"
-
-# if no args specified, show usage
-if [ $# -le 1 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin/spark-config.sh"
-
-# get arguments
-
-# Check if --config is passed as an argument. It is an optional parameter.
-# Exit if the argument is not a directory.
-
-if [ "$1" == "--config" ]
-then
- shift
- conf_dir=$1
- if [ ! -d "$conf_dir" ]
- then
- echo "ERROR : $conf_dir is not a directory"
- echo $usage
- exit 1
- else
- export SPARK_CONF_DIR=$conf_dir
- fi
- shift
-fi
-
-startStop=$1
-shift
-command=$1
-shift
-instance=$1
-shift
-
-spark_rotate_log ()
-{
- log=$1;
- num=5;
- if [ -n "$2" ]; then
- num=$2
- fi
- if [ -f "$log" ]; then # rotate logs
- while [ $num -gt 1 ]; do
- prev=`expr $num - 1`
- [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
- num=$prev
- done
- mv "$log" "$log.$num";
- fi
-}
-
-if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
- . "${SPARK_CONF_DIR}/spark-env.sh"
-fi
-
-if [ "$SPARK_IDENT_STRING" = "" ]; then
- export SPARK_IDENT_STRING="$USER"
-fi
-
-
-export SPARK_PRINT_LAUNCH_COMMAND="1"
-
-# get log directory
-if [ "$SPARK_LOG_DIR" = "" ]; then
- export SPARK_LOG_DIR="$SPARK_HOME/logs"
-fi
-mkdir -p "$SPARK_LOG_DIR"
-touch $SPARK_LOG_DIR/.spark_test > /dev/null 2>&1
-TEST_LOG_DIR=$?
-if [ "${TEST_LOG_DIR}" = "0" ]; then
- rm -f $SPARK_LOG_DIR/.spark_test
-else
- chown $SPARK_IDENT_STRING $SPARK_LOG_DIR
-fi
-
-if [ "$SPARK_PID_DIR" = "" ]; then
- SPARK_PID_DIR=/tmp
-fi
-
-# some variables
-export SPARK_LOGFILE=spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.log
-export SPARK_ROOT_LOGGER="INFO,DRFA"
-log=$SPARK_LOG_DIR/spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.out
-pid=$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid
-
-# Set default scheduling priority
-if [ "$SPARK_NICENESS" = "" ]; then
- export SPARK_NICENESS=0
-fi
-
-
-case $startStop in
-
- (start)
-
- mkdir -p "$SPARK_PID_DIR"
-
- if [ -f $pid ]; then
- if kill -0 `cat $pid` > /dev/null 2>&1; then
- echo $command running as process `cat $pid`. Stop it first.
- exit 1
- fi
- fi
-
- if [ "$SPARK_MASTER" != "" ]; then
- echo rsync from $SPARK_MASTER
- rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $SPARK_MASTER/ "$SPARK_HOME"
- fi
-
- spark_rotate_log "$log"
- echo starting $command, logging to $log
- cd "$SPARK_PREFIX"
- nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
- newpid=$!
- echo $newpid > $pid
- sleep 2
- # Check if the process has died; in that case we'll tail the log so the user can see
- if ! kill -0 $newpid >/dev/null 2>&1; then
- echo "failed to launch $command:"
- tail -2 "$log" | sed 's/^/ /'
- echo "full log in $log"
- fi
- ;;
-
- (stop)
-
- if [ -f $pid ]; then
- if kill -0 `cat $pid` > /dev/null 2>&1; then
- echo stopping $command
- kill `cat $pid`
- else
- echo no $command to stop
- fi
- else
- echo no $command to stop
- fi
- ;;
-
- (*)
- echo $usage
- exit 1
- ;;
-
-esac
-
-
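spark-daemon.sh carries a small numbered log rotator: app.log becomes app.log.1, app.log.1 becomes app.log.2, and so on up to a cap. The helper extracted as a runnable sketch (app.log is illustrative):

    #!/usr/bin/env bash
    # Rotate a log through numbered suffixes; .1 is newest, the oldest
    # numbered copy is overwritten once the cap is reached.
    spark_rotate_log () {
      local log=$1 num=${2:-5} prev
      if [ -f "$log" ]; then
        while [ "$num" -gt 1 ]; do
          prev=$((num - 1))
          [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
          num=$prev
        done
        mv "$log" "$log.$num"     # num has counted down to 1 here
      fi
    }
    spark_rotate_log app.log 5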
diff --git a/bin/spark-daemons.sh b/bin/spark-daemons.sh
deleted file mode 100755
index 64286cb2da..0000000000
--- a/bin/spark-daemons.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Run a Spark command on all slave hosts.
-
-usage="Usage: spark-daemons.sh [--config <conf-dir>] [start|stop] command instance-number args..."
-
-# if no args specified, show usage
-if [ $# -le 1 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin/spark-config.sh"
-
-exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/spark-daemon.sh" "$@"
diff --git a/bin/spark-shell b/bin/spark-shell
new file mode 100755
index 0000000000..bc7386db4d
--- /dev/null
+++ b/bin/spark-shell
@@ -0,0 +1,102 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Shell script for starting the Spark Shell REPL
+# Note that it will set MASTER to spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}
+# if those two env vars are set in spark-env.sh but MASTER is not.
+# Options:
+# -c <cores> Set the number of cores for REPL to use
+
+cygwin=false
+case "`uname`" in
+ CYGWIN*) cygwin=true;;
+esac
+
+# Enter posix mode for bash
+set -o posix
+
+FWDIR="$(cd `dirname $0`/..; pwd)"
+
+for o in "$@"; do
+ if [ "$1" = "-c" -o "$1" = "--cores" ]; then
+ shift
+ if [ -n "$1" ]; then
+ OPTIONS="-Dspark.cores.max=$1"
+ shift
+ fi
+ fi
+done
+
+# Set MASTER from spark-env if possible
+if [ -z "$MASTER" ]; then
+ if [ -e "$FWDIR/conf/spark-env.sh" ]; then
+ . "$FWDIR/conf/spark-env.sh"
+ fi
+ if [[ "x" != "x$SPARK_MASTER_IP" && "y" != "y$SPARK_MASTER_PORT" ]]; then
+ MASTER="spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}"
+ export MASTER
+ fi
+fi
+
+# Copy restore-TTY-on-exit functions from Scala script so spark-shell exits properly even in
+# binary distribution of Spark where Scala is not installed
+exit_status=127
+saved_stty=""
+
+# restore stty settings (echo in particular)
+function restoreSttySettings() {
+ stty $saved_stty
+ saved_stty=""
+}
+
+function onExit() {
+ if [[ "$saved_stty" != "" ]]; then
+ restoreSttySettings
+ fi
+ exit $exit_status
+}
+
+# to reenable echo if we are interrupted before completing.
+trap onExit INT
+
+# save terminal settings
+saved_stty=$(stty -g 2>/dev/null)
+# clear on error so we don't later try to restore them
+if [[ $? -ne 0 ]]; then
+ saved_stty=""
+fi
+
+if $cygwin; then
+ # Workaround for issue involving JLine and Cygwin
+ # (see http://sourceforge.net/p/jline/bugs/40/).
+ # If you're using the Mintty terminal emulator in Cygwin, you may need to set the
+ # "Backspace sends ^H" setting in "Keys" section of the Mintty options
+ # (see https://github.com/sbt/sbt/issues/562).
+ stty -icanon min 1 -echo > /dev/null 2>&1
+ $FWDIR/sbin/spark-class -Djline.terminal=unix $OPTIONS org.apache.spark.repl.Main "$@"
+ stty icanon echo > /dev/null 2>&1
+else
+ $FWDIR/sbin/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
+fi
+
+# record the exit status lest it be overwritten:
+# then reenable echo and propagate the code.
+exit_status=$?
+onExit
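The stty dance above is worth seeing on its own: save the terminal state, restore it from an INT trap so Ctrl-C cannot leave echo disabled, then propagate the real exit status. A self-contained sketch, with sleep standing in for the REPL:

    #!/usr/bin/env bash
    # Save terminal settings, guarantee they come back on interrupt, and
    # exit with the wrapped command's status.
    exit_status=127
    saved_stty=$(stty -g 2>/dev/null) || saved_stty=""
    onExit () {
      [ -n "$saved_stty" ] && stty "$saved_stty"
      exit "$exit_status"
    }
    trap onExit INT
    stty -echo                  # disturb the terminal, as a REPL might
    sleep 2                     # stand-in for the interactive program
    exit_status=$?
    onExit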
diff --git a/bin/spark-shell.cmd b/bin/spark-shell.cmd
new file mode 100644
index 0000000000..23973e3e3d
--- /dev/null
+++ b/bin/spark-shell.cmd
@@ -0,0 +1,23 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem Find the path of sbin
+set SBIN=%~dp0..\sbin\
+
+cmd /V /E /C %SBIN%spark-class2.cmd org.apache.spark.repl.Main %*
diff --git a/bin/start-all.sh b/bin/start-all.sh
deleted file mode 100755
index 0182f1ab24..0000000000
--- a/bin/start-all.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Start all spark daemons.
-# Starts the master on this node.
-# Starts a worker on each node specified in conf/slaves
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-# Load the Spark configuration
-. "$bin/spark-config.sh"
-
-# Start Master
-"$bin"/start-master.sh
-
-# Start Workers
-"$bin"/start-slaves.sh
diff --git a/bin/start-master.sh b/bin/start-master.sh
deleted file mode 100755
index 648c7ae75f..0000000000
--- a/bin/start-master.sh
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Starts the master on the machine this script is executed on.
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin/spark-config.sh"
-
-if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
- . "${SPARK_CONF_DIR}/spark-env.sh"
-fi
-
-if [ "$SPARK_MASTER_PORT" = "" ]; then
- SPARK_MASTER_PORT=7077
-fi
-
-if [ "$SPARK_MASTER_IP" = "" ]; then
- SPARK_MASTER_IP=`hostname`
-fi
-
-if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
- SPARK_MASTER_WEBUI_PORT=8080
-fi
-
-# Set SPARK_PUBLIC_DNS so the master reports the correct webUI address to the slaves
-if [ "$SPARK_PUBLIC_DNS" = "" ]; then
- # If we appear to be running on EC2, use the public address by default:
- # NOTE: ec2-metadata is installed on Amazon Linux AMI. Check based on that and hostname
- if command -v ec2-metadata > /dev/null || [[ `hostname` == *ec2.internal ]]; then
- export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname`
- fi
-fi
-
-"$bin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
diff --git a/bin/start-slave.sh b/bin/start-slave.sh
deleted file mode 100755
index 4eefa20944..0000000000
--- a/bin/start-slave.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Usage: start-slave.sh <worker#> <master-spark-URL>
-# where <master-spark-URL> is like "spark://localhost:7077"
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-# Set SPARK_PUBLIC_DNS so slaves can be linked in master web UI
-if [ "$SPARK_PUBLIC_DNS" = "" ]; then
- # If we appear to be running on EC2, use the public address by default:
- # NOTE: ec2-metadata is installed on Amazon Linux AMI. Check based on that and hostname
- if command -v ec2-metadata > /dev/null || [[ `hostname` == *ec2.internal ]]; then
- export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname`
- fi
-fi
-
-"$bin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
diff --git a/bin/start-slaves.sh b/bin/start-slaves.sh
deleted file mode 100755
index 00dc4888b2..0000000000
--- a/bin/start-slaves.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin/spark-config.sh"
-
-if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
- . "${SPARK_CONF_DIR}/spark-env.sh"
-fi
-
-# Find the port number for the master
-if [ "$SPARK_MASTER_PORT" = "" ]; then
- SPARK_MASTER_PORT=7077
-fi
-
-if [ "$SPARK_MASTER_IP" = "" ]; then
- SPARK_MASTER_IP=`hostname`
-fi
-
-# Launch the slaves
-if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
- exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
-else
- if [ "$SPARK_WORKER_WEBUI_PORT" = "" ]; then
- SPARK_WORKER_WEBUI_PORT=8081
- fi
- for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
- "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i ))
- done
-fi
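start-slaves.sh numbers its workers from 1 and offsets each web UI port from a base. The loop arithmetic on its own, with illustrative defaults in place of a live cluster:

    #!/usr/bin/env bash
    # Launch N numbered instances, each on its own web UI port.
    SPARK_WORKER_INSTANCES=${SPARK_WORKER_INSTANCES:-3}
    SPARK_WORKER_WEBUI_PORT=${SPARK_WORKER_WEBUI_PORT:-8081}
    for ((i = 0; i < SPARK_WORKER_INSTANCES; i++)); do
      echo "worker $((i + 1)) -> webui port $((SPARK_WORKER_WEBUI_PORT + i))"
    done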
diff --git a/bin/stop-all.sh b/bin/stop-all.sh
deleted file mode 100755
index b6c83a7ba4..0000000000
--- a/bin/stop-all.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Stop all spark daemons.
-# Run this on the master node.
-
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-# Load the Spark configuration
-. "$bin/spark-config.sh"
-
-# Stop the slaves, then the master
-"$bin"/stop-slaves.sh
-"$bin"/stop-master.sh
diff --git a/bin/stop-master.sh b/bin/stop-master.sh
deleted file mode 100755
index 310e33bedc..0000000000
--- a/bin/stop-master.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Stops the master on the machine this script is executed on.
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin/spark-config.sh"
-
-"$bin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
diff --git a/bin/stop-slaves.sh b/bin/stop-slaves.sh
deleted file mode 100755
index fcb8555d4e..0000000000
--- a/bin/stop-slaves.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin/spark-config.sh"
-
-if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
- . "${SPARK_CONF_DIR}/spark-env.sh"
-fi
-
-if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
- "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
-else
- for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
- "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
- done
-fi