Diffstat (limited to 'sbin')
-rw-r--r--  sbin/compute-classpath.cmd  |  2
-rwxr-xr-x  sbin/compute-classpath.sh   | 26
-rwxr-xr-x  sbin/slaves.sh              | 19
-rwxr-xr-x  sbin/spark-class            | 49
-rw-r--r--  sbin/spark-class2.cmd       |  9
-rwxr-xr-x  sbin/spark-daemon.sh        | 21
-rwxr-xr-x  sbin/spark-daemons.sh       |  2
-rwxr-xr-x  sbin/stop-slaves.sh         |  6
8 files changed, 113 insertions, 21 deletions
diff --git a/sbin/compute-classpath.cmd b/sbin/compute-classpath.cmd
index e0b8a8ef5f..4f60bff19c 100644
--- a/sbin/compute-classpath.cmd
+++ b/sbin/compute-classpath.cmd
@@ -20,7 +20,7 @@ rem
rem This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
rem script and the ExecutorRunner in standalone cluster mode.
-set SCALA_VERSION=2.9.3
+set SCALA_VERSION=2.10
rem Figure out where the Spark framework is installed
set FWDIR=%~dp0..\
diff --git a/sbin/compute-classpath.sh b/sbin/compute-classpath.sh
index cfe5fe7bef..0c82310421 100755
--- a/sbin/compute-classpath.sh
+++ b/sbin/compute-classpath.sh
@@ -20,7 +20,7 @@
# This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
# script and the ExecutorRunner in standalone cluster mode.
-SCALA_VERSION=2.9.3
+SCALA_VERSION=2.10
# Figure out where Spark is installed
FWDIR="$(cd `dirname $0`/..; pwd)"
@@ -31,13 +31,27 @@ if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
fi
# Build up classpath
-CLASSPATH="$FWDIR/conf"
-if [ -f "$FWDIR/RELEASE" ]; then
- ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar`
+CLASSPATH="$SPARK_CLASSPATH:$FWDIR/conf"
+
+# First check if we have a dependencies jar. If so, include binary classes with the deps jar
+if [ -f "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*-deps.jar ]; then
+ CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"
+ CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/classes"
+ CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/classes"
+ CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/classes"
+ CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/classes"
+
+ DEPS_ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*-deps.jar`
+ CLASSPATH="$CLASSPATH:$DEPS_ASSEMBLY_JAR"
else
- ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar`
+ # Else use spark-assembly jar from either RELEASE or assembly directory
+ if [ -f "$FWDIR/RELEASE" ]; then
+ ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar`
+ else
+ ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar`
+ fi
+ CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
fi
-CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
# Add test classes if we're running from SBT or Maven with SPARK_TESTING set to 1
if [[ $SPARK_TESTING == 1 ]]; then
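For context, callers such as sbin/spark-class (and the ExecutorRunner in standalone mode) consume this script's stdout as the JVM classpath. A minimal sketch of that pattern; the SPARK_HOME default and the choice of main class are illustrative assumptions, not part of this patch:

    #!/usr/bin/env bash
    # Sketch: use compute-classpath.sh output as a JVM classpath.
    # SPARK_HOME and the Master main class are assumptions for illustration.
    SPARK_HOME="${SPARK_HOME:-/opt/spark}"
    CLASSPATH="$("$SPARK_HOME/sbin/compute-classpath.sh")"
    exec java -cp "$CLASSPATH" org.apache.spark.deploy.master.Master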
diff --git a/sbin/slaves.sh b/sbin/slaves.sh
index 68408bcad8..a5bc2183d8 100755
--- a/sbin/slaves.sh
+++ b/sbin/slaves.sh
@@ -28,7 +28,7 @@
# SPARK_SSH_OPTS Options passed to ssh when running remote commands.
##
-usage="Usage: slaves.sh [--config confdir] command..."
+usage="Usage: slaves.sh [--config <conf-dir>] command..."
# if no args specified, show usage
if [ $# -le 0 ]; then
@@ -46,6 +46,23 @@ sbin=`cd "$sbin"; pwd`
# spark-env.sh. Save it here.
HOSTLIST=$SPARK_SLAVES
+# Check if --config is passed as an argument. It is an optional parameter.
+# Exit if the argument is not a directory.
+if [ "$1" == "--config" ]
+then
+ shift
+ conf_dir=$1
+ if [ ! -d "$conf_dir" ]
+ then
+ echo "ERROR : $conf_dir is not a directory"
+ echo $usage
+ exit 1
+ else
+ export SPARK_CONF_DIR=$conf_dir
+ fi
+ shift
+fi
+
if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
. "${SPARK_CONF_DIR}/spark-env.sh"
fi
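Note that the new --config option must be the first argument, since the block above only inspects $1. A hypothetical invocation under that assumption (the conf path and remote command are illustrative):

    # Run `uptime` on every host listed in conf/slaves, using an alternate conf dir.
    ./sbin/slaves.sh --config /etc/spark/conf uptime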
diff --git a/sbin/spark-class b/sbin/spark-class
index 3bdc29e543..4e440d8729 100755
--- a/sbin/spark-class
+++ b/sbin/spark-class
@@ -17,7 +17,12 @@
# limitations under the License.
#
-SCALA_VERSION=2.9.3
+cygwin=false
+case "`uname`" in
+ CYGWIN*) cygwin=true;;
+esac
+
+SCALA_VERSION=2.10
# Figure out where the Scala framework is installed
FWDIR="$(cd `dirname $0`/..; pwd)"
@@ -55,7 +60,7 @@ case "$1" in
'org.apache.spark.deploy.worker.Worker')
OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_WORKER_OPTS"
;;
- 'org.apache.spark.executor.StandaloneExecutorBackend')
+ 'org.apache.spark.executor.CoarseGrainedExecutorBackend')
OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_EXECUTOR_OPTS"
;;
'org.apache.spark.executor.MesosExecutorBackend')
@@ -95,16 +100,46 @@ export JAVA_OPTS
if [ ! -f "$FWDIR/RELEASE" ]; then
# Exit if the user hasn't compiled Spark
- ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar >& /dev/null
- if [[ $? != 0 ]]; then
- echo "Failed to find Spark assembly in $FWDIR/assembly/target" >&2
- echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
+ num_jars=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*.jar" | wc -l)
+ jars_list=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*.jar")
+ if [ "$num_jars" -eq "0" ]; then
+ echo "Failed to find Spark assembly in $FWDIR/assembly/target/scala-$SCALA_VERSION/" >&2
+ echo "You need to build Spark with 'sbt/sbt assembly' before running this program." >&2
+ exit 1
+ fi
+ if [ "$num_jars" -gt "1" ]; then
+ echo "Found multiple Spark assembly jars in $FWDIR/assembly/target/scala-$SCALA_VERSION:" >&2
+ echo "$jars_list"
+ echo "Please remove all but one jar."
exit 1
fi
fi
+TOOLS_DIR="$FWDIR"/tools
+SPARK_TOOLS_JAR=""
+if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
+ # Use the JAR from the SBT build
+ export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
+fi
+if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
+ # Use the JAR from the Maven build
+ # TODO: this also needs to become an assembly!
+ export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
+fi
+
# Compute classpath using external script
CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`
+
+if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
+ CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
+fi
+
+if $cygwin; then
+ CLASSPATH=`cygpath -wp $CLASSPATH`
+ if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
+ export SPARK_TOOLS_JAR=`cygpath -w $SPARK_TOOLS_JAR`
+ fi
+fi
export CLASSPATH
if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
@@ -115,3 +150,5 @@ if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
fi
exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
+
+
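The ls | grep | wc -l pipeline above lists the directory twice and forks several processes; the same "exactly one assembly jar" check can be expressed with a bash glob array. A sketch under the same directory-layout assumptions, not part of the patch:

    # If the glob matches nothing it stays literal, so the -e test fails,
    # i.e. "no jar found"; more than one element means multiple jars.
    jars=( "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar )
    if [ ! -e "${jars[0]}" ]; then
      echo "Failed to find Spark assembly in $FWDIR/assembly/target/scala-$SCALA_VERSION/" >&2
      exit 1
    elif [ "${#jars[@]}" -gt 1 ]; then
      echo "Found multiple Spark assembly jars; please remove all but one:" >&2
      printf '%s\n' "${jars[@]}" >&2
      exit 1
    fi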
diff --git a/sbin/spark-class2.cmd b/sbin/spark-class2.cmd
index 5e00bd39ea..460e661476 100644
--- a/sbin/spark-class2.cmd
+++ b/sbin/spark-class2.cmd
@@ -17,7 +17,7 @@ rem See the License for the specific language governing permissions and
rem limitations under the License.
rem
-set SCALA_VERSION=2.9.3
+set SCALA_VERSION=2.10
rem Figure out where the Spark framework is installed
set FWDIR=%~dp0..\
@@ -65,10 +65,17 @@ if "%FOUND_JAR%"=="0" (
)
:skip_build_test
+set TOOLS_DIR=%FWDIR%tools
+set SPARK_TOOLS_JAR=
+for %%d in ("%TOOLS_DIR%\target\scala-%SCALA_VERSION%\spark-tools*assembly*.jar") do (
+ set SPARK_TOOLS_JAR=%%d
+)
+
rem Compute classpath using external script
set DONT_PRINT_CLASSPATH=1
call "%FWDIR%sbin\compute-classpath.cmd"
set DONT_PRINT_CLASSPATH=0
+set CLASSPATH=%CLASSPATH%;%SPARK_TOOLS_JAR%
rem Figure out where java is.
set RUNNER=java
diff --git a/sbin/spark-daemon.sh b/sbin/spark-daemon.sh
index ae82349cc6..ca6b893b9b 100755
--- a/sbin/spark-daemon.sh
+++ b/sbin/spark-daemon.sh
@@ -29,7 +29,7 @@
# SPARK_NICENESS The scheduling priority for daemons. Defaults to 0.
##
-usage="Usage: spark-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) <spark-command> <spark-instance-number> <args...>"
+usage="Usage: spark-daemon.sh [--config <conf-dir>] (start|stop) <spark-command> <spark-instance-number> <args...>"
# if no args specified, show usage
if [ $# -le 1 ]; then
@@ -43,6 +43,25 @@ sbin=`cd "$sbin"; pwd`
. "$sbin/spark-config.sh"
# get arguments
+
+# Check if --config is passed as an argument. It is an optional parameter.
+# Exit if the argument is not a directory.
+
+if [ "$1" == "--config" ]
+then
+ shift
+ conf_dir=$1
+ if [ ! -d "$conf_dir" ]
+ then
+ echo "ERROR : $conf_dir is not a directory"
+ echo $usage
+ exit 1
+ else
+ export SPARK_CONF_DIR=$conf_dir
+ fi
+ shift
+fi
+
startStop=$1
shift
command=$1
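With this parsing, --config must precede the start/stop keyword. Hypothetical invocations matching the usage string above (the conf path and instance number are illustrative):

    # Start master instance 1 with an alternate conf dir, then stop it.
    ./sbin/spark-daemon.sh --config /etc/spark/conf start org.apache.spark.deploy.master.Master 1
    ./sbin/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1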
diff --git a/sbin/spark-daemons.sh b/sbin/spark-daemons.sh
index d91254b690..5d9f2bb51c 100755
--- a/sbin/spark-daemons.sh
+++ b/sbin/spark-daemons.sh
@@ -19,7 +19,7 @@
# Run a Spark command on all slave hosts.
-usage="Usage: spark-daemons.sh [--config confdir] [--hosts hostlistfile] [start|stop] command instance-number args..."
+usage="Usage: spark-daemons.sh [--config <conf-dir>] [start|stop] command instance-number args..."
# if no args specified, show usage
if [ $# -le 1 ]; then
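spark-daemons.sh forwards its arguments to spark-daemon.sh on each slave host via slaves.sh, so a hypothetical invocation mirrors the single-host form (the conf path and master URL are illustrative):

    ./sbin/spark-daemons.sh --config /etc/spark/conf start org.apache.spark.deploy.worker.Worker 1 spark://master:7077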
diff --git a/sbin/stop-slaves.sh b/sbin/stop-slaves.sh
index 63802e6df5..c6b0b6ab66 100755
--- a/sbin/stop-slaves.sh
+++ b/sbin/stop-slaves.sh
@@ -17,10 +17,8 @@
# limitations under the License.
#
-# Starts the master on the machine this script is executed on.
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`
. "$sbin/spark-config.sh"