diff options
author    | Raymond Liu <raymond.liu@intel.com> | 2013-11-12 15:14:21 +0800
committer | Raymond Liu <raymond.liu@intel.com> | 2013-11-13 16:55:11 +0800
commit    | 0f2e3c6e31d56c627ff81cdc93289a7c7cb2ec16 (patch)
tree      | 60f01110b170ff72347e1ae6209f898712578ed3 /bin
parent    | 5429d62dfa16305eb23d67dfe38172803c80db65 (diff)
parent    | 3d4ad84b63e440fd3f4b3edb1b120ff7c14a42d1 (diff)
download  | spark-0f2e3c6e31d56c627ff81cdc93289a7c7cb2ec16.tar.gz spark-0f2e3c6e31d56c627ff81cdc93289a7c7cb2ec16.tar.bz2 spark-0f2e3c6e31d56c627ff81cdc93289a7c7cb2ec16.zip
Merge branch 'master' into scala-2.10
Diffstat (limited to 'bin')
-rwxr-xr-x | bin/compute-classpath.sh | 22
-rwxr-xr-x | bin/slaves.sh            | 19
-rwxr-xr-x | bin/spark-daemon.sh      | 21
-rwxr-xr-x | bin/spark-daemons.sh     |  2
-rwxr-xr-x | bin/stop-slaves.sh       |  2
5 files changed, 57 insertions, 9 deletions
diff --git a/bin/compute-classpath.sh b/bin/compute-classpath.sh index 4fe3d0ef3a..40555089fc 100755 --- a/bin/compute-classpath.sh +++ b/bin/compute-classpath.sh @@ -32,12 +32,26 @@ fi # Build up classpath CLASSPATH="$SPARK_CLASSPATH:$FWDIR/conf" -if [ -f "$FWDIR/RELEASE" ]; then - ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar` + +# First check if we have a dependencies jar. If so, include binary classes with the deps jar +if [ -f "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*-deps.jar ]; then + CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes" + CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/classes" + CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/classes" + CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/classes" + CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/classes" + + DEPS_ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*-deps.jar` + CLASSPATH="$CLASSPATH:$DEPS_ASSEMBLY_JAR" else - ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar` + # Else use spark-assembly jar from either RELEASE or assembly directory + if [ -f "$FWDIR/RELEASE" ]; then + ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar` + else + ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar` + fi + CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR" fi -CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR" # Add test classes if we're running from SBT or Maven with SPARK_TESTING set to 1 if [[ $SPARK_TESTING == 1 ]]; then diff --git a/bin/slaves.sh b/bin/slaves.sh index 752565b759..c367c2fd8e 100755 --- a/bin/slaves.sh +++ b/bin/slaves.sh @@ -28,7 +28,7 @@ # SPARK_SSH_OPTS Options passed to ssh when running remote commands. ## -usage="Usage: slaves.sh [--config confdir] command..." +usage="Usage: slaves.sh [--config <conf-dir>] command..." 
# if no args specified, show usage if [ $# -le 0 ]; then @@ -46,6 +46,23 @@ bin=`cd "$bin"; pwd` # spark-env.sh. Save it here. HOSTLIST=$SPARK_SLAVES +# Check if --config is passed as an argument. It is an optional parameter. +# Exit if the argument is not a directory. +if [ "$1" == "--config" ] +then + shift + conf_dir=$1 + if [ ! -d "$conf_dir" ] + then + echo "ERROR : $conf_dir is not a directory" + echo $usage + exit 1 + else + export SPARK_CONF_DIR=$conf_dir + fi + shift +fi + if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then . "${SPARK_CONF_DIR}/spark-env.sh" fi diff --git a/bin/spark-daemon.sh b/bin/spark-daemon.sh index 5bfe967fbf..a0c0d44b58 100755 --- a/bin/spark-daemon.sh +++ b/bin/spark-daemon.sh @@ -29,7 +29,7 @@ # SPARK_NICENESS The scheduling priority for daemons. Defaults to 0. ## -usage="Usage: spark-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) <spark-command> <spark-instance-number> <args...>" +usage="Usage: spark-daemon.sh [--config <conf-dir>] (start|stop) <spark-command> <spark-instance-number> <args...>" # if no args specified, show usage if [ $# -le 1 ]; then @@ -43,6 +43,25 @@ bin=`cd "$bin"; pwd` . "$bin/spark-config.sh" # get arguments + +# Check if --config is passed as an argument. It is an optional parameter. +# Exit if the argument is not a directory. + +if [ "$1" == "--config" ] +then + shift + conf_dir=$1 + if [ ! -d "$conf_dir" ] + then + echo "ERROR : $conf_dir is not a directory" + echo $usage + exit 1 + else + export SPARK_CONF_DIR=$conf_dir + fi + shift +fi + startStop=$1 shift command=$1 diff --git a/bin/spark-daemons.sh b/bin/spark-daemons.sh index 354eb905a1..64286cb2da 100755 --- a/bin/spark-daemons.sh +++ b/bin/spark-daemons.sh @@ -19,7 +19,7 @@ # Run a Spark command on all slave hosts. -usage="Usage: spark-daemons.sh [--config confdir] [--hosts hostlistfile] [start|stop] command instance-number args..." +usage="Usage: spark-daemons.sh [--config <conf-dir>] [start|stop] command instance-number args..." 
# if no args specified, show usage if [ $# -le 1 ]; then diff --git a/bin/stop-slaves.sh b/bin/stop-slaves.sh index 03e416a132..fcb8555d4e 100755 --- a/bin/stop-slaves.sh +++ b/bin/stop-slaves.sh @@ -17,8 +17,6 @@ # limitations under the License. # -# Starts the master on the machine this script is executed on. - bin=`dirname "$0"` bin=`cd "$bin"; pwd`