path: root/run
author     haitao.yao <yao.erix@gmail.com>    2013-01-24 10:27:02 +0800
committer  haitao.yao <yao.erix@gmail.com>    2013-01-24 10:27:02 +0800
commit     97e242067b9d75abd88543c759d8fc0aebd9eb8c (patch)
tree       550ebeecf7d7edc0b2825561db0e5c835eb16512 /run
parent     df9ae8a74e2d42f627a82f9b0a8123a4d54b1d09 (diff)
parent     548856a22403f6a76d67570db6fa448b2a0e5ad3 (diff)
download   spark-97e242067b9d75abd88543c759d8fc0aebd9eb8c.tar.gz
           spark-97e242067b9d75abd88543c759d8fc0aebd9eb8c.tar.bz2
           spark-97e242067b9d75abd88543c759d8fc0aebd9eb8c.zip
Merge branch 'mesos'
Diffstat (limited to 'run')
-rwxr-xr-x  run  27
1 file changed, 18 insertions(+), 9 deletions(-)
diff --git a/run b/run
index 1528f83534..a094629449 100755
--- a/run
+++ b/run
@@ -63,6 +63,15 @@ CORE_DIR="$FWDIR/core"
REPL_DIR="$FWDIR/repl"
EXAMPLES_DIR="$FWDIR/examples"
BAGEL_DIR="$FWDIR/bagel"
+STREAMING_DIR="$FWDIR/streaming"
+PYSPARK_DIR="$FWDIR/python"
+
+# Exit if the user hasn't compiled Spark
+if [ ! -e "$REPL_DIR/target" ]; then
+ echo "Failed to find Spark classes in $REPL_DIR/target" >&2
+ echo "You need to compile Spark before running this program" >&2
+ exit 1
+fi

# Build up classpath
CLASSPATH="$SPARK_CLASSPATH"
@@ -74,21 +83,21 @@ fi
CLASSPATH+=":$CORE_DIR/src/main/resources"
CLASSPATH+=":$REPL_DIR/target/scala-$SCALA_VERSION/classes"
CLASSPATH+=":$EXAMPLES_DIR/target/scala-$SCALA_VERSION/classes"
+CLASSPATH+=":$STREAMING_DIR/target/scala-$SCALA_VERSION/classes"
if [ -e "$FWDIR/lib_managed" ]; then
- for jar in `find "$FWDIR/lib_managed/jars" -name '*jar'`; do
- CLASSPATH+=":$jar"
- done
- for jar in `find "$FWDIR/lib_managed/bundles" -name '*jar'`; do
+ CLASSPATH+=":$FWDIR/lib_managed/jars/*"
+ CLASSPATH+=":$FWDIR/lib_managed/bundles/*"
+fi
+CLASSPATH+=":$REPL_DIR/lib/*"
+if [ -e repl-bin/target ]; then
+ for jar in `find "repl-bin/target" -name 'spark-repl-*-shaded-hadoop*.jar'`; do
CLASSPATH+=":$jar"
done
fi
-for jar in `find "$REPL_DIR/lib" -name '*jar'`; do
- CLASSPATH+=":$jar"
-done
-for jar in `find "$REPL_DIR/target" -name 'spark-repl-*-shaded-hadoop*.jar'`; do
+CLASSPATH+=":$BAGEL_DIR/target/scala-$SCALA_VERSION/classes"
+for jar in `find $PYSPARK_DIR/lib -name '*jar'`; do
CLASSPATH+=":$jar"
done
-CLASSPATH+=":$BAGEL_DIR/target/scala-$SCALA_VERSION/classes"
export CLASSPATH # Needed for spark-shell

# Figure out whether to run our class with java or with the scala launcher.
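
Note on the classpath change above: instead of looping over find output and appending each jar individually, the script now relies on directory wildcard entries of the form "dir/*", which the java launcher itself expands to every .jar in that directory (supported since Java 6). Because the wildcard appears inside a variable assignment, the shell does not glob it, so it reaches the JVM intact. The added block simply aborts with a message when the repl target directory is missing, i.e. when Spark has not been compiled yet. A minimal standalone sketch of both patterns follows; the paths and file names are illustrative, not taken from the commit:

#!/usr/bin/env bash
# Sketch only: illustrative layout, not the actual Spark run script.
FWDIR="$(cd "$(dirname "$0")"; pwd)"    # assumed project root
CLASSPATH=""

# Fail fast if the build output is missing (mirrors the new compile check).
if [ ! -e "$FWDIR/repl/target" ]; then
  echo "You need to compile Spark before running this program" >&2
  exit 1
fi

# Old style: one classpath entry per jar found on disk.
for jar in `find "$FWDIR/lib_managed/jars" -name '*.jar'`; do
  CLASSPATH+=":$jar"
done

# New style: a single wildcard entry that the JVM expands itself (Java 6+).
CLASSPATH+=":$FWDIR/lib_managed/bundles/*"

export CLASSPATH    # picked up by the java or scala launcher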