diff options
author | Matei Zaharia <matei@eecs.berkeley.edu> | 2013-02-26 12:24:18 -0800 |
---|---|---|
committer | Matei Zaharia <matei@eecs.berkeley.edu> | 2013-02-26 12:24:18 -0800 |
commit | 434a1ce7739b4f1abe93408edaff9388e480d806 (patch) | |
tree | ec7a6e02884b25ef894b4c5972064f28cb9b10e3 /run | |
parent | ece3edfffa02f90a71569961b91bf44041f21afe (diff) | |
download | spark-434a1ce7739b4f1abe93408edaff9388e480d806.tar.gz spark-434a1ce7739b4f1abe93408edaff9388e480d806.tar.bz2 spark-434a1ce7739b4f1abe93408edaff9388e480d806.zip |
Small hack to work around multiple JARs being built by sbt package
Diffstat (limited to 'run')
-rwxr-xr-x | run | 11 |
1 file changed, 6 insertions, 5 deletions
@@ -134,14 +134,15 @@ for jar in `find $PYSPARK_DIR/lib -name '*jar'`; do
 done
 export CLASSPATH # Needed for spark-shell
 
-# Figure out the JAR file that our examples were packaged into.
-if [ -e "$EXAMPLES_DIR/target/scala-$SCALA_VERSION/spark-examples"*".jar" ]; then
+# Figure out the JAR file that our examples were packaged into. This includes a bit of a hack
+# to avoid the -sources and -doc packages that are built by publish-local.
+if [ -e "$EXAMPLES_DIR/target/scala-$SCALA_VERSION/spark-examples"*[0-9T].jar ]; then
   # Use the JAR from the SBT build
-  export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR/target/scala-$SCALA_VERSION/spark-examples"*".jar"`
+  export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR/target/scala-$SCALA_VERSION/spark-examples"*[0-9T].jar`
 fi
-if [ -e "$EXAMPLES_DIR/target/spark-examples-"*hadoop*".jar" ]; then
+if [ -e "$EXAMPLES_DIR/target/spark-examples-"*hadoop[12].jar ]; then
   # Use the JAR from the Maven build
-  export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR/target/spark-examples-"*hadoop*".jar"`
+  export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR/target/spark-examples-"*hadoop[12].jar`
 fi
 
 # Figure out whether to run our class with java or with the scala launcher.