diff options
author | Hossein Falaki <falaki@gmail.com> | 2014-01-07 15:22:42 -0800 |
---|---|---|
committer | Hossein Falaki <falaki@gmail.com> | 2014-01-07 15:22:42 -0800 |
commit | 3a8beb46cb53cf6807f39cca54b1efdbbc303f41 (patch) | |
tree | 609e141e3b34212c5d171e593dab803aaafdd7aa /make-distribution.sh | |
parent | 754f5300a1e0a214b62cbd6db2398dea4dfbceb4 (diff) | |
parent | 7d5fa175ca9cd2260c7bcd18c201bc087d4f62c3 (diff) | |
download | spark-3a8beb46cb53cf6807f39cca54b1efdbbc303f41.tar.gz spark-3a8beb46cb53cf6807f39cca54b1efdbbc303f41.tar.bz2 spark-3a8beb46cb53cf6807f39cca54b1efdbbc303f41.zip |
Merge branch 'master' into MatrixFactorizationModel-fix
Diffstat (limited to 'make-distribution.sh')
-rwxr-xr-x | make-distribution.sh | 27 |
1 file changed, 18 insertions, 9 deletions
diff --git a/make-distribution.sh b/make-distribution.sh
index 32bbdb90a5..1a3a5d0209 100755
--- a/make-distribution.sh
+++ b/make-distribution.sh
@@ -31,10 +31,10 @@
 #
 # Recommended deploy/testing procedure (standalone mode):
 # 1) Rsync / deploy the dist/ dir to one host
-# 2) cd to deploy dir; ./bin/start-master.sh
+# 2) cd to deploy dir; ./sbin/start-master.sh
 # 3) Verify master is up by visiting web page, ie http://master-ip:8080. Note the spark:// URL.
-# 4) ./bin/start-slave.sh 1 <<spark:// URL>>
-# 5) MASTER="spark://my-master-ip:7077" ./spark-shell
+# 4) ./sbin/start-slave.sh 1 <<spark:// URL>>
+# 5) MASTER="spark://my-master-ip:7077" ./bin/spark-shell
 #
 
 # Figure out where the Spark framework is installed
@@ -43,7 +43,17 @@ DISTDIR="$FWDIR/dist"
 
 # Get version from SBT
 export TERM=dumb   # Prevents color codes in SBT output
-VERSION=$($FWDIR/sbt/sbt "show version" | tail -1 | cut -f 2 | sed 's/^\([a-zA-Z0-9.-]*\).*/\1/')
+
+VERSIONSTRING=$FWDIR/sbt/sbt "show version"
+
+if [ $? == -1 ] ;then
+    echo -e "You need sbt installed and available on your path."
+    echo -e "Download sbt from http://www.scala-sbt.org/"
+    exit -1;
+fi
+
+VERSION=$(echo "${VERSIONSTRING}" | tail -1 | cut -f 2 | sed 's/^\([a-zA-Z0-9.-]*\).*/\1/')
+echo "Version is ${VERSION}"
 
 # Initialize defaults
 SPARK_HADOOP_VERSION=1.0.4
@@ -83,7 +93,9 @@ fi
 # Build fat JAR
 export SPARK_HADOOP_VERSION
 export SPARK_YARN
-"$FWDIR/sbt/sbt" "assembly/assembly"
+cd $FWDIR
+
+"sbt/sbt" "assembly/assembly"
 
 # Make directories
 rm -rf "$DISTDIR"
@@ -98,10 +110,7 @@ mkdir "$DISTDIR"/conf
 cp "$FWDIR"/conf/*.template "$DISTDIR"/conf
 cp -r "$FWDIR/bin" "$DISTDIR"
 cp -r "$FWDIR/python" "$DISTDIR"
-cp "$FWDIR/spark-class" "$DISTDIR"
-cp "$FWDIR/spark-shell" "$DISTDIR"
-cp "$FWDIR/spark-executor" "$DISTDIR"
-cp "$FWDIR/pyspark" "$DISTDIR"
+cp -r "$FWDIR/sbin" "$DISTDIR"
 
 if [ "$MAKE_TGZ" == "true" ]; then