#!/bin/bash

# Figure out where the framework is installed (the directory containing this script)
FWDIR=`dirname "$0"`

# Load environment variables from conf/spark-env.sh, if it exists
if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
  . "$FWDIR/conf/spark-env.sh"
fi

# Default to 200 MB of JVM heap if SPARK_MEM is not set
if [ -z "$SPARK_MEM" ] ; then
  SPARK_MEM="200m"
fi

# Set JAVA_OPTS to be able to load native libraries and to set heap size
JAVA_OPTS="$SPARK_JAVA_OPTS"
JAVA_OPTS+=" -Djava.library.path=$SPARK_LIBRARY_PATH:$FWDIR/third_party:$FWDIR/src/native"
JAVA_OPTS+=" -Xms$SPARK_MEM -Xmx$SPARK_MEM"
# Load extra JAVA_OPTS from conf/java-opts, if it exists
if [ -e "$FWDIR/conf/java-opts" ] ; then
  JAVA_OPTS+=" `cat "$FWDIR/conf/java-opts"`"
fi
export JAVA_OPTS

# Build up classpath
SPARK_CLASSPATH="$SPARK_CLASSPATH:$FWDIR/build/classes"
SPARK_CLASSPATH+=:$FWDIR/third_party/nexus.jar
SPARK_CLASSPATH+=:$FWDIR/third_party/asm-3.2/lib/all/asm-all-3.2.jar
SPARK_CLASSPATH+=:$FWDIR/third_party/colt.jar
SPARK_CLASSPATH+=:$FWDIR/third_party/google-collect-1.0-rc5/google-collect-1.0-rc5.jar
SPARK_CLASSPATH+=:$FWDIR/third_party/hadoop-0.20.0/hadoop-0.20.0-core.jar
SPARK_CLASSPATH+=:$FWDIR/third_party/scalatest-1.2-for-scala-2.8.0.RC3-SNAPSHOT.jar
SPARK_CLASSPATH+=:$FWDIR/third_party/scalacheck_2.8.0.RC3-1.7.jar
for jar in "$FWDIR"/third_party/hadoop-0.20.0/lib/*.jar; do
  SPARK_CLASSPATH+=:$jar
done
export SPARK_CLASSPATH
export CLASSPATH=$SPARK_CLASSPATH  # Needed for spark-shell

# Prefer the Scala launcher from SCALA_HOME if set; otherwise use the one on PATH
if [ -n "$SCALA_HOME" ]; then
  SCALA="${SCALA_HOME}/bin/scala"
else
  SCALA=scala
fi

# Run the given main class with the framework's classpath and options,
# quoting "$@" so arguments containing spaces are forwarded intact
exec "$SCALA" -cp "$SPARK_CLASSPATH" "$@"
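
# Example invocation (a sketch; "spark.examples.SparkPi" and the "local"
# argument are hypothetical placeholders for any main class on the classpath
# built above and whatever arguments it expects):
#
#   ./run spark.examples.SparkPi local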