about summary refs log tree commit diff
path: root/run
diff options
context:
space:
mode:
authorMatei Zaharia <matei@eecs.berkeley.edu>2010-07-19 18:00:30 -0700
committerMatei Zaharia <matei@eecs.berkeley.edu>2010-07-19 18:00:30 -0700
commit0435de9e8710ffd2d24a65ef4371529e79d3bf3c (patch)
treed8edca9a808d8615dadb110b3c10aa7fd1ee1344 /run
parentedad598684236d6271ce7853a8312081d15a28a6 (diff)
downloadspark-0435de9e8710ffd2d24a65ef4371529e79d3bf3c.tar.gz
spark-0435de9e8710ffd2d24a65ef4371529e79d3bf3c.tar.bz2
spark-0435de9e8710ffd2d24a65ef4371529e79d3bf3c.zip
Made it possible to set various Spark options and environment variables
in general through a conf/spark-env.sh script.
Diffstat (limited to 'run')
-rwxr-xr-xrun39
1 file changed, 26 insertions, 13 deletions
diff --git a/run b/run
index e6723ccd7c..36fbd9d23d 100755
--- a/run
+++ b/run
@@ -3,26 +3,39 @@
# Figure out where the Scala framework is installed
FWDIR=`dirname $0`
-# Set JAVA_OPTS to be able to load libnexus.so and set various other misc options
-export JAVA_OPTS="-Djava.library.path=$FWDIR/third_party:$FWDIR/src/native -Xms100m -Xmx750m"
+# Load environment variables from conf/spark-env.sh, if it exists
+if [ -e $FWDIR/conf/spark-env.sh ] ; then
+ . $FWDIR/conf/spark-env.sh
+fi
+
+if [ "$SPARK_MEM" == "" ] ; then
+ SPARK_MEM="200m"
+fi
+
+# Set JAVA_OPTS to be able to load native libraries and to set heap size
+JAVA_OPTS="$SPARK_JAVA_OPTS"
+JAVA_OPTS+=" -Djava.library.path=$SPARK_LIBRARY_PATH:$FWDIR/third_party:$FWDIR/src/native"
+JAVA_OPTS+=" -Xms$SPARK_MEM -Xmx$SPARK_MEM"
+# Load extra JAVA_OPTS from conf/java-opts, if it exists
if [ -e $FWDIR/conf/java-opts ] ; then
JAVA_OPTS+=" `cat $FWDIR/conf/java-opts`"
fi
export JAVA_OPTS
# Build up classpath
-CLASSPATH=$FWDIR/build/classes
-CLASSPATH+=:$FWDIR/third_party/nexus.jar
-CLASSPATH+=:$FWDIR/third_party/asm-3.2/lib/all/asm-all-3.2.jar
-CLASSPATH+=:$FWDIR/third_party/colt.jar
-CLASSPATH+=:$FWDIR/third_party/google-collect-1.0-rc5/google-collect-1.0-rc5.jar
-CLASSPATH+=:$FWDIR/third_party/hadoop-0.20.0/hadoop-0.20.0-core.jar
-CLASSPATH+=:third_party/scalatest-1.2-for-scala-2.8.0.RC3-SNAPSHOT.jar
-CLASSPATH+=:third_party/scalacheck_2.8.0.RC3-1.7.jar
+SPARK_CLASSPATH="$SPARK_CLASSPATH:$FWDIR/build/classes"
+SPARK_CLASSPATH+=:$FWDIR/third_party/nexus.jar
+SPARK_CLASSPATH+=:$FWDIR/third_party/asm-3.2/lib/all/asm-all-3.2.jar
+SPARK_CLASSPATH+=:$FWDIR/third_party/colt.jar
+SPARK_CLASSPATH+=:$FWDIR/third_party/google-collect-1.0-rc5/google-collect-1.0-rc5.jar
+SPARK_CLASSPATH+=:$FWDIR/third_party/hadoop-0.20.0/hadoop-0.20.0-core.jar
+SPARK_CLASSPATH+=:third_party/scalatest-1.2-for-scala-2.8.0.RC3-SNAPSHOT.jar
+SPARK_CLASSPATH+=:third_party/scalacheck_2.8.0.RC3-1.7.jar
for jar in $FWDIR/third_party/hadoop-0.20.0/lib/*.jar; do
- CLASSPATH+=:$jar
+ SPARK_CLASSPATH+=:$jar
done
-export CLASSPATH
+export SPARK_CLASSPATH
+export CLASSPATH=$SPARK_CLASSPATH # Needed for spark-shell
if [ -n "$SCALA_HOME" ]; then
SCALA=${SCALA_HOME}/bin/scala
@@ -30,4 +43,4 @@ else
SCALA=scala
fi
-exec $SCALA -cp $CLASSPATH $@
+exec $SCALA -cp $SPARK_CLASSPATH $@