From 08cda89e8a05caf453f46fa1dcf00d67535805f1 Mon Sep 17 00:00:00 2001
From: Matei Zaharia
Date: Sat, 17 Mar 2012 13:39:14 -0700
Subject: Further fixes to how Mesos is found and used

---
 conf/spark-env.sh.template                     |  2 +-
 core/src/main/scala/spark/MesosScheduler.scala |  1 -
 repl/src/test/scala/spark/repl/ReplSuite.scala |  2 +-
 run                                            | 44 ++++++++++++++------------
 4 files changed, 25 insertions(+), 24 deletions(-)

diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template
index 6852b23a34..532a635a1b 100755
--- a/conf/spark-env.sh.template
+++ b/conf/spark-env.sh.template
@@ -2,7 +2,7 @@
 
 # Set Spark environment variables for your site in this file. Some useful
 # variables to set are:
-# - MESOS_HOME, to point to your Mesos installation
+# - MESOS_NATIVE_LIBRARY, to point to your Mesos native library (libmesos.so)
 # - SCALA_HOME, to point to your Scala installation
 # - SPARK_CLASSPATH, to add elements to Spark's classpath
 # - SPARK_JAVA_OPTS, to add JVM options
diff --git a/core/src/main/scala/spark/MesosScheduler.scala b/core/src/main/scala/spark/MesosScheduler.scala
index bc9a65457d..71b0c29162 100644
--- a/core/src/main/scala/spark/MesosScheduler.scala
+++ b/core/src/main/scala/spark/MesosScheduler.scala
@@ -133,7 +133,6 @@ private class MesosScheduler(
           .build())
       }
     }
-    environment.build()
     val memory = Resource.newBuilder()
       .setName("mem")
       .setType(Value.Type.SCALAR)
diff --git a/repl/src/test/scala/spark/repl/ReplSuite.scala b/repl/src/test/scala/spark/repl/ReplSuite.scala
index b8442238b8..15ebf0c9b8 100644
--- a/repl/src/test/scala/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/spark/repl/ReplSuite.scala
@@ -119,7 +119,7 @@ class ReplSuite extends FunSuite {
     assertContains("res2: Array[Int] = Array(5, 0, 0, 0, 0)", output)
   }
 
-  if (System.getenv("MESOS_HOME") != null) {
+  if (System.getenv("MESOS_NATIVE_LIBRARY") != null) {
     test ("running on Mesos") {
       val output = runInterpreter("localquiet", """
         var v = 7
diff --git a/run b/run
index c7fc65b5f6..2bc025ec0b 100755
--- a/run
+++ b/run
@@ -13,22 +13,22 @@ if [ -e $FWDIR/conf/spark-env.sh ] ; then
   . $FWDIR/conf/spark-env.sh
 fi
 
+# If the user specifies a Mesos JAR, put it before our included one on the classpath
 MESOS_CLASSPATH=""
-MESOS_LIBRARY_PATH=""
-
-if [ "x$MESOS_HOME" != "x" ] ; then
-  MESOS_CLASSPATH="$MESOS_HOME/lib/java/mesos.jar"
-  MESOS_LIBRARY_PATH="$MESOS_HOME/lib/java"
+if [ "x$MESOS_JAR" != "x" ] ; then
+  MESOS_CLASSPATH="$MESOS_JAR"
 fi
 
+# Figure out how much memory to use per executor and set it as an environment
+# variable so that our process sees it and can report it to Mesos
 if [ "x$SPARK_MEM" == "x" ] ; then
   SPARK_MEM="512m"
 fi
-export SPARK_MEM # So that the process sees it and can report it to Mesos
+export SPARK_MEM
 
 # Set JAVA_OPTS to be able to load native libraries and to set heap size
 JAVA_OPTS="$SPARK_JAVA_OPTS"
-JAVA_OPTS+=" -Djava.library.path=$SPARK_LIBRARY_PATH:$FWDIR/lib:$FWDIR/src/main/native:$MESOS_LIBRARY_PATH"
+JAVA_OPTS+=" -Djava.library.path=$SPARK_LIBRARY_PATH"
 JAVA_OPTS+=" -Xms$SPARK_MEM -Xmx$SPARK_MEM"
 # Load extra JAVA_OPTS from conf/java-opts, if it exists
 if [ -e $FWDIR/conf/java-opts ] ; then
@@ -36,35 +36,37 @@ if [ -e $FWDIR/conf/java-opts ] ; then
 fi
 export JAVA_OPTS
 
-CORE_DIR=$FWDIR/core
-REPL_DIR=$FWDIR/repl
-EXAMPLES_DIR=$FWDIR/examples
-BAGEL_DIR=$FWDIR/bagel
+CORE_DIR="$FWDIR/core"
+REPL_DIR="$FWDIR/repl"
+EXAMPLES_DIR="$FWDIR/examples"
+BAGEL_DIR="$FWDIR/bagel"
 
 # Build up classpath
-CLASSPATH="$SPARK_CLASSPATH:$CORE_DIR/target/scala-$SCALA_VERSION/classes:$MESOS_CLASSPATH"
-CLASSPATH+=:$FWDIR/conf
-CLASSPATH+=:$REPL_DIR/target/scala-$SCALA_VERSION/classes
-CLASSPATH+=:$EXAMPLES_DIR/target/scala-$SCALA_VERSION/classes
+CLASSPATH="$SPARK_CLASSPATH"
+CLASSPATH+=":$MESOS_CLASSPATH"
+CLASSPATH+=":$FWDIR/conf"
+CLASSPATH+=":$CORE_DIR/target/scala-$SCALA_VERSION/classes"
+CLASSPATH+=":$REPL_DIR/target/scala-$SCALA_VERSION/classes"
+CLASSPATH+=":$EXAMPLES_DIR/target/scala-$SCALA_VERSION/classes"
 for jar in `find $CORE_DIR/lib -name '*jar'`; do
-  CLASSPATH+=:$jar
+  CLASSPATH+=":$jar"
 done
 for jar in `find $FWDIR/lib_managed/jars -name '*jar'`; do
-  CLASSPATH+=:$jar
+  CLASSPATH+=":$jar"
 done
 for jar in `find $FWDIR/lib_managed/bundles -name '*jar'`; do
-  CLASSPATH+=:$jar
+  CLASSPATH+=":$jar"
 done
 for jar in `find $REPL_DIR/lib -name '*jar'`; do
-  CLASSPATH+=:$jar
+  CLASSPATH+=":$jar"
done
 CLASSPATH+=:$BAGEL_DIR/target/scala-$SCALA_VERSION/classes
 export CLASSPATH # Needed for spark-shell
 
 if [ -n "$SCALA_HOME" ]; then
-  SCALA=${SCALA_HOME}/bin/scala
+  SCALA="${SCALA_HOME}/bin/scala"
 else
   SCALA=scala
 fi
 
-exec $SCALA -cp $CLASSPATH "$@"
+exec "$SCALA" -cp "$CLASSPATH" "$@"
-- 
cgit v1.2.3
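
For reference, a minimal sketch of what a site's conf/spark-env.sh might look like against the variables this patch introduces; the paths below are illustrative placeholders, not values taken from the patch:

#!/usr/bin/env bash
# Point to the Mesos native library (libmesos.so), replacing the old MESOS_HOME setting
export MESOS_NATIVE_LIBRARY=/usr/local/lib/libmesos.so     # placeholder path
# Optional: a Mesos JAR for ./run to put ahead of the bundled one on the classpath
export MESOS_JAR=/usr/local/share/java/mesos.jar           # placeholder path
# Per-executor memory; ./run falls back to 512m if this is unset
export SPARK_MEM=512m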