From 1cbef081e3d1dcc647b49a2f2b5f13ceaa0f611d Mon Sep 17 00:00:00 2001
From: Patrick Wendell
Date: Mon, 30 Dec 2013 12:46:09 -0800
Subject: Response to Shivaram's review

---
 core/src/main/scala/org/apache/spark/Logging.scala |  2 +-
 spark-class                                        | 31 ++++++++++++----------
 2 files changed, 18 insertions(+), 15 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala
index a8892737fa..b97697d587 100644
--- a/core/src/main/scala/org/apache/spark/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/Logging.scala
@@ -93,7 +93,7 @@ trait Logging {
   // threads do it concurrently (as SLF4J initialization is not thread safe).
   protected def initLogging() {
     // If Log4j doesn't seem initialized, load a default properties file
-    def log4jInitialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
+    val log4jInitialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
     if (!log4jInitialized) {
       val defaultLogProps = "org/apache/spark/default-log4j.properties"
       val classLoader = this.getClass.getClassLoader
diff --git a/spark-class b/spark-class
index 1c4323ee53..1858ea6247 100755
--- a/spark-class
+++ b/spark-class
@@ -115,27 +115,30 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
   fi
 fi
 
-if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
-  TOOLS_DIR="$FWDIR"/tools
-  SPARK_TOOLS_JAR=""
-  if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
-    # Use the JAR from the SBT build
-    export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
-  fi
-  if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
-    # Use the JAR from the Maven build
-    # TODO: this also needs to become an assembly!
-    export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
-  fi
+TOOLS_DIR="$FWDIR"/tools
+SPARK_TOOLS_JAR=""
+if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
+  # Use the JAR from the SBT build
+  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
+fi
+if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
+  # Use the JAR from the Maven build
+  # TODO: this also needs to become an assembly!
+  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
 fi
 
 # Compute classpath using external script
 CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
-CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
+
+if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
+  CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
+fi
 
 if $cygwin; then
   CLASSPATH=`cygpath -wp $CLASSPATH`
-  export SPARK_TOOLS_JAR=`cygpath -w $SPARK_TOOLS_JAR`
+  if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
+    export SPARK_TOOLS_JAR=`cygpath -w $SPARK_TOOLS_JAR`
+  fi
 fi
 
 export CLASSPATH
-- 
cgit v1.2.3
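
Note on the Logging.scala hunk (an illustrative sketch added here, not part of the patch): the review fix replaces `def` with `val` because a Scala `def` re-evaluates its body on every reference, while a `val` is evaluated exactly once at its definition; `log4jInitialized` is only read once, so a one-shot `val` avoids re-querying the root logger's appenders. A minimal, self-contained sketch of that language-level difference:

```scala
// Illustrative only -- not code from the patch. Demonstrates that a `def` body
// runs on every access, while a `val` body runs exactly once, at definition time.
object DefVsVal {
  def main(args: Array[String]): Unit = {
    var defCalls = 0
    def viaDef: Int = { defCalls += 1; defCalls }   // body re-runs per reference

    val d1 = viaDef
    val d2 = viaDef
    println(s"def evaluated $defCalls times: $d1, $d2")   // def evaluated 2 times: 1, 2

    var valCalls = 0
    val viaVal: Int = { valCalls += 1; valCalls }   // body ran once, right here
    val v1 = viaVal
    val v2 = viaVal
    println(s"val evaluated $valCalls time: $v1, $v2")    // val evaluated 1 time: 1, 1
  }
}
```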