author     Patrick Wendell <pwendell@gmail.com>    2013-12-29 20:29:29 -0800
committer  Patrick Wendell <pwendell@gmail.com>    2013-12-29 23:14:33 -0800
commit     cffe1c1d5c0abfbb463fa91e8b340a9c535532fe (patch)
tree       884ee12344cd7fa046029c7aecec5d17f34a2a2d /spark-class
parent     72a17b69f517ad7149d3bcd3a89e2cf715a2f65f (diff)
SPARK-1008: Logging improvements
1. Adds a default log4j file that gets loaded if users haven't specified a log4j file (see the example below).
2. Isolates use of the tools assembly jar. I found this produced SLF4J warnings after building with SBT (and I've seen similar warnings on the mailing list).
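For point 1, the bundled defaults only apply when the user has not pointed log4j at a configuration of their own. A minimal sketch of supplying your own file instead, assuming this script still forwards SPARK_JAVA_OPTS to the launched JVM; the properties path is hypothetical:

  # Hypothetical override: a user-specified log4j file takes precedence,
  # so the bundled defaults are never loaded for this run.
  SPARK_JAVA_OPTS="-Dlog4j.configuration=file:/path/to/my-log4j.properties" \
    ./spark-class org.apache.spark.deploy.master.Master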
Diffstat (limited to 'spark-class')
-rwxr-xr-x  spark-class  22
1 file changed, 12 insertions, 10 deletions
diff --git a/spark-class b/spark-class
index 802e4aa104..1c4323ee53 100755
--- a/spark-class
+++ b/spark-class
@@ -115,16 +115,18 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
fi
fi
-TOOLS_DIR="$FWDIR"/tools
-SPARK_TOOLS_JAR=""
-if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
- # Use the JAR from the SBT build
- export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
-fi
-if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
- # Use the JAR from the Maven build
- # TODO: this also needs to become an assembly!
- export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
+if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
+ TOOLS_DIR="$FWDIR"/tools
+ SPARK_TOOLS_JAR=""
+ if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
+ # Use the JAR from the SBT build
+ export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
+ fi
+ if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
+ # Use the JAR from the Maven build
+ # TODO: this also needs to become an assembly!
+ export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
+ fi
fi
# Compute classpath using external script
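The hunk above means the tools assembly is now located and exported via SPARK_TOOLS_JAR only when the class being launched is the Java API completeness checker; for every other launch the lookup is skipped entirely, which is how the SLF4J warnings mentioned in the commit message are kept out of ordinary runs. A rough usage sketch (the second class is just an illustrative "ordinary" launch, not part of this change):

  # Only this invocation triggers the tools-jar lookup added above:
  ./spark-class org.apache.spark.tools.JavaAPICompletenessChecker

  # Any other class leaves SPARK_TOOLS_JAR unset, so the tools assembly
  # never enters the picture:
  ./spark-class org.apache.spark.deploy.master.Master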