path: root/launcher/src/main
author    Marcelo Vanzin <vanzin@cloudera.com>    2015-08-28 12:33:40 -0700
committer Marcelo Vanzin <vanzin@cloudera.com>    2015-08-28 12:33:40 -0700
commit    c53c902fa9c458200245f919067b41dde9cd9418 (patch)
tree      c81fe06b5b0a110b308fbdfadbe63687369f6610 /launcher/src/main
parent    d3f87dc39480f075170817bbd00142967a938078 (diff)
[SPARK-9284] [TESTS] Allow all tests to run without an assembly.
This change aims at speeding up the dev cycle a little bit, by making sure that all tests behave the same w.r.t. where the code to be tested is loaded from. Namely, that means that tests don't rely on the assembly anymore, rather loading all needed classes from the build directories.

The main change is to make sure all build directories (classes and test-classes) are added to the classpath of child processes when running tests.

YarnClusterSuite required some custom code since the executors are run differently (i.e. not through the launcher library, like standalone and Mesos do).

I also found a couple of tests that could leak a SparkContext on failure, and added code to handle those.

With this patch, it's possible to run the following command from a clean source directory and have all tests pass:

    mvn -Pyarn -Phadoop-2.4 -Phive-thriftserver install

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #7629 from vanzin/SPARK-9284.
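For illustration only (not code from this patch): a minimal Java sketch of the idea above, assuming a hypothetical helper class and a shortened module list; the per-module target/scala-<version>/classes layout matches the paths visible in the hunks below.

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Hypothetical sketch: collect per-module build output directories (classes and
// test-classes) so child processes can load Spark classes without the assembly jar.
class BuildDirClasspathSketch {
  static List<String> buildDirEntries(String sparkHome, String scalaVersion) {
    List<String> cp = new ArrayList<>();
    // Shortened, illustrative module list; the real builder uses a longer one.
    for (String project : Arrays.asList("core", "launcher", "yarn", "sql/core")) {
      for (String kind : Arrays.asList("classes", "test-classes")) {
        File dir = new File(String.format("%s/%s/target/scala-%s/%s",
            sparkHome, project, scalaVersion, kind));
        if (dir.isDirectory()) {
          cp.add(dir.getAbsolutePath());   // only add directories that were actually built
        }
      }
    }
    return cp;
  }
}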
Diffstat (limited to 'launcher/src/main')
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java | 28
1 file changed, 16 insertions, 12 deletions
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index 5e793a5c48..0a237ee73b 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -169,9 +169,11 @@ abstract class AbstractCommandBuilder {
"streaming", "tools", "sql/catalyst", "sql/core", "sql/hive", "sql/hive-thriftserver",
"yarn", "launcher");
if (prependClasses) {
- System.err.println(
- "NOTE: SPARK_PREPEND_CLASSES is set, placing locally compiled Spark classes ahead of " +
- "assembly.");
+ if (!isTesting) {
+ System.err.println(
+ "NOTE: SPARK_PREPEND_CLASSES is set, placing locally compiled Spark classes ahead of " +
+ "assembly.");
+ }
for (String project : projects) {
addToClassPath(cp, String.format("%s/%s/target/scala-%s/classes", sparkHome, project,
scala));
@@ -200,7 +202,7 @@ abstract class AbstractCommandBuilder {
// For the user code case, we fall back to looking for the Spark assembly under SPARK_HOME.
// That duplicates some of the code in the shell scripts that look for the assembly, though.
String assembly = getenv(ENV_SPARK_ASSEMBLY);
- if (assembly == null && isEmpty(getenv("SPARK_TESTING"))) {
+ if (assembly == null && !isTesting) {
assembly = findAssembly();
}
addToClassPath(cp, assembly);
@@ -215,12 +217,14 @@ abstract class AbstractCommandBuilder {
libdir = new File(sparkHome, "lib_managed/jars");
}
- checkState(libdir.isDirectory(), "Library directory '%s' does not exist.",
- libdir.getAbsolutePath());
- for (File jar : libdir.listFiles()) {
- if (jar.getName().startsWith("datanucleus-")) {
- addToClassPath(cp, jar.getAbsolutePath());
+ if (libdir.isDirectory()) {
+ for (File jar : libdir.listFiles()) {
+ if (jar.getName().startsWith("datanucleus-")) {
+ addToClassPath(cp, jar.getAbsolutePath());
+ }
}
+ } else {
+ checkState(isTesting, "Library directory '%s' does not exist.", libdir.getAbsolutePath());
}
addToClassPath(cp, getenv("HADOOP_CONF_DIR"));
@@ -256,15 +260,15 @@ abstract class AbstractCommandBuilder {
return scala;
}
String sparkHome = getSparkHome();
- File scala210 = new File(sparkHome, "assembly/target/scala-2.10");
- File scala211 = new File(sparkHome, "assembly/target/scala-2.11");
+ File scala210 = new File(sparkHome, "launcher/target/scala-2.10");
+ File scala211 = new File(sparkHome, "launcher/target/scala-2.11");
checkState(!scala210.isDirectory() || !scala211.isDirectory(),
"Presence of build for both scala versions (2.10 and 2.11) detected.\n" +
"Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
if (scala210.isDirectory()) {
return "2.10";
} else {
- checkState(scala211.isDirectory(), "Cannot find any assembly build directories.");
+ checkState(scala211.isDirectory(), "Cannot find any build directories.");
return "2.11";
}
}
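As an aside (illustrative sketch, not the launcher's actual code): the lib_managed hunk above switches from asserting that the directory exists to scanning it only when present, failing only when not running under tests. The isTesting flag name comes from the diff; the helper below is hypothetical.

import java.io.File;
import java.util.List;

class DatanucleusScanSketch {
  // Hypothetical helper mirroring the pattern above: add datanucleus jars when the
  // library directory exists; treat a missing directory as fatal only outside of tests.
  static void addDatanucleusJars(File libdir, boolean isTesting, List<String> cp) {
    if (libdir.isDirectory()) {
      for (File jar : libdir.listFiles()) {
        if (jar.getName().startsWith("datanucleus-")) {
          cp.add(jar.getAbsolutePath());
        }
      }
    } else if (!isTesting) {
      throw new IllegalStateException(
          "Library directory '" + libdir.getAbsolutePath() + "' does not exist.");
    }
  }
}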