about summary refs log tree commit diff
path: root/launcher
diff options
context:
space:
mode:
Diffstat (limited to 'launcher')
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java | 47
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java    |  4
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java | 11
3 files changed, 32 insertions, 30 deletions
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index d02b2a4994..7a5e37c501 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -144,10 +144,26 @@ abstract class AbstractCommandBuilder {
boolean isTesting = "1".equals(getenv("SPARK_TESTING"));
if (prependClasses || isTesting) {
String scala = getScalaVersion();
- List<String> projects = Arrays.asList("core", "repl", "mllib", "graphx",
- "streaming", "tools", "sql/catalyst", "sql/core", "sql/hive", "sql/hive-thriftserver",
- "yarn", "launcher",
- "common/network-common", "common/network-shuffle", "common/network-yarn");
+ List<String> projects = Arrays.asList(
+ "common/network-common",
+ "common/network-shuffle",
+ "common/network-yarn",
+ "common/sketch",
+ "common/tags",
+ "common/unsafe",
+ "core",
+ "examples",
+ "graphx",
+ "launcher",
+ "mllib",
+ "repl",
+ "sql/catalyst",
+ "sql/core",
+ "sql/hive",
+ "sql/hive-thriftserver",
+ "streaming",
+ "yarn"
+ );
if (prependClasses) {
if (!isTesting) {
System.err.println(
@@ -174,31 +190,12 @@ abstract class AbstractCommandBuilder {
// Add Spark jars to the classpath. For the testing case, we rely on the test code to set and
// propagate the test classpath appropriately. For normal invocation, look for the jars
// directory under SPARK_HOME.
- String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting);
+ boolean isTestingSql = "1".equals(getenv("SPARK_SQL_TESTING"));
+ String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting && !isTestingSql);
if (jarsDir != null) {
addToClassPath(cp, join(File.separator, jarsDir, "*"));
}
- // Datanucleus jars must be included on the classpath. Datanucleus jars do not work if only
- // included in the uber jar as plugin.xml metadata is lost. Both sbt and maven will populate
- // "lib_managed/jars/" with the datanucleus jars when Spark is built with Hive
- File libdir;
- if (new File(sparkHome, "RELEASE").isFile()) {
- libdir = new File(sparkHome, "lib");
- } else {
- libdir = new File(sparkHome, "lib_managed/jars");
- }
-
- if (libdir.isDirectory()) {
- for (File jar : libdir.listFiles()) {
- if (jar.getName().startsWith("datanucleus-")) {
- addToClassPath(cp, jar.getAbsolutePath());
- }
- }
- } else {
- checkState(isTesting, "Library directory '%s' does not exist.", libdir.getAbsolutePath());
- }
-
addToClassPath(cp, getenv("HADOOP_CONF_DIR"));
addToClassPath(cp, getenv("YARN_CONF_DIR"));
addToClassPath(cp, getenv("SPARK_DIST_CLASSPATH"));
diff --git a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
index a08c8dcba4..91586aad7b 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
@@ -358,12 +358,12 @@ class CommandBuilderUtils {
// TODO: change to the correct directory once the assembly build is changed.
File libdir;
if (new File(sparkHome, "RELEASE").isFile()) {
- libdir = new File(sparkHome, "lib");
+ libdir = new File(sparkHome, "jars");
checkState(!failIfNotFound || libdir.isDirectory(),
"Library directory '%s' does not exist.",
libdir.getAbsolutePath());
} else {
- libdir = new File(sparkHome, String.format("assembly/target/scala-%s", scalaVersion));
+ libdir = new File(sparkHome, String.format("assembly/target/scala-%s/jars", scalaVersion));
if (!libdir.isDirectory()) {
checkState(!failIfNotFound,
"Library directory '%s' does not exist; make sure Spark is built.",
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index 56e4107c5a..c31c42cd3a 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -336,6 +336,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
}
private List<String> findExamplesJars() {
+ boolean isTesting = "1".equals(getenv("SPARK_TESTING"));
List<String> examplesJars = new ArrayList<>();
String sparkHome = getSparkHome();
@@ -346,11 +347,15 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
jarsDir = new File(sparkHome,
String.format("examples/target/scala-%s/jars", getScalaVersion()));
}
- checkState(jarsDir.isDirectory(), "Examples jars directory '%s' does not exist.",
+
+ boolean foundDir = jarsDir.isDirectory();
+ checkState(isTesting || foundDir, "Examples jars directory '%s' does not exist.",
jarsDir.getAbsolutePath());
- for (File f: jarsDir.listFiles()) {
- examplesJars.add(f.getAbsolutePath());
+ if (foundDir) {
+ for (File f: jarsDir.listFiles()) {
+ examplesJars.add(f.getAbsolutePath());
+ }
}
return examplesJars;
}