Diffstat (limited to 'launcher')
-rw-r--r--   launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java    28
-rw-r--r--   launcher/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java       144
2 files changed, 16 insertions, 156 deletions
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index 5e793a5c48..0a237ee73b 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -169,9 +169,11 @@ abstract class AbstractCommandBuilder {
"streaming", "tools", "sql/catalyst", "sql/core", "sql/hive", "sql/hive-thriftserver",
"yarn", "launcher");
if (prependClasses) {
- System.err.println(
- "NOTE: SPARK_PREPEND_CLASSES is set, placing locally compiled Spark classes ahead of " +
- "assembly.");
+ if (!isTesting) {
+ System.err.println(
+ "NOTE: SPARK_PREPEND_CLASSES is set, placing locally compiled Spark classes ahead of " +
+ "assembly.");
+ }
for (String project : projects) {
addToClassPath(cp, String.format("%s/%s/target/scala-%s/classes", sparkHome, project,
scala));
@@ -200,7 +202,7 @@ abstract class AbstractCommandBuilder {
// For the user code case, we fall back to looking for the Spark assembly under SPARK_HOME.
// That duplicates some of the code in the shell scripts that look for the assembly, though.
String assembly = getenv(ENV_SPARK_ASSEMBLY);
- if (assembly == null && isEmpty(getenv("SPARK_TESTING"))) {
+ if (assembly == null && !isTesting) {
assembly = findAssembly();
}
addToClassPath(cp, assembly);
@@ -215,12 +217,14 @@ abstract class AbstractCommandBuilder {
libdir = new File(sparkHome, "lib_managed/jars");
      }

- checkState(libdir.isDirectory(), "Library directory '%s' does not exist.",
- libdir.getAbsolutePath());
- for (File jar : libdir.listFiles()) {
- if (jar.getName().startsWith("datanucleus-")) {
- addToClassPath(cp, jar.getAbsolutePath());
+ if (libdir.isDirectory()) {
+ for (File jar : libdir.listFiles()) {
+ if (jar.getName().startsWith("datanucleus-")) {
+ addToClassPath(cp, jar.getAbsolutePath());
+ }
}
+ } else {
+ checkState(isTesting, "Library directory '%s' does not exist.", libdir.getAbsolutePath());
      }

addToClassPath(cp, getenv("HADOOP_CONF_DIR"));
@@ -256,15 +260,15 @@ abstract class AbstractCommandBuilder {
return scala;
}
String sparkHome = getSparkHome();
- File scala210 = new File(sparkHome, "assembly/target/scala-2.10");
- File scala211 = new File(sparkHome, "assembly/target/scala-2.11");
+ File scala210 = new File(sparkHome, "launcher/target/scala-2.10");
+ File scala211 = new File(sparkHome, "launcher/target/scala-2.11");
checkState(!scala210.isDirectory() || !scala211.isDirectory(),
"Presence of build for both scala versions (2.10 and 2.11) detected.\n" +
"Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
if (scala210.isDirectory()) {
return "2.10";
} else {
- checkState(scala211.isDirectory(), "Cannot find any assembly build directories.");
+ checkState(scala211.isDirectory(), "Cannot find any build directories.");
return "2.11";
}
}
diff --git a/launcher/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java b/launcher/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
deleted file mode 100644
index d0c26dd056..0000000000
--- a/launcher/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.launcher;
-
-import java.io.BufferedReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import static org.junit.Assert.*;
-
-/**
- * These tests require the Spark assembly to be built before they can be run.
- */
-public class SparkLauncherSuite {
-
- private static final Logger LOG = LoggerFactory.getLogger(SparkLauncherSuite.class);
-
- @Test
- public void testSparkArgumentHandling() throws Exception {
- SparkLauncher launcher = new SparkLauncher()
- .setSparkHome(System.getProperty("spark.test.home"));
- SparkSubmitOptionParser opts = new SparkSubmitOptionParser();
-
- launcher.addSparkArg(opts.HELP);
- try {
- launcher.addSparkArg(opts.PROXY_USER);
- fail("Expected IllegalArgumentException.");
- } catch (IllegalArgumentException e) {
- // Expected.
- }
-
- launcher.addSparkArg(opts.PROXY_USER, "someUser");
- try {
- launcher.addSparkArg(opts.HELP, "someValue");
- fail("Expected IllegalArgumentException.");
- } catch (IllegalArgumentException e) {
- // Expected.
- }
-
- launcher.addSparkArg("--future-argument");
- launcher.addSparkArg("--future-argument", "someValue");
-
- launcher.addSparkArg(opts.MASTER, "myMaster");
- assertEquals("myMaster", launcher.builder.master);
-
- launcher.addJar("foo");
- launcher.addSparkArg(opts.JARS, "bar");
- assertEquals(Arrays.asList("bar"), launcher.builder.jars);
-
- launcher.addFile("foo");
- launcher.addSparkArg(opts.FILES, "bar");
- assertEquals(Arrays.asList("bar"), launcher.builder.files);
-
- launcher.addPyFile("foo");
- launcher.addSparkArg(opts.PY_FILES, "bar");
- assertEquals(Arrays.asList("bar"), launcher.builder.pyFiles);
-
- launcher.setConf("spark.foo", "foo");
- launcher.addSparkArg(opts.CONF, "spark.foo=bar");
- assertEquals("bar", launcher.builder.conf.get("spark.foo"));
- }
-
- @Test
- public void testChildProcLauncher() throws Exception {
- SparkSubmitOptionParser opts = new SparkSubmitOptionParser();
- Map<String, String> env = new HashMap<String, String>();
- env.put("SPARK_PRINT_LAUNCH_COMMAND", "1");
-
- SparkLauncher launcher = new SparkLauncher(env)
- .setSparkHome(System.getProperty("spark.test.home"))
- .setMaster("local")
- .setAppResource("spark-internal")
- .addSparkArg(opts.CONF,
- String.format("%s=-Dfoo=ShouldBeOverriddenBelow", SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS))
- .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS,
- "-Dfoo=bar -Dtest.name=-testChildProcLauncher")
- .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path"))
- .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow")
- .setMainClass(SparkLauncherTestApp.class.getName())
- .addAppArgs("proc");
- final Process app = launcher.launch();
- new Redirector("stdout", app.getInputStream()).start();
- new Redirector("stderr", app.getErrorStream()).start();
- assertEquals(0, app.waitFor());
- }
-
- public static class SparkLauncherTestApp {
-
- public static void main(String[] args) throws Exception {
- assertEquals(1, args.length);
- assertEquals("proc", args[0]);
- assertEquals("bar", System.getProperty("foo"));
- assertEquals("local", System.getProperty(SparkLauncher.SPARK_MASTER));
- }
-
- }
-
- private static class Redirector extends Thread {
-
- private final InputStream in;
-
- Redirector(String name, InputStream in) {
- this.in = in;
- setName(name);
- setDaemon(true);
- }
-
- @Override
- public void run() {
- try {
- BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
- String line;
- while ((line = reader.readLine()) != null) {
- LOG.warn(line);
- }
- } catch (Exception e) {
- LOG.error("Error reading process output.", e);
- }
- }
-
- }
-
-}
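
For reference, a minimal standalone sketch of the behavior the AbstractCommandBuilder hunks introduce: scan the library directory for datanucleus-* jars when it exists, and treat a missing directory as a hard error only outside of testing. The class name and the SPARK_TESTING-based isTesting flag below are illustrative assumptions (the patch only shows that the old code checked getenv("SPARK_TESTING")); this is not the launcher's actual AbstractCommandBuilder.

import java.io.File;
import java.util.ArrayList;
import java.util.List;

public class DatanucleusClasspathSketch {

  public static void main(String[] args) {
    // Assumption: mirror the launcher's testing switch via the SPARK_TESTING env var.
    boolean isTesting = "1".equals(System.getenv("SPARK_TESTING"));
    File libdir = new File(args.length > 0 ? args[0] : "lib_managed/jars");

    List<String> cp = new ArrayList<>();
    if (libdir.isDirectory()) {
      // Same filter as the patched code path: pick up datanucleus-* jars only.
      File[] jars = libdir.listFiles();
      if (jars != null) {
        for (File jar : jars) {
          if (jar.getName().startsWith("datanucleus-")) {
            cp.add(jar.getAbsolutePath());
          }
        }
      }
    } else if (!isTesting) {
      // Outside of tests a missing library directory remains a hard error.
      throw new IllegalStateException(
        String.format("Library directory '%s' does not exist.", libdir.getAbsolutePath()));
    }

    System.out.println("datanucleus classpath entries: " + cp);
  }
}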