author    Josh Rosen <joshrosen@databricks.com>  2016-03-10 23:28:34 -0800
committer Josh Rosen <joshrosen@databricks.com>  2016-03-10 23:28:34 -0800
commit    6ca990fb366cf68cd9d5afb433725d28f07e51a0 (patch)
tree      036d4b8afc9f8f14f95e5f31e92cabcd51850242 /launcher
parent    d18276cb1d82790a402960835e112aebd0c55513 (diff)
[SPARK-13294][PROJECT INFRA] Remove MiMa's dependency on spark-class / Spark assembly
This patch removes the need to build a full Spark assembly before running the `dev/mima` script.

- I modified the `tools` project to remove a direct dependency on Spark, so `sbt/sbt tools/fullClasspath` will now return the classpath for the `GenerateMIMAIgnore` class itself plus its own dependencies.
- This required me to delete two classes full of dead code that we don't use anymore.
- `GenerateMIMAIgnore` now uses [ClassUtil](http://software.clapper.org/classutil/) to find all of the Spark classes rather than our homemade JAR traversal code. The problem in our own code was that it didn't handle directories of classes properly, which is necessary in order to generate excludes with an assembly-free Spark build (see the sketch after this message).
- `./dev/mima` no longer runs through `spark-class`, eliminating the need to reason about classpath ordering between `SPARK_CLASSPATH` and the assembly.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #11178 from JoshRosen/remove-assembly-in-run-tests.
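For context, here is a minimal sketch (not the patch's actual code) of the ClassUtil-based discovery described above. `ClassFinder` accepts both jars and directories of compiled `.class` files, which is exactly the property the homemade JAR traversal lacked; the classpath entries below are hypothetical placeholders.

```scala
import java.io.File
import org.clapper.classutil.ClassFinder

object ListSparkClasses {
  def main(args: Array[String]): Unit = {
    // Entries may be jars or directories of compiled classes;
    // ClassFinder walks both uniformly.
    val classpath = Seq(
      new File("core/target/classes"),          // hypothetical directory entry
      new File("launcher/target/launcher.jar")  // hypothetical jar entry
    )
    val finder = ClassFinder(classpath)
    finder.getClasses
      .map(_.name)
      .filter(_.startsWith("org.apache.spark"))
      .foreach(println)
  }
}
```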
Diffstat (limited to 'launcher')
-rw-r--r-- launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java | 22
1 file changed, 0 insertions(+), 22 deletions(-)
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
index 40187236f2..6b9d36cc0b 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
@@ -17,12 +17,10 @@
package org.apache.spark.launcher;
-import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import java.util.regex.Pattern;
import static org.apache.spark.launcher.CommandBuilderUtils.*;
@@ -76,26 +74,6 @@ class SparkClassCommandBuilder extends AbstractCommandBuilder {
javaOptsKeys.add("SPARK_DAEMON_JAVA_OPTS");
javaOptsKeys.add("SPARK_SHUFFLE_OPTS");
memKey = "SPARK_DAEMON_MEMORY";
- } else if (className.startsWith("org.apache.spark.tools.")) {
- String sparkHome = getSparkHome();
- File toolsDir = new File(join(File.separator, sparkHome, "tools", "target",
-   "scala-" + getScalaVersion()));
- checkState(toolsDir.isDirectory(), "Cannot find tools build directory.");
-
- Pattern re = Pattern.compile("spark-tools_.*\\.jar");
- for (File f : toolsDir.listFiles()) {
- if (re.matcher(f.getName()).matches()) {
- extraClassPath = f.getAbsolutePath();
- break;
- }
- }
-
- checkState(extraClassPath != null,
-   "Failed to find Spark Tools Jar in %s.\n" +
-   "You need to run \"build/sbt tools/package\" before running %s.",
-   toolsDir.getAbsolutePath(), className);
-
- javaOptsKeys.add("SPARK_JAVA_OPTS");
} else {
javaOptsKeys.add("SPARK_JAVA_OPTS");
memKey = "SPARK_DRIVER_MEMORY";