From 6ca990fb366cf68cd9d5afb433725d28f07e51a0 Mon Sep 17 00:00:00 2001
From: Josh Rosen
Date: Thu, 10 Mar 2016 23:28:34 -0800
Subject: [SPARK-13294][PROJECT INFRA] Remove MiMa's dependency on spark-class / Spark assembly

This patch removes the need to build a full Spark assembly before running
the `dev/mima` script.

- I modified the `tools` project to remove a direct dependency on Spark, so
  `sbt/sbt tools/fullClasspath` will now return the classpath for the
  `GenerateMIMAIgnore` class itself plus its own dependencies.
   - This required me to delete two classes full of dead code that we don't
     use anymore.
- `GenerateMIMAIgnore` now uses [ClassUtil](http://software.clapper.org/classutil/)
  to find all of the Spark classes rather than our homemade JAR traversal code
  (a minimal usage sketch follows the patch below). The problem in our own code
  was that it didn't handle folders of classes properly, which is necessary in
  order to generate excludes with an assembly-free Spark build.
- `./dev/mima` no longer runs through `spark-class`, eliminating the need to
  reason about classpath ordering between `SPARK_CLASSPATH` and the assembly.

Author: Josh Rosen

Closes #11178 from JoshRosen/remove-assembly-in-run-tests.
---
 .../spark/launcher/SparkClassCommandBuilder.java | 22 ----------------------
 1 file changed, 22 deletions(-)

diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
index 40187236f2..6b9d36cc0b 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
@@ -17,12 +17,10 @@
 
 package org.apache.spark.launcher;
 
-import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.regex.Pattern;
 
 import static org.apache.spark.launcher.CommandBuilderUtils.*;
 
@@ -76,26 +74,6 @@ class SparkClassCommandBuilder extends AbstractCommandBuilder {
       javaOptsKeys.add("SPARK_DAEMON_JAVA_OPTS");
       javaOptsKeys.add("SPARK_SHUFFLE_OPTS");
       memKey = "SPARK_DAEMON_MEMORY";
-    } else if (className.startsWith("org.apache.spark.tools.")) {
-      String sparkHome = getSparkHome();
-      File toolsDir = new File(join(File.separator, sparkHome, "tools", "target",
-        "scala-" + getScalaVersion()));
-      checkState(toolsDir.isDirectory(), "Cannot find tools build directory.");
-
-      Pattern re = Pattern.compile("spark-tools_.*\\.jar");
-      for (File f : toolsDir.listFiles()) {
-        if (re.matcher(f.getName()).matches()) {
-          extraClassPath = f.getAbsolutePath();
-          break;
-        }
-      }
-
-      checkState(extraClassPath != null,
-        "Failed to find Spark Tools Jar in %s.\n" +
-        "You need to run \"build/sbt tools/package\" before running %s.",
-        toolsDir.getAbsolutePath(), className);
-
-      javaOptsKeys.add("SPARK_JAVA_OPTS");
     } else {
       javaOptsKeys.add("SPARK_JAVA_OPTS");
       memKey = "SPARK_DRIVER_MEMORY";
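For context, here is a minimal sketch of the ClassUtil capability the commit
message refers to: `ClassFinder` can enumerate classes from directories of
loose `.class` files as well as from JARs, which is what the removed homemade
traversal could not do. This is not Spark's actual `GenerateMIMAIgnore` code;
the object name, the paths, and the package-prefix filter below are
hypothetical and chosen only for illustration.

```scala
import java.io.File

import org.clapper.classutil.ClassFinder

object ClassFinderSketch {
  def main(args: Array[String]): Unit = {
    // ClassFinder accepts a mix of directories and JARs, so an
    // assembly-free build whose classes sit under target/ works
    // the same as a packaged jar.
    val classpath = Seq(
      new File("core/target/scala-2.11/classes"), // hypothetical directory
      new File("lib/example-dependency.jar")      // hypothetical jar
    )

    // ClassFinder() with no arguments would scan java.class.path instead.
    val finder = ClassFinder(classpath)

    // getClasses() yields ClassInfo records lazily; keep only classes
    // under the package prefix of interest.
    finder.getClasses()
      .map(_.name)
      .filter(_.startsWith("org.apache.spark"))
      .foreach(println)
  }
}
```

Because `ClassFinder` treats a directory of compiled classes like a jar,
`dev/mima` can generate excludes straight from sbt's output directories
without first building the Spark assembly.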