author    Marcelo Vanzin <vanzin@cloudera.com>  2016-03-30 13:59:10 -0700
committer Marcelo Vanzin <vanzin@cloudera.com>  2016-03-30 13:59:10 -0700
commit    bdabfd43f6e4900b48010dd00ffa48ed5fd15997 (patch)
tree      d8366ba2dc0108b5a0e0ed636719c38dec5cd1bd
parent    d46c71b39da92f5cabf6d9057c953c52f7f3f965 (diff)
[SPARK-13955][YARN] Also look for Spark jars in the build directory.
Move the logic to find Spark jars to CommandBuilderUtils and make it
available for YARN code, so that it's possible to easily launch Spark
on YARN from a build directory.

Tested by running SparkPi from the build directory on YARN.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #11970 from vanzin/SPARK-13955.
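For reference, the lookup contract this patch introduces: a distribution
(marked by a RELEASE file under SPARK_HOME) keeps its jars in lib/, while a
build tree keeps them under assembly/target/scala-<version>. A minimal
standalone Scala sketch of that logic (the object and method names here are
illustrative, not the launcher API):

    import java.io.File

    object JarsDirLookup {
      // Mirrors the patch's branching: RELEASE file present -> lib/,
      // otherwise the per-Scala-version assembly output directory.
      def resolveJarsDir(sparkHome: String, scalaVersion: String): Option[File] = {
        val libdir =
          if (new File(sparkHome, "RELEASE").isFile) new File(sparkHome, "lib")
          else new File(sparkHome, s"assembly/target/scala-$scalaVersion")
        if (libdir.isDirectory) Some(libdir) else None
      }
    }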
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java | 23
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java    | 25
-rw-r--r--  yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala                | 11
-rw-r--r--  yarn/src/main/scala/org/apache/spark/launcher/YarnCommandBuilderUtils.scala  |  9
-rw-r--r--  yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala           |  3
5 files changed, 42 insertions(+), 29 deletions(-)
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index 587fda7a3c..d02b2a4994 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -174,7 +174,7 @@ abstract class AbstractCommandBuilder {
// Add Spark jars to the classpath. For the testing case, we rely on the test code to set and
// propagate the test classpath appropriately. For normal invocation, look for the jars
// directory under SPARK_HOME.
- String jarsDir = findJarsDir(!isTesting);
+ String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting);
if (jarsDir != null) {
addToClassPath(cp, join(File.separator, jarsDir, "*"));
}
@@ -311,27 +311,6 @@ abstract class AbstractCommandBuilder {
return props;
}
- private String findJarsDir(boolean failIfNotFound) {
- // TODO: change to the correct directory once the assembly build is changed.
- String sparkHome = getSparkHome();
- File libdir;
- if (new File(sparkHome, "RELEASE").isFile()) {
- libdir = new File(sparkHome, "lib");
- checkState(!failIfNotFound || libdir.isDirectory(),
- "Library directory '%s' does not exist.",
- libdir.getAbsolutePath());
- } else {
- libdir = new File(sparkHome, String.format("assembly/target/scala-%s", getScalaVersion()));
- if (!libdir.isDirectory()) {
- checkState(!failIfNotFound,
- "Library directory '%s' does not exist; make sure Spark is built.",
- libdir.getAbsolutePath());
- libdir = null;
- }
- }
- return libdir != null ? libdir.getAbsolutePath() : null;
- }
-
private String getConfDir() {
String confDir = getenv("SPARK_CONF_DIR");
return confDir != null ? confDir : join(File.separator, getSparkHome(), "conf");
diff --git a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
index 39fdf300e2..1e55aad5c9 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
@@ -349,4 +349,29 @@ class CommandBuilderUtils {
return Integer.parseInt(version[1]);
}
}
+
+ /**
+ * Find the location of the Spark jars dir, depending on whether we're looking at a build
+ * or a distribution directory.
+ */
+ static String findJarsDir(String sparkHome, String scalaVersion, boolean failIfNotFound) {
+ // TODO: change to the correct directory once the assembly build is changed.
+ File libdir;
+ if (new File(sparkHome, "RELEASE").isFile()) {
+ libdir = new File(sparkHome, "lib");
+ checkState(!failIfNotFound || libdir.isDirectory(),
+ "Library directory '%s' does not exist.",
+ libdir.getAbsolutePath());
+ } else {
+ libdir = new File(sparkHome, String.format("assembly/target/scala-%s", scalaVersion));
+ if (!libdir.isDirectory()) {
+ checkState(!failIfNotFound,
+ "Library directory '%s' does not exist; make sure Spark is built.",
+ libdir.getAbsolutePath());
+ libdir = null;
+ }
+ }
+ return libdir != null ? libdir.getAbsolutePath() : null;
+ }
+
}
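A hypothetical call site for the new helper. Note that CommandBuilderUtils is
package-private, so callers must live in org.apache.spark.launcher (as
YarnCommandBuilderUtils below does); the SPARK_HOME path and Scala version are
example values, not from the patch:

    package org.apache.spark.launcher

    object FindJarsDirDemo {
      def main(args: Array[String]): Unit = {
        // failIfNotFound = true: checkState() fails if the directory is missing.
        val jarsDir = CommandBuilderUtils.findJarsDir("/opt/spark", "2.11", true)
        println(s"Spark jars dir: $jarsDir")
      }
    }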
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 6bbc8c2dfa..7b29c1ae4d 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -468,12 +468,11 @@ private[spark] class Client(
// No configuration, so fall back to uploading local jar files.
logWarning(s"Neither ${SPARK_JARS.key} nor ${SPARK_ARCHIVE.key} is set, falling back " +
"to uploading libraries under SPARK_HOME.")
- val jarsDir = new File(sparkConf.getenv("SPARK_HOME"), "lib")
- if (jarsDir.isDirectory()) {
- jarsDir.listFiles().foreach { f =>
- if (f.isFile() && f.getName().toLowerCase().endsWith(".jar")) {
- distribute(f.getAbsolutePath(), targetDir = Some(LOCALIZED_LIB_DIR))
- }
+ val jarsDir = new File(YarnCommandBuilderUtils.findJarsDir(
+ sparkConf.getenv("SPARK_HOME")))
+ jarsDir.listFiles().foreach { f =>
+ if (f.isFile() && f.getName().toLowerCase().endsWith(".jar")) {
+ distribute(f.getAbsolutePath(), targetDir = Some(LOCALIZED_LIB_DIR))
}
}
}
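The old isDirectory() guard is gone because findJarsDir is invoked with
failIfNotFound = true and therefore fails fast when no jars directory exists.
A sketch of the resulting fallback, with distribute() abstracted into a
callback since the real helper is private to Client:

    import java.io.File

    object UploadLocalJarsSketch {
      // jarsDirPath is assumed to exist (findJarsDir already validated it).
      def uploadLocalJars(jarsDirPath: String)(distribute: String => Unit): Unit = {
        new File(jarsDirPath).listFiles()
          .filter(f => f.isFile && f.getName.toLowerCase.endsWith(".jar"))
          .foreach(f => distribute(f.getAbsolutePath))
      }
    }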
diff --git a/yarn/src/main/scala/org/apache/spark/launcher/YarnCommandBuilderUtils.scala b/yarn/src/main/scala/org/apache/spark/launcher/YarnCommandBuilderUtils.scala
index 7d246bf407..6c3556a2ee 100644
--- a/yarn/src/main/scala/org/apache/spark/launcher/YarnCommandBuilderUtils.scala
+++ b/yarn/src/main/scala/org/apache/spark/launcher/YarnCommandBuilderUtils.scala
@@ -19,6 +19,7 @@ package org.apache.spark.launcher
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
+import scala.util.Properties
/**
* Exposes methods from the launcher library that are used by the YARN backend.
@@ -29,6 +30,14 @@ private[spark] object YarnCommandBuilderUtils {
CommandBuilderUtils.quoteForBatchScript(arg)
}
+ def findJarsDir(sparkHome: String): String = {
+ val scalaVer = Properties.versionNumberString
+ .split("\\.")
+ .take(2)
+ .mkString(".")
+ CommandBuilderUtils.findJarsDir(sparkHome, scalaVer, true)
+ }
+
/**
* Adds the perm gen configuration to the list of java options if needed and not yet added.
*
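The new wrapper derives the Scala binary version from the runtime's full
version string so it matches the assembly output directory's name; for
example:

    import scala.util.Properties

    object ScalaBinaryVersion {
      // e.g. "2.11.8" -> "2.11", matching assembly/target/scala-2.11.
      val value: String =
        Properties.versionNumberString.split("\\.").take(2).mkString(".")
    }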
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
index 24472e006b..e3613a93ed 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.deploy.yarn
-import java.io.File
+import java.io.{File, FileOutputStream}
import java.net.URI
import java.util.Properties
@@ -274,6 +274,7 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
val jarsDir = new File(temp, "lib")
assert(jarsDir.mkdir())
val jar = TestUtils.createJarWithFiles(Map(), jarsDir)
+ new FileOutputStream(new File(temp, "RELEASE")).close()
val sparkConf = new SparkConfWithEnv(Map("SPARK_HOME" -> temp.getAbsolutePath()))
val client = createClient(sparkConf)