about summary refs log tree commit diff
diff options
context:
space:
mode:
authorPatrick Wendell <pwendell@gmail.com>2014-04-28 17:26:57 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-04-28 17:26:57 -0700
commitcae054aaf41ca0ee585231896db67169b61af689 (patch)
tree0249c589120f7b14aad9c7d7ec7a31e71843dcf5
parent8421034e793c0960373a0a1d694ce334ad36e747 (diff)
downloadspark-cae054aaf41ca0ee585231896db67169b61af689.tar.gz
spark-cae054aaf41ca0ee585231896db67169b61af689.tar.bz2
spark-cae054aaf41ca0ee585231896db67169b61af689.zip
SPARK-1652: Spark submit should fail gracefully if YARN not enabled
Author: Patrick Wendell <pwendell@gmail.com> Closes #579 from pwendell/spark-submit-yarn-2 and squashes the following commits: 05e1b11 [Patrick Wendell] Small fix d2a40ad [Patrick Wendell] SPARK-1652: Spark submit should fail gracefully if YARN support not enabled
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala10
-rw-r--r--core/src/main/scala/org/apache/spark/util/Utils.scala6
2 files changed, 16 insertions, 0 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 24edc60684..c463ee0999 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -23,6 +23,7 @@ import java.net.{URI, URL}
import scala.collection.mutable.{ArrayBuffer, HashMap, Map}
import org.apache.spark.executor.ExecutorURLClassLoader
+import org.apache.spark.util.Utils
/**
* Scala code behind the spark-submit script. The script handles setting up the classpath with
@@ -128,6 +129,15 @@ object SparkSubmit {
childArgs += ("--class", appArgs.mainClass)
}
+ if (clusterManager == YARN) {
+ // The choice of class is arbitrary, could use any spark-yarn class
+ if (!Utils.classIsLoadable("org.apache.spark.deploy.yarn.Client") && !Utils.isTesting) {
+ val msg = "Could not load YARN classes. This copy of Spark may not have been compiled " +
+ "with YARN support."
+ throw new Exception(msg)
+ }
+ }
+
val options = List[OptionAssigner](
new OptionAssigner(appArgs.master, ALL_CLUSTER_MGRS, false, sysProp = "spark.master"),
new OptionAssigner(appArgs.driverExtraClassPath, STANDALONE | YARN, true,
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index b678604ff8..79f314c8dd 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -28,6 +28,7 @@ import scala.collection.Map
import scala.collection.mutable.ArrayBuffer
import scala.io.Source
import scala.reflect.ClassTag
+import scala.util.Try
import com.google.common.io.Files
import org.apache.commons.lang.SystemUtils
@@ -137,6 +138,11 @@ private[spark] object Utils extends Logging {
def getContextOrSparkClassLoader =
Option(Thread.currentThread().getContextClassLoader).getOrElse(getSparkClassLoader)
+ /** Determines whether the provided class is loadable in the current thread. */
+ def classIsLoadable(clazz: String): Boolean = {
+ Try { Class.forName(clazz, false, getContextOrSparkClassLoader) }.isSuccess
+ }
+
/**
* Primitive often used when writing {@link java.nio.ByteBuffer} to {@link java.io.DataOutput}.
*/