about summary refs log tree commit diff
path: root/core
diff options
context:
space:
mode:
author    Sandy Ryza <sandy@cloudera.com>    2014-03-06 17:12:58 -0800
committer Patrick Wendell <pwendell@gmail.com>    2014-03-06 17:12:58 -0800
commit328c73d037c17440c2a91a6c88b4258fbefa0c08 (patch)
treec7640158f99b06dfef6b3ef43b00e3f2df231e2b /core
parent7edbea41b43e0dc11a2de156be220db8b7952d01 (diff)
downloadspark-328c73d037c17440c2a91a6c88b4258fbefa0c08.tar.gz
spark-328c73d037c17440c2a91a6c88b4258fbefa0c08.tar.bz2
spark-328c73d037c17440c2a91a6c88b4258fbefa0c08.zip
SPARK-1197. Change yarn-standalone to yarn-cluster and fix up running on YARN docs
This patch changes "yarn-standalone" to "yarn-cluster" (but still supports the former). It also cleans up the Running on YARN docs and adds a section on how to view logs.

Author: Sandy Ryza <sandy@cloudera.com>

Closes #95 from sryza/sandy-spark-1197 and squashes the following commits:

563ef3a [Sandy Ryza] Review feedback
6ad06d4 [Sandy Ryza] Change yarn-standalone to yarn-cluster and fix up running on YARN docs
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala | 14
-rw-r--r--  core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala | 4
2 files changed, 14 insertions, 4 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 24731ad706..ce25573834 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -738,8 +738,10 @@ class SparkContext(
key = uri.getScheme match {
// A JAR file which exists only on the driver node
case null | "file" =>
- if (SparkHadoopUtil.get.isYarnMode() && master == "yarn-standalone") {
- // In order for this to work in yarn standalone mode the user must specify the
+ // yarn-standalone is deprecated, but still supported
+ if (SparkHadoopUtil.get.isYarnMode() &&
+ (master == "yarn-standalone" || master == "yarn-cluster")) {
+ // In order for this to work in yarn-cluster mode the user must specify the
// --addjars option to the client to upload the file into the distributed cache
// of the AM to make it show up in the current working directory.
val fileName = new Path(uri.getPath).getName()
@@ -1027,7 +1029,7 @@ class SparkContext(
* The SparkContext object contains a number of implicit conversions and parameters for use with
* various Spark features.
*/
-object SparkContext {
+object SparkContext extends Logging {
private[spark] val SPARK_JOB_DESCRIPTION = "spark.job.description"
@@ -1245,7 +1247,11 @@ object SparkContext {
}
scheduler
- case "yarn-standalone" =>
+ case "yarn-standalone" | "yarn-cluster" =>
+ if (master == "yarn-standalone") {
+ logWarning(
+ "\"yarn-standalone\" is deprecated as of Spark 1.0. Use \"yarn-cluster\" instead.")
+ }
val scheduler = try {
val clazz = Class.forName("org.apache.spark.scheduler.cluster.YarnClusterScheduler")
val cons = clazz.getConstructor(classOf[SparkContext])
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
index f28d5c7b13..3bb936790d 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
@@ -95,6 +95,10 @@ class SparkContextSchedulerCreationSuite
}
}
+ test("yarn-cluster") {
+ testYarn("yarn-cluster", "org.apache.spark.scheduler.cluster.YarnClusterScheduler")
+ }
+
test("yarn-standalone") {
testYarn("yarn-standalone", "org.apache.spark.scheduler.cluster.YarnClusterScheduler")
}