author     Marcelo Vanzin <vanzin@cloudera.com>  2015-04-17 19:02:07 -0700
committer  Andrew Or <andrew@databricks.com>     2015-04-17 19:02:07 -0700
commit     1991337336596f94698e79c2366f065c374128ab (patch)
tree       9ddb184bf0adb8acf0ae063007bab1052093675f /yarn
parent     6fbeb82e13db7117d8f216e6148632490a4bc5be (diff)
[SPARK-5933] [core] Move config deprecation warnings to SparkConf.
I didn't find many deprecated configs after a grep-based search, but the ones I could find were moved to the centralized location in SparkConf. While there, I deprecated a couple more History Server (HS) configs that mentioned time units.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #5562 from vanzin/SPARK-5933 and squashes the following commits:

dcb617e7 [Marcelo Vanzin] [SPARK-5933] [core] Move config deprecation warnings to SparkConf.
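The change moves per-call-site deprecation checks into one lookup table that is consulted wherever config keys are handled. Below is a minimal, self-contained Scala sketch of that pattern; the names (ConfigDeprecations, DeprecatedConfig, warnIfDeprecated) are illustrative and are not claimed to match SparkConf's actual internals.

// Sketch of a centralized deprecation registry (illustrative names only).
object ConfigDeprecations {
  final case class DeprecatedConfig(key: String, sinceVersion: String, advice: String)

  // Single place where deprecated keys are declared; the entry below is the
  // one this commit removes from ApplicationMaster.scala.
  private val deprecated: Map[String, DeprecatedConfig] = Seq(
    DeprecatedConfig("spark.yarn.applicationMaster.waitTries", "1.3",
      "Use spark.yarn.am.waitTime instead.")
  ).map(c => c.key -> c).toMap

  // Called from the one code path that handles config keys, instead of
  // scattering isDefined checks and logWarning calls across the code base.
  def warnIfDeprecated(key: String): Unit = {
    deprecated.get(key).foreach { d =>
      Console.err.println(
        s"WARNING: '${d.key}' is deprecated as of Spark ${d.sinceVersion}. ${d.advice}")
    }
  }
}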
Diffstat (limited to 'yarn')
-rw-r--r--  yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala | 9
1 file changed, 1 insertion(+), 8 deletions(-)
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index c357b7ae9d..f7a84207e9 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -373,14 +373,7 @@ private[spark] class ApplicationMaster(
   private def waitForSparkContextInitialized(): SparkContext = {
     logInfo("Waiting for spark context initialization")
     sparkContextRef.synchronized {
-      val waitTries = sparkConf.getOption("spark.yarn.applicationMaster.waitTries")
-        .map(_.toLong * 10000L)
-      if (waitTries.isDefined) {
-        logWarning(
-          "spark.yarn.applicationMaster.waitTries is deprecated, use spark.yarn.am.waitTime")
-      }
-      val totalWaitTime = sparkConf.getTimeAsMs("spark.yarn.am.waitTime",
-        s"${waitTries.getOrElse(100000L)}ms")
+      val totalWaitTime = sparkConf.getTimeAsMs("spark.yarn.am.waitTime", "100s")
       val deadline = System.currentTimeMillis() + totalWaitTime
       while (sparkContextRef.get() == null && System.currentTimeMillis < deadline && !finished) {
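After the change, the wait is driven entirely by SparkConf.getTimeAsMs, which accepts values with time-unit suffixes and falls back to the supplied default; the hard-coded "100s" parses to 100000 ms, the same fallback the removed code computed. A small usage sketch, assuming a Spark 1.4-era SparkConf on the classpath; the object name and the "2min" value are purely illustrative.

import org.apache.spark.SparkConf

object WaitTimeExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(loadDefaults = false)

    // Nothing set: the "100s" default parses to 100000 ms, matching the
    // removed code's 100000L fallback.
    println(conf.getTimeAsMs("spark.yarn.am.waitTime", "100s"))  // 100000

    // Users now give a time with a unit suffix rather than a try count.
    conf.set("spark.yarn.am.waitTime", "2min")
    println(conf.getTimeAsMs("spark.yarn.am.waitTime", "100s"))  // 120000
  }
}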