about summary refs log tree commit diff
path: root/core/src
diff options
context:
space:
mode:
authorMarcelo Vanzin <vanzin@cloudera.com>2016-06-15 09:09:21 -0500
committerTom Graves <tgraves@yahoo-inc.com>2016-06-15 09:09:21 -0500
commit40eeef95256b0740d759d921f0385023f0b91666 (patch)
tree77b5983ab30e9a8c0ab2675cfe48341d686c7fbc /core/src
parent0ee9fd9e528206a5edfb2cc4a56538250b428aaf (diff)
downloadspark-40eeef95256b0740d759d921f0385023f0b91666.tar.gz
spark-40eeef95256b0740d759d921f0385023f0b91666.tar.bz2
spark-40eeef95256b0740d759d921f0385023f0b91666.zip
[SPARK-15046][YARN] Parse value of token renewal interval correctly.
Use the config variable definition both to set and parse the value, avoiding issues with code expecting the value in a different format. Tested by running spark-submit with --principal / --keytab. Author: Marcelo Vanzin <vanzin@cloudera.com> Closes #13669 from vanzin/SPARK-15046.
Diffstat (limited to 'core/src')
-rw-r--r-- core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala | 4
-rw-r--r-- core/src/main/scala/org/apache/spark/internal/config/package.scala | 7
2 files changed, 9 insertions, 2 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 7a5fc866bb..bb1793d451 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -41,6 +41,7 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdenti
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config._
import org.apache.spark.util.Utils
/**
@@ -288,8 +289,7 @@ class SparkHadoopUtil extends Logging {
credentials: Credentials): Long = {
val now = System.currentTimeMillis()
- val renewalInterval =
- sparkConf.getLong("spark.yarn.token.renewal.interval", (24 hours).toMillis)
+ val renewalInterval = sparkConf.get(TOKEN_RENEWAL_INTERVAL).get
credentials.getAllTokens.asScala
.filter(_.getKind == DelegationTokenIdentifier.HDFS_DELEGATION_KIND)
diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index 2c1e0b71e3..05dd68300f 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -17,6 +17,8 @@
package org.apache.spark.internal
+import java.util.concurrent.TimeUnit
+
import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.network.util.ByteUnit
@@ -80,6 +82,11 @@ package object config {
.doc("Name of the Kerberos principal.")
.stringConf.createOptional
+ private[spark] val TOKEN_RENEWAL_INTERVAL = ConfigBuilder("spark.yarn.token.renewal.interval")
+ .internal()
+ .timeConf(TimeUnit.MILLISECONDS)
+ .createOptional
+
private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
.intConf
.createOptional