 core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala  | 4 ++--
 core/src/main/scala/org/apache/spark/internal/config/package.scala | 7 +++++++
 yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala      | 5 -----
 3 files changed, 9 insertions(+), 7 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 7a5fc866bb..bb1793d451 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -41,6 +41,7 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config._
import org.apache.spark.util.Utils
/**
@@ -288,8 +289,7 @@ class SparkHadoopUtil extends Logging {
credentials: Credentials): Long = {
val now = System.currentTimeMillis()
- val renewalInterval =
- sparkConf.getLong("spark.yarn.token.renewal.interval", (24 hours).toMillis)
+ val renewalInterval = sparkConf.get(TOKEN_RENEWAL_INTERVAL).get
credentials.getAllTokens.asScala
.filter(_.getKind == DelegationTokenIdentifier.HDFS_DELEGATION_KIND)
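With this change the renewal interval is read through the shared TOKEN_RENEWAL_INTERVAL entry instead of a hard-coded key with a (24 hours).toMillis default. A minimal sketch of how the optional entry behaves, assuming code that lives inside the org.apache.spark package (both the entry and SparkConf.get(ConfigEntry) are private[spark]); the conf value and the fallback below are illustrative only, and the bare .get above relies on the YARN credential code having already set the key:

    import java.util.concurrent.TimeUnit
    import org.apache.spark.SparkConf
    import org.apache.spark.internal.config._

    val conf = new SparkConf()
      // timeConf parses duration strings such as "24h" into milliseconds
      .set("spark.yarn.token.renewal.interval", "24h")

    // TOKEN_RENEWAL_INTERVAL is createOptional, so reading it yields Option[Long]
    val renewalIntervalMs: Long =
      conf.get(TOKEN_RENEWAL_INTERVAL).getOrElse(TimeUnit.HOURS.toMillis(24))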
diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index 2c1e0b71e3..05dd68300f 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -17,6 +17,8 @@
package org.apache.spark.internal
+import java.util.concurrent.TimeUnit
+
import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.network.util.ByteUnit
@@ -80,6 +82,11 @@ package object config {
.doc("Name of the Kerberos principal.")
.stringConf.createOptional
+ private[spark] val TOKEN_RENEWAL_INTERVAL = ConfigBuilder("spark.yarn.token.renewal.interval")
+ .internal()
+ .timeConf(TimeUnit.MILLISECONDS)
+ .createOptional
+
private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
.intConf
.createOptional
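For contrast with the optional entry added above, a hypothetical entry (name and default invented for illustration, not part of this patch) shows the other common ConfigBuilder shape, where a default string avoids the Option on read:

    import java.util.concurrent.TimeUnit

    // Hypothetical, for illustration only: a time config with a built-in default.
    private[spark] val EXAMPLE_RENEWAL_CHECK_INTERVAL =
      ConfigBuilder("spark.example.renewalCheckInterval")
        .internal()
        .timeConf(TimeUnit.MILLISECONDS)
        .createWithDefaultString("1h")  // reads back as 3600000L, no Option involved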
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala
index c4dd3202f0..ad2412e025 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala
@@ -243,11 +243,6 @@ package object config {
.toSequence
.createWithDefault(Nil)
- private[spark] val TOKEN_RENEWAL_INTERVAL = ConfigBuilder("spark.yarn.token.renewal.interval")
- .internal()
- .timeConf(TimeUnit.MILLISECONDS)
- .createOptional
-
/* Private configs. */
private[spark] val CREDENTIALS_FILE_PATH = ConfigBuilder("spark.yarn.credentials.file")
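After the move, the YARN module reaches the entry through org.apache.spark.internal.config rather than its own duplicate definition. A rough sketch of the reading-side contract (the helper name and call site are illustrative, assuming credential-renewer-style code that computes the interval and publishes it for SparkHadoopUtil to pick up):

    import org.apache.spark.SparkConf
    import org.apache.spark.internal.config._  // TOKEN_RENEWAL_INTERVAL now lives here

    // Illustrative helper: store the computed interval under the shared key so that
    // SparkHadoopUtil's sparkConf.get(TOKEN_RENEWAL_INTERVAL) finds a value.
    def publishRenewalInterval(conf: SparkConf, intervalMs: Long): Unit = {
      conf.set(TOKEN_RENEWAL_INTERVAL.key, s"${intervalMs}ms")
    }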