author     Marcelo Vanzin <vanzin@cloudera.com>    2016-03-07 14:13:44 -0800
committer  Marcelo Vanzin <vanzin@cloudera.com>    2016-03-07 14:13:44 -0800
commit     e1fb857992074164dcaa02498c5a9604fac6f57e (patch)
tree       5f2a9de0230df4ebd0ca7317c879472eb8d3fbbc /yarn/src/test/scala/org
parent     e9e67b39abb23a88d8be2d0fea5b5fd93184a25b (diff)
[SPARK-529][CORE][YARN] Add type-safe config keys to SparkConf.
This is, in a way, the basics to enable SPARK-529 (which was closed as won't fix but I think is still valuable). In fact, Spark SQL created something for that, and this change basically factors out that code and inserts it into SparkConf, with some extra bells and whistles.

To showcase the usage of this pattern, I modified the YARN backend to use the new config keys (defined in the new `config` package object under `o.a.s.deploy.yarn`). Most of the changes are mechanical, although logic had to be slightly modified in a handful of places.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #10205 from vanzin/conf-opts.
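For context on the test changes below, the type-safe key pattern looks roughly like the following sketch. The builder entry point and method names (ConfigBuilder, booleanConf, createWithDefault) are assumptions modeled on the description above, not verbatim code from the patch; what the diff itself confirms is only that SparkConf gains set/get overloads accepting these typed entries.

    // Illustrative sketch only -- builder names below are assumed, not copied
    // from the patch. A typed key is declared once in the new `config`
    // package object under o.a.s.deploy.yarn:
    package object config {
      private[spark] val USER_CLASS_PATH_FIRST =
        ConfigBuilder("spark.yarn.user.classpath.first")  // key string kept in one place
          .booleanConf                                    // value type declared with the key
          .createWithDefault(false)                       // default lives with the key
    }

    // Callers then set and read the value with its declared type, as the
    // updated tests do, instead of passing raw strings:
    val sparkConf = new SparkConf()
      .set(USER_CLASS_PATH_FIRST, true)                   // a Boolean, not "true"
    val userFirst: Boolean = sparkConf.get(USER_CLASS_PATH_FIRST)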
Diffstat (limited to 'yarn/src/test/scala/org')
-rw-r--r--  yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala                             | 26
-rw-r--r--  yarn/src/test/scala/org/apache/spark/scheduler/cluster/ExtensionServiceIntegrationSuite.scala  |  4
2 files changed, 16 insertions, 14 deletions
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
index 19065373c6..b57c179d89 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
@@ -41,6 +41,7 @@ import org.mockito.Mockito._
import org.scalatest.{BeforeAndAfterAll, Matchers}
import org.apache.spark.{SparkConf, SparkFunSuite}
+import org.apache.spark.deploy.yarn.config._
import org.apache.spark.util.{ResetSystemProperties, Utils}
class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
@@ -103,8 +104,9 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
test("Local jar URIs") {
val conf = new Configuration()
- val sparkConf = new SparkConf().set(Client.CONF_SPARK_JAR, SPARK)
- .set("spark.yarn.user.classpath.first", "true")
+ val sparkConf = new SparkConf()
+ .set(SPARK_JAR, SPARK)
+ .set(USER_CLASS_PATH_FIRST, true)
val env = new MutableHashMap[String, String]()
val args = new ClientArguments(Array("--jar", USER, "--addJars", ADDED), sparkConf)
@@ -129,13 +131,13 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
}
cp should contain(pwdVar)
cp should contain (s"$pwdVar${Path.SEPARATOR}${Client.LOCALIZED_CONF_DIR}")
- cp should not contain (Client.SPARK_JAR)
- cp should not contain (Client.APP_JAR)
+ cp should not contain (Client.SPARK_JAR_NAME)
+ cp should not contain (Client.APP_JAR_NAME)
}
test("Jar path propagation through SparkConf") {
val conf = new Configuration()
- val sparkConf = new SparkConf().set(Client.CONF_SPARK_JAR, SPARK)
+ val sparkConf = new SparkConf().set(SPARK_JAR, SPARK)
val args = new ClientArguments(Array("--jar", USER, "--addJars", ADDED), sparkConf)
val client = spy(new Client(args, conf, sparkConf))
@@ -145,7 +147,7 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
val tempDir = Utils.createTempDir()
try {
client.prepareLocalResources(tempDir.getAbsolutePath(), Nil)
- sparkConf.getOption(Client.CONF_SPARK_USER_JAR) should be (Some(USER))
+ sparkConf.get(APP_JAR) should be (Some(USER))
// The non-local path should be propagated by name only, since it will end up in the app's
// staging dir.
@@ -160,7 +162,7 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
})
.mkString(",")
- sparkConf.getOption(Client.CONF_SPARK_YARN_SECONDARY_JARS) should be (Some(expected))
+ sparkConf.get(SECONDARY_JARS) should be (Some(expected.split(",").toSeq))
} finally {
Utils.deleteRecursively(tempDir)
}
@@ -169,9 +171,9 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
test("Cluster path translation") {
val conf = new Configuration()
val sparkConf = new SparkConf()
- .set(Client.CONF_SPARK_JAR, "local:/localPath/spark.jar")
- .set("spark.yarn.config.gatewayPath", "/localPath")
- .set("spark.yarn.config.replacementPath", "/remotePath")
+ .set(SPARK_JAR.key, "local:/localPath/spark.jar")
+ .set(GATEWAY_ROOT_PATH, "/localPath")
+ .set(REPLACEMENT_ROOT_PATH, "/remotePath")
Client.getClusterPath(sparkConf, "/localPath") should be ("/remotePath")
Client.getClusterPath(sparkConf, "/localPath/1:/localPath/2") should be (
@@ -191,8 +193,8 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
// Spaces between non-comma strings should be preserved as single tags. Empty strings may or
// may not be removed depending on the version of Hadoop being used.
val sparkConf = new SparkConf()
- .set(Client.CONF_SPARK_YARN_APPLICATION_TAGS, ",tag1, dup,tag2 , ,multi word , dup")
- .set("spark.yarn.maxAppAttempts", "42")
+ .set(APPLICATION_TAGS.key, ",tag1, dup,tag2 , ,multi word , dup")
+ .set(MAX_APP_ATTEMPTS, 42)
val args = new ClientArguments(Array(
"--name", "foo-test-app",
"--queue", "staging-queue"), sparkConf)
diff --git a/yarn/src/test/scala/org/apache/spark/scheduler/cluster/ExtensionServiceIntegrationSuite.scala b/yarn/src/test/scala/org/apache/spark/scheduler/cluster/ExtensionServiceIntegrationSuite.scala
index b4d1b0a3d2..338fbe2ef4 100644
--- a/yarn/src/test/scala/org/apache/spark/scheduler/cluster/ExtensionServiceIntegrationSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/scheduler/cluster/ExtensionServiceIntegrationSuite.scala
@@ -20,6 +20,7 @@ package org.apache.spark.scheduler.cluster
import org.scalatest.BeforeAndAfter
import org.apache.spark.{LocalSparkContext, Logging, SparkConf, SparkContext, SparkFunSuite}
+import org.apache.spark.deploy.yarn.config._
/**
* Test the integration with [[SchedulerExtensionServices]]
@@ -36,8 +37,7 @@ class ExtensionServiceIntegrationSuite extends SparkFunSuite
*/
before {
val sparkConf = new SparkConf()
- sparkConf.set(SchedulerExtensionServices.SPARK_YARN_SERVICES,
- classOf[SimpleExtensionService].getName())
+ sparkConf.set(SCHEDULER_SERVICES, Seq(classOf[SimpleExtensionService].getName()))
sparkConf.setMaster("local").setAppName("ExtensionServiceIntegrationSuite")
sc = new SparkContext(sparkConf)
}
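One detail worth noting in the two assertions above that now expect Seq values (SECONDARY_JARS in ClientSuite and SCHEDULER_SERVICES in ExtensionServiceIntegrationSuite): they only type-check if those entries are declared as sequence-valued configs. A minimal, purely illustrative declaration, with builder method names assumed rather than taken from the patch:

    // Hypothetical sketch; stringConf, toSequence and optional are assumed
    // builder names, and the key string is an assumption. An optional,
    // sequence-valued entry would make sparkConf.get(SECONDARY_JARS) return
    // Option[Seq[String]], which is why the test now compares against
    // Some(expected.split(",").toSeq) instead of a comma-joined string.
    private[spark] val SECONDARY_JARS =
      ConfigBuilder("spark.yarn.secondary.jars")
        .stringConf
        .toSequence
        .optional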