author    jerryshao <sshao@hortonworks.com>    2017-01-06 10:07:54 -0600
committer Tom Graves <tgraves@yahoo-inc.com>   2017-01-06 10:07:54 -0600
commit    4a4c3dc9ca10e52f7981b225ec44e97247986905 (patch)
tree      6036fab912a00dd10bd9f9c9aa121617163bbe0c /core
parent    903bb8e8a2b84b9ea82acbb8ae9d58754862be3a (diff)
[SPARK-19033][CORE] Add admin acls for history server
## What changes were proposed in this pull request?

Currently the HistoryServer's ACLs are derived from the application event log, which means newly changed ACLs cannot be applied to old data. This becomes a problem when a newly added admin cannot access the history UI of old applications; only newly run applications pick up the change. This patch therefore adds admin ACLs for the history server itself: any configured user/group gets view access to all applications, while the view ACLs derived from each application's run time still take effect.

## How was this patch tested?

Unit test added.

Author: jerryshao <sshao@hortonworks.com>

Closes #16470 from jerryshao/SPARK-19033.
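For illustration, a minimal sketch of how the new properties could be set (the property names are taken from the diff below; setting them programmatically on a `SparkConf`, as the test suite does, is just one possible deployment choice):

```scala
import org.apache.spark.SparkConf

// Hypothetical configuration sketch: enable history UI ACL checks and grant
// admin view access to specific users and groups, mirroring the properties
// introduced by this patch.
val conf = new SparkConf()
  .set("spark.history.ui.acls.enable", "true")          // turn ACL checks on
  .set("spark.history.ui.admin.acls", "user1,user2")    // users allowed to view every application UI
  .set("spark.history.ui.admin.acls.groups", "group1")  // groups allowed to view every application UI
```

Users and groups listed here gain view access to every application served by the history server, while the per-application view ACLs recorded in the event logs continue to apply on top.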
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala      |  20
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala | 111
2 files changed, 124 insertions(+), 7 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
index 3011ed0f95..cd241d6d22 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
@@ -97,6 +97,13 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
.map { d => Utils.resolveURI(d).toString }
.getOrElse(DEFAULT_LOG_DIR)
+ private val HISTORY_UI_ACLS_ENABLE = conf.getBoolean("spark.history.ui.acls.enable", false)
+ private val HISTORY_UI_ADMIN_ACLS = conf.get("spark.history.ui.admin.acls", "")
+ private val HISTORY_UI_ADMIN_ACLS_GROUPS = conf.get("spark.history.ui.admin.acls.groups", "")
+ logInfo(s"History server ui acls " + (if (HISTORY_UI_ACLS_ENABLE) "enabled" else "disabled") +
+ "; users with admin permissions: " + HISTORY_UI_ADMIN_ACLS.toString +
+ "; groups with admin permissions" + HISTORY_UI_ADMIN_ACLS_GROUPS.toString)
+
private val hadoopConf = SparkHadoopUtil.get.newConfiguration(conf)
private val fs = Utils.getHadoopFileSystem(logDir, hadoopConf)
@@ -250,13 +257,14 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
val appListener = replay(fileStatus, isApplicationCompleted(fileStatus), replayBus)
if (appListener.appId.isDefined) {
- val uiAclsEnabled = conf.getBoolean("spark.history.ui.acls.enable", false)
- ui.getSecurityManager.setAcls(uiAclsEnabled)
+ ui.getSecurityManager.setAcls(HISTORY_UI_ACLS_ENABLE)
// make sure to set admin acls before view acls so they are properly picked up
- ui.getSecurityManager.setAdminAcls(appListener.adminAcls.getOrElse(""))
- ui.getSecurityManager.setViewAcls(attempt.sparkUser,
- appListener.viewAcls.getOrElse(""))
- ui.getSecurityManager.setAdminAclsGroups(appListener.adminAclsGroups.getOrElse(""))
+ val adminAcls = HISTORY_UI_ADMIN_ACLS + "," + appListener.adminAcls.getOrElse("")
+ ui.getSecurityManager.setAdminAcls(adminAcls)
+ ui.getSecurityManager.setViewAcls(attempt.sparkUser, appListener.viewAcls.getOrElse(""))
+ val adminAclsGroups = HISTORY_UI_ADMIN_ACLS_GROUPS + "," +
+ appListener.adminAclsGroups.getOrElse("")
+ ui.getSecurityManager.setAdminAclsGroups(adminAclsGroups)
ui.getSecurityManager.setViewAclsGroups(appListener.viewAclsGroups.getOrElse(""))
Some(LoadedAppUI(ui, updateProbe(appId, attemptId, attempt.fileSize)))
} else {
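As a reading aid for the hunk above: the configured history-server admin ACLs are merged with the ACLs recorded by the application itself by simple comma concatenation before being handed to the `SecurityManager`. A minimal sketch of that merge, using hypothetical values (the `appListener` input is an assumption for illustration, not taken from a real event log):

```scala
// History-server-wide admin ACLs, read from spark.history.ui.admin.acls.
val HISTORY_UI_ADMIN_ACLS = "user1,user2"

// Admin ACLs recorded in the application's event log (hypothetical value).
val appListenerAdminAcls: Option[String] = Some("user")

// The patch concatenates both sources with a comma before calling
// SecurityManager.setAdminAcls, so both the statically configured admins
// and the application's own admins keep view access.
val adminAcls = HISTORY_UI_ADMIN_ACLS + "," + appListenerAdminAcls.getOrElse("")
// adminAcls == "user1,user2,user"
```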
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index 027f412c75..8cb359ed45 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -35,10 +35,11 @@ import org.scalatest.BeforeAndAfter
import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually._
-import org.apache.spark.{SparkConf, SparkFunSuite}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.internal.Logging
import org.apache.spark.io._
import org.apache.spark.scheduler._
+import org.apache.spark.security.GroupMappingServiceProvider
import org.apache.spark.util.{Clock, JsonProtocol, ManualClock, Utils}
class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
@@ -474,6 +475,102 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
}
}
+ test("support history server ui admin acls") {
+ def createAndCheck(conf: SparkConf, properties: (String, String)*)
+ (checkFn: SecurityManager => Unit): Unit = {
+ // Empty the testDir for each test.
+ if (testDir.exists() && testDir.isDirectory) {
+ testDir.listFiles().foreach { f => if (f.isFile) f.delete() }
+ }
+
+ var provider: FsHistoryProvider = null
+ try {
+ provider = new FsHistoryProvider(conf)
+ val log = newLogFile("app1", Some("attempt1"), inProgress = false)
+ writeFile(log, true, None,
+ SparkListenerApplicationStart("app1", Some("app1"), System.currentTimeMillis(),
+ "test", Some("attempt1")),
+ SparkListenerEnvironmentUpdate(Map(
+ "Spark Properties" -> properties.toSeq,
+ "JVM Information" -> Seq.empty,
+ "System Properties" -> Seq.empty,
+ "Classpath Entries" -> Seq.empty
+ )),
+ SparkListenerApplicationEnd(System.currentTimeMillis()))
+
+ provider.checkForLogs()
+ val appUi = provider.getAppUI("app1", Some("attempt1"))
+
+ assert(appUi.nonEmpty)
+ val securityManager = appUi.get.ui.securityManager
+ checkFn(securityManager)
+ } finally {
+ if (provider != null) {
+ provider.stop()
+ }
+ }
+ }
+
+ // Test both history ui admin acls and application acls are configured.
+ val conf1 = createTestConf()
+ .set("spark.history.ui.acls.enable", "true")
+ .set("spark.history.ui.admin.acls", "user1,user2")
+ .set("spark.history.ui.admin.acls.groups", "group1")
+ .set("spark.user.groups.mapping", classOf[TestGroupsMappingProvider].getName)
+
+ createAndCheck(conf1, ("spark.admin.acls", "user"), ("spark.admin.acls.groups", "group")) {
+ securityManager =>
+ // Test whether user has permission to access UI.
+ securityManager.checkUIViewPermissions("user1") should be (true)
+ securityManager.checkUIViewPermissions("user2") should be (true)
+ securityManager.checkUIViewPermissions("user") should be (true)
+ securityManager.checkUIViewPermissions("abc") should be (false)
+
+ // Test whether user with admin group has permission to access UI.
+ securityManager.checkUIViewPermissions("user3") should be (true)
+ securityManager.checkUIViewPermissions("user4") should be (true)
+ securityManager.checkUIViewPermissions("user5") should be (true)
+ securityManager.checkUIViewPermissions("user6") should be (false)
+ }
+
+ // Test only history ui admin acls are configured.
+ val conf2 = createTestConf()
+ .set("spark.history.ui.acls.enable", "true")
+ .set("spark.history.ui.admin.acls", "user1,user2")
+ .set("spark.history.ui.admin.acls.groups", "group1")
+ .set("spark.user.groups.mapping", classOf[TestGroupsMappingProvider].getName)
+ createAndCheck(conf2) { securityManager =>
+ // Test whether user has permission to access UI.
+ securityManager.checkUIViewPermissions("user1") should be (true)
+ securityManager.checkUIViewPermissions("user2") should be (true)
+ // Check the unknown "user" should return false
+ securityManager.checkUIViewPermissions("user") should be (false)
+
+ // Test whether user with admin group has permission to access UI.
+ securityManager.checkUIViewPermissions("user3") should be (true)
+ securityManager.checkUIViewPermissions("user4") should be (true)
+ // Check the "user5" without mapping relation should return false
+ securityManager.checkUIViewPermissions("user5") should be (false)
+ }
+
+ // Test neither history ui admin acls nor application acls are configured.
+ val conf3 = createTestConf()
+ .set("spark.history.ui.acls.enable", "true")
+ .set("spark.user.groups.mapping", classOf[TestGroupsMappingProvider].getName)
+ createAndCheck(conf3) { securityManager =>
+ // Test whether user has permission to access UI.
+ securityManager.checkUIViewPermissions("user1") should be (false)
+ securityManager.checkUIViewPermissions("user2") should be (false)
+ securityManager.checkUIViewPermissions("user") should be (false)
+
+ // Test whether user with admin group has permission to access UI.
+ // Check should be failed since we don't have acl group settings.
+ securityManager.checkUIViewPermissions("user3") should be (false)
+ securityManager.checkUIViewPermissions("user4") should be (false)
+ securityManager.checkUIViewPermissions("user5") should be (false)
+ }
+ }
+
/**
* Asks the provider to check for logs and calls a function to perform checks on the updated
* app list. Example:
@@ -532,3 +629,15 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
}
}
+
+class TestGroupsMappingProvider extends GroupMappingServiceProvider {
+ private val mappings = Map(
+ "user3" -> "group1",
+ "user4" -> "group1",
+ "user5" -> "group")
+
+ override def getGroups(username: String): Set[String] = {
+ mappings.get(username).map(Set(_)).getOrElse(Set.empty)
+ }
+}
+