-rw-r--r--  core/src/main/scala/org/apache/spark/SecurityManager.scala                     26
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala        14
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/ApplicationEventListener.scala  11
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/JettyUtils.scala                         4
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/SparkUI.scala                            8
-rw-r--r--  core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala                 63
-rw-r--r--  docs/monitoring.md                                                               13
7 files changed, 128 insertions, 11 deletions
diff --git a/core/src/main/scala/org/apache/spark/SecurityManager.scala b/core/src/main/scala/org/apache/spark/SecurityManager.scala
index b52f2d4f41..b4b0067801 100644
--- a/core/src/main/scala/org/apache/spark/SecurityManager.scala
+++ b/core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -19,8 +19,6 @@ package org.apache.spark
import java.net.{Authenticator, PasswordAuthentication}
-import scala.collection.mutable.ArrayBuffer
-
import org.apache.hadoop.io.Text
import org.apache.spark.deploy.SparkHadoopUtil
@@ -139,13 +137,13 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging {
private val sparkSecretLookupKey = "sparkCookie"
private val authOn = sparkConf.getBoolean("spark.authenticate", false)
- private val uiAclsOn = sparkConf.getBoolean("spark.ui.acls.enable", false)
+ private var uiAclsOn = sparkConf.getBoolean("spark.ui.acls.enable", false)
+ private var viewAcls: Set[String] = _
// always add the current user and SPARK_USER to the viewAcls
- private val aclUsers = ArrayBuffer[String](System.getProperty("user.name", ""),
+ private val defaultAclUsers = Seq[String](System.getProperty("user.name", ""),
Option(System.getenv("SPARK_USER")).getOrElse(""))
- aclUsers ++= sparkConf.get("spark.ui.view.acls", "").split(',')
- private val viewAcls = aclUsers.map(_.trim()).filter(!_.isEmpty).toSet
+ setViewAcls(defaultAclUsers, sparkConf.get("spark.ui.view.acls", ""))
private val secretKey = generateSecretKey()
logInfo("SecurityManager, is authentication enabled: " + authOn +
@@ -170,6 +168,20 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging {
)
}
+ private[spark] def setViewAcls(defaultUsers: Seq[String], allowedUsers: String) {
+ viewAcls = (defaultUsers ++ allowedUsers.split(',')).map(_.trim()).filter(!_.isEmpty).toSet
+ logInfo("Changing view acls to: " + viewAcls.mkString(","))
+ }
+
+ private[spark] def setViewAcls(defaultUser: String, allowedUsers: String) {
+ setViewAcls(Seq[String](defaultUser), allowedUsers)
+ }
+
+ private[spark] def setUIAcls(aclSetting: Boolean) {
+ uiAclsOn = aclSetting
+ logInfo("Changing acls enabled to: " + uiAclsOn)
+ }
+
/**
* Generates or looks up the secret key.
*
@@ -222,6 +234,8 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging {
* @return true if the user has permission, otherwise false
*/
def checkUIViewPermissions(user: String): Boolean = {
+ logDebug("user=" + user + " uiAclsEnabled=" + uiAclsEnabled() + " viewAcls=" +
+ viewAcls.mkString(","))
if (uiAclsEnabled() && (user != null) && (!viewAcls.contains(user))) false else true
}
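
For context, a minimal sketch of how the reworked acl accessors fit together (the object name is hypothetical; it sits in the org.apache.spark package so the private[spark] setters are visible, just like the new SecurityManagerSuite below):

    // Hedged sketch: placed in org.apache.spark so the private[spark] setters are visible.
    package org.apache.spark

    object ViewAclsSketch {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf(false)
          .set("spark.ui.acls.enable", "true")
          .set("spark.ui.view.acls", "alice,bob")
        val secMgr = new SecurityManager(conf)

        // alice and bob (plus the current user and SPARK_USER) may view the UI.
        println(secMgr.checkUIViewPermissions("alice"))   // true
        println(secMgr.checkUIViewPermissions("mallory")) // false

        // The new mutators let a long-running server adjust the acls after construction.
        secMgr.setUIAcls(false)
        println(secMgr.checkUIViewPermissions("mallory")) // true: checks are disabled
      }
    }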
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
index d7a3246bcf..1238bbf9da 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
@@ -168,17 +168,21 @@ class HistoryServer(
* directory. If this file exists, the associated application is regarded to be completed, in
* which case the server proceeds to render the SparkUI. Otherwise, the server does nothing.
*/
- private def renderSparkUI(logDir: FileStatus, logInfo: EventLoggingInfo) {
+ private def renderSparkUI(logDir: FileStatus, elogInfo: EventLoggingInfo) {
val path = logDir.getPath
val appId = path.getName
- val replayBus = new ReplayListenerBus(logInfo.logPaths, fileSystem, logInfo.compressionCodec)
+ val replayBus = new ReplayListenerBus(elogInfo.logPaths, fileSystem, elogInfo.compressionCodec)
val appListener = new ApplicationEventListener
replayBus.addListener(appListener)
- val ui = new SparkUI(conf, replayBus, appId, "/history/" + appId)
+ val appConf = conf.clone()
+ val appSecManager = new SecurityManager(appConf)
+ val ui = new SparkUI(conf, appSecManager, replayBus, appId, "/history/" + appId)
// Do not call ui.bind() to avoid creating a new server for each application
replayBus.replay()
if (appListener.applicationStarted) {
+ appSecManager.setUIAcls(HISTORY_UI_ACLS_ENABLED)
+ appSecManager.setViewAcls(appListener.sparkUser, appListener.viewAcls)
attachSparkUI(ui)
val appName = appListener.appName
val sparkUser = appListener.sparkUser
@@ -202,6 +206,7 @@ class HistoryServer(
private def attachSparkUI(ui: SparkUI) {
assert(serverInfo.isDefined, "HistoryServer must be bound before attaching SparkUIs")
ui.getHandlers.foreach(attachHandler)
+ addFilters(ui.getHandlers, conf)
}
/** Detach a reconstructed UI from this server. Only valid after bind(). */
@@ -255,6 +260,9 @@ object HistoryServer {
// The port to which the web UI is bound
val WEB_UI_PORT = conf.getInt("spark.history.ui.port", 18080)
+ // set whether to enable or disable view acls for all applications
+ val HISTORY_UI_ACLS_ENABLED = conf.getBoolean("spark.history.ui.acls.enable", false)
+
val STATIC_RESOURCE_DIR = SparkUI.STATIC_RESOURCE_DIR
def main(argStrings: Array[String]) {
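
The essence of the change above is that each reconstructed UI gets its own SecurityManager, so one application's view acls never leak into another's. A simplified, hedged sketch of that wiring (the object and method names are hypothetical; the real logic lives in renderSparkUI above):

    // Hypothetical helper illustrating the per-application wiring from renderSparkUI.
    package org.apache.spark.deploy.history

    import org.apache.spark.{SecurityManager, SparkConf}
    import org.apache.spark.scheduler.{ApplicationEventListener, ReplayListenerBus}
    import org.apache.spark.ui.SparkUI

    object PerAppUiSketch {
      def rebuildUI(
          conf: SparkConf,
          replayBus: ReplayListenerBus,
          appId: String,
          historyAclsEnabled: Boolean): SparkUI = {
        val appListener = new ApplicationEventListener
        replayBus.addListener(appListener)

        // One SecurityManager per application, so acls are scoped to that app's UI.
        val appSecManager = new SecurityManager(conf.clone())
        val ui = new SparkUI(conf, appSecManager, replayBus, appId, "/history/" + appId)

        replayBus.replay()  // populates appListener from the event logs

        // Server-wide switch first, then the acls the application itself recorded.
        appSecManager.setUIAcls(historyAclsEnabled)
        appSecManager.setViewAcls(appListener.sparkUser, appListener.viewAcls)
        ui
      }
    }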
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ApplicationEventListener.scala b/core/src/main/scala/org/apache/spark/scheduler/ApplicationEventListener.scala
index c100122715..cd5d44ad4a 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ApplicationEventListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ApplicationEventListener.scala
@@ -28,6 +28,8 @@ private[spark] class ApplicationEventListener extends SparkListener {
var sparkUser = "<Not Started>"
var startTime = -1L
var endTime = -1L
+ var viewAcls = ""
+ var enableViewAcls = false
def applicationStarted = startTime != -1
@@ -47,4 +49,13 @@ private[spark] class ApplicationEventListener extends SparkListener {
override def onApplicationEnd(applicationEnd: SparkListenerApplicationEnd) {
endTime = applicationEnd.time
}
+
+ override def onEnvironmentUpdate(environmentUpdate: SparkListenerEnvironmentUpdate) {
+ synchronized {
+ val environmentDetails = environmentUpdate.environmentDetails
+ val allProperties = environmentDetails("Spark Properties").toMap
+ viewAcls = allProperties.getOrElse("spark.ui.view.acls", "")
+ enableViewAcls = allProperties.getOrElse("spark.ui.acls.enable", "false").toBoolean
+ }
+ }
}
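
A hedged sketch of how the listener picks those settings up from an environment update (hypothetical object name; again placed in the org.apache.spark package so the private[spark] classes are visible):

    // Hypothetical example: drive the listener with a hand-built environment update.
    package org.apache.spark

    import org.apache.spark.scheduler.{ApplicationEventListener, SparkListenerEnvironmentUpdate}

    object AclListenerSketch {
      def main(args: Array[String]): Unit = {
        val listener = new ApplicationEventListener
        val environmentDetails = Map(
          "Spark Properties" -> Seq(
            "spark.ui.acls.enable" -> "true",
            "spark.ui.view.acls" -> "alice,bob"))
        listener.onEnvironmentUpdate(SparkListenerEnvironmentUpdate(environmentDetails))
        println(listener.enableViewAcls)  // true
        println(listener.viewAcls)        // alice,bob
      }
    }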
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index fdeb15b5d0..b3ac2320f3 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -137,8 +137,8 @@ private[spark] object JettyUtils extends Logging {
contextHandler
}
- /** Add security filters, if any, do the given list of ServletContextHandlers */
- private def addFilters(handlers: Seq[ServletContextHandler], conf: SparkConf) {
+ /** Add filters, if any, to the given list of ServletContextHandlers */
+ def addFilters(handlers: Seq[ServletContextHandler], conf: SparkConf) {
val filters: Array[String] = conf.get("spark.ui.filters", "").split(',').map(_.trim())
filters.foreach {
case filter : String =>
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index 2fef1a6354..097a1b81e1 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -43,6 +43,14 @@ private[spark] class SparkUI(
def this(conf: SparkConf, listenerBus: SparkListenerBus, appName: String, basePath: String) =
this(null, conf, new SecurityManager(conf), listenerBus, appName, basePath)
+ def this(
+ conf: SparkConf,
+ securityManager: SecurityManager,
+ listenerBus: SparkListenerBus,
+ appName: String,
+ basePath: String) =
+ this(null, conf, securityManager, listenerBus, appName, basePath)
+
// If SparkContext is not provided, assume the associated application is not live
val live = sc != null
diff --git a/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala b/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
new file mode 100644
index 0000000000..e39093e24d
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark
+
+import scala.collection.mutable.ArrayBuffer
+
+import org.scalatest.FunSuite
+
+class SecurityManagerSuite extends FunSuite {
+
+ test("set security with conf") {
+ val conf = new SparkConf
+ conf.set("spark.authenticate", "true")
+ conf.set("spark.authenticate.secret", "good")
+ conf.set("spark.ui.acls.enable", "true")
+ conf.set("spark.ui.view.acls", "user1,user2")
+ val securityManager = new SecurityManager(conf)
+ assert(securityManager.isAuthenticationEnabled() === true)
+ assert(securityManager.uiAclsEnabled() === true)
+ assert(securityManager.checkUIViewPermissions("user1") === true)
+ assert(securityManager.checkUIViewPermissions("user2") === true)
+ assert(securityManager.checkUIViewPermissions("user3") === false)
+ }
+
+ test("set security with api") {
+ val conf = new SparkConf
+ conf.set("spark.ui.view.acls", "user1,user2")
+ val securityManager = new SecurityManager(conf)
+ securityManager.setUIAcls(true)
+ assert(securityManager.uiAclsEnabled() === true)
+ securityManager.setUIAcls(false)
+ assert(securityManager.uiAclsEnabled() === false)
+
+ // acls are off, so it doesn't matter what the view acls are set to
+ assert(securityManager.checkUIViewPermissions("user4") === true)
+
+ securityManager.setUIAcls(true)
+ assert(securityManager.uiAclsEnabled() === true)
+ securityManager.setViewAcls(ArrayBuffer[String]("user5"), "user6,user7")
+ assert(securityManager.checkUIViewPermissions("user1") === false)
+ assert(securityManager.checkUIViewPermissions("user5") === true)
+ assert(securityManager.checkUIViewPermissions("user6") === true)
+ assert(securityManager.checkUIViewPermissions("user7") === true)
+ assert(securityManager.checkUIViewPermissions("user8") === false)
+ assert(securityManager.checkUIViewPermissions(null) === true)
+ }
+}
+
diff --git a/docs/monitoring.md b/docs/monitoring.md
index 347a9b1f1a..6f35fc37c4 100644
--- a/docs/monitoring.md
+++ b/docs/monitoring.md
@@ -115,6 +115,19 @@ represents an application's event logs. This creates a web interface at
Location of the kerberos keytab file for the History Server.
</td>
</tr>
+ <tr>
+ <td>spark.history.ui.acls.enable</td>
+ <td>false</td>
+ <td>
+ Specifies whether acls should be checked to authorize users viewing the applications.
+ If enabled, access control checks are made regardless of what the individual application had
+ set for <code>spark.ui.acls.enable</code> when the application was run. The application owner
+ will always have authorization to view their own application, and any users specified via
+ <code>spark.ui.view.acls</code> when the application was run will also have authorization
+ to view that application.
+ If disabled, no access control checks are made.
+ </td>
+ </tr>
</table>
Note that in all of these UIs, the tables are sortable by clicking their headers,
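
As a usage note, one hedged way to turn the server-wide check on (this assumes the history server daemon picks up -D system properties via SPARK_HISTORY_OPTS in spark-env.sh, as with the other spark.history.* settings; adjust to however your deployment passes configuration):

    # spark-env.sh (assumption: SPARK_HISTORY_OPTS is honored by the history server daemon)
    export SPARK_HISTORY_OPTS="-Dspark.history.ui.acls.enable=true"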