author     Marcelo Vanzin <vanzin@cloudera.com>   2015-08-06 15:30:27 -0700
committer  Marcelo Vanzin <vanzin@cloudera.com>   2015-08-06 15:30:27 -0700
commit     e234ea1b49d30bb6c8b8c001bd98c43de290dcff (patch)
tree       78111769b076ed89afab4e447572daad758bed68 /core
parent     3504bf3aa9f7b75c0985f04ce2944833d8c5b5bd (diff)
download   spark-e234ea1b49d30bb6c8b8c001bd98c43de290dcff.tar.gz
           spark-e234ea1b49d30bb6c8b8c001bd98c43de290dcff.tar.bz2
           spark-e234ea1b49d30bb6c8b8c001bd98c43de290dcff.zip
[SPARK-9645] [YARN] [CORE] Allow shuffle service to read shuffle files.
Spark should not mess with the permissions of directories created by the cluster manager. Previously, because the block manager set its local dir permissions to 700, the shuffle service (running as the YARN user) could not serve shuffle files created by applications.

Also, the code to protect the local app dir was missing in standalone's Worker; that has now been added. Since all processes run as the same user in standalone, `chmod 700` should not cause problems.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #7966 from vanzin/SPARK-9645 and squashes the following commits:

6e07b31 [Marcelo Vanzin] Protect the app dir in standalone mode.
384ba6a [Marcelo Vanzin] [SPARK-9645] [yarn] [core] Allow shuffle service to read shuffle files.
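For illustration, here is a minimal Scala sketch of the standalone-side idea: create a per-application local directory and restrict it to the owning user. It uses java.nio.file's POSIX permission API rather than Spark's own helpers, and the names AppDirSketch and createOwnerOnlyDir are hypothetical, not part of this patch.

import java.io.File
import java.nio.file.Files
import java.nio.file.attribute.PosixFilePermissions

object AppDirSketch {
  // Create a uniquely named directory under `root` and lock it down to the
  // current user, the rough equivalent of `chmod 700` on POSIX filesystems.
  def createOwnerOnlyDir(root: File, prefix: String): File = {
    val dir = Files.createTempDirectory(root.toPath, prefix).toFile
    Files.setPosixFilePermissions(dir.toPath, PosixFilePermissions.fromString("rwx------"))
    dir
  }
}

Because the standalone Master, Worker, and executors all run as the same OS user, an owner-only directory stays accessible to every process that needs it, which is why `chmod 700` is safe there but too strict for the YARN shuffle service.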
Diffstat (limited to 'core')
-rwxr-xr-x  core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala        4
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala    1
2 files changed, 3 insertions, 2 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index 6792d3310b..79b1536d94 100755
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -428,7 +428,9 @@ private[deploy] class Worker(
           // application finishes.
           val appLocalDirs = appDirectories.get(appId).getOrElse {
             Utils.getOrCreateLocalRootDirs(conf).map { dir =>
-              Utils.createDirectory(dir, namePrefix = "executor").getAbsolutePath()
+              val appDir = Utils.createDirectory(dir, namePrefix = "executor")
+              Utils.chmod700(appDir)
+              appDir.getAbsolutePath()
             }.toSeq
           }
           appDirectories(appId) = appLocalDirs
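For context on the Utils.chmod700(appDir) call added above, here is a rough sketch of what an owner-only permission helper could look like using only java.io.File's permission setters; the object name PermissionSketch is hypothetical, and Spark's real Utils.chmod700 may be implemented differently.

import java.io.File

object PermissionSketch {
  // Restrict `file` to its owner, roughly the effect of `chmod 700`: each
  // setter pair first clears the permission for everybody, then re-grants
  // it to the owner only. Returns true only if every call succeeded.
  def chmod700(file: File): Boolean = {
    file.setReadable(false, false) &&
    file.setReadable(true, true) &&
    file.setWritable(false, false) &&
    file.setWritable(true, true) &&
    file.setExecutable(false, false) &&
    file.setExecutable(true, true)
  }
}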
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
index 5f537692a1..56a33d5ca7 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
@@ -133,7 +133,6 @@ private[spark] class DiskBlockManager(blockManager: BlockManager, conf: SparkConf)
     Utils.getConfiguredLocalDirs(conf).flatMap { rootDir =>
       try {
         val localDir = Utils.createDirectory(rootDir, "blockmgr")
-        Utils.chmod700(localDir)
         logInfo(s"Created local directory at $localDir")
         Some(localDir)
       } catch {
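To see what the change leaves behind, here is a hypothetical inspection snippet (not part of the patch; the names BlockMgrDirPermissions and describe are made up). Without the chmod700 call, a blockmgr directory keeps whatever permissions the enclosing cluster-manager-created directory and the process umask produce, which, per the commit message, is what lets the shuffle service read the files inside it.

import java.io.File
import java.nio.file.Files
import java.nio.file.attribute.PosixFilePermission
import scala.collection.JavaConverters._

object BlockMgrDirPermissions {
  // Return the effective POSIX permission set of a directory.
  def describe(dir: File): Set[PosixFilePermission] =
    Files.getPosixFilePermissions(dir.toPath).asScala.toSet

  def main(args: Array[String]): Unit = {
    // Pass one or more directory paths, e.g. a spark.local.dir subdirectory.
    args.foreach { path =>
      val dir = new File(path)
      println(s"$dir -> ${describe(dir).mkString(", ")}")
    }
  }
}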