about summary refs log tree commit diff
path: root/python/pyspark/files.py
diff options
context:
space:
mode:
Diffstat (limited to 'python/pyspark/files.py')
-rw-r--r--  python/pyspark/files.py  20
1 file changed, 17 insertions(+), 3 deletions(-)
diff --git a/python/pyspark/files.py b/python/pyspark/files.py
index de1334f046..98f6a399cc 100644
--- a/python/pyspark/files.py
+++ b/python/pyspark/files.py
@@ -4,13 +4,15 @@ import os
class SparkFiles(object):
"""
Resolves paths to files added through
- L{addFile()<pyspark.context.SparkContext.addFile>}.
+ L{SparkContext.addFile()<pyspark.context.SparkContext.addFile>}.
SparkFiles contains only classmethods; users should not create SparkFiles
instances.
"""
_root_directory = None
+ _is_running_on_worker = False
+ _sc = None
def __init__(self):
raise NotImplementedError("Do not construct SparkFiles objects")
@@ -18,7 +20,19 @@ class SparkFiles(object):
@classmethod
def get(cls, filename):
"""
- Get the absolute path of a file added through C{addFile()}.
+ Get the absolute path of a file added through C{SparkContext.addFile()}.
"""
- path = os.path.join(SparkFiles._root_directory, filename)
+ path = os.path.join(SparkFiles.getRootDirectory(), filename)
return os.path.abspath(path)
+
+ @classmethod
+ def getRootDirectory(cls):
+ """
+ Get the root directory that contains files added through
+ C{SparkContext.addFile()}.
+ """
+ if cls._is_running_on_worker:
+ return cls._root_directory
+ else:
+ # This will have to change if we support multiple SparkContexts:
+ return cls._sc.jvm.spark.SparkFiles.getRootDirectory()