diff options
author | Aaron Davidson <aaron@databricks.com> | 2013-09-05 23:36:27 -0700 |
---|---|---|
committer | Aaron Davidson <aaron@databricks.com> | 2013-09-05 23:36:27 -0700 |
commit | a63d4c7dc2970900b116f7287e3d6b302d9d5698 (patch) | |
tree | dc30e2dd8435e8a3cb95717c39e491be630fcae3 /python/pyspark/context.py | |
parent | 714e7f9e32590c302ad315b7cbee72b2e8b32b9b (diff) | |
download | spark-a63d4c7dc2970900b116f7287e3d6b302d9d5698.tar.gz spark-a63d4c7dc2970900b116f7287e3d6b302d9d5698.tar.bz2 spark-a63d4c7dc2970900b116f7287e3d6b302d9d5698.zip |
SPARK-660: Add StorageLevel support in Python
It uses reflection... I am not proud of that fact, but it at least ensures
compatibility (sans refactoring of the StorageLevel stuff).
Diffstat (limited to 'python/pyspark/context.py')
-rw-r--r-- | python/pyspark/context.py | 14 |
1 file changed, 14 insertions, 0 deletions
class StorageLevelReader:
    """
    Mimics the Scala StorageLevel by directing all attribute requests
    (e.g., StorageLevel.DISK_ONLY) to the JVM for reflection.
    """

    def __init__(self, sc):
        # SparkContext whose JVM gateway (`sc._jvm`) is used for the
        # reflective constant lookup.
        self.sc = sc

    def __getattr__(self, name):
        """Resolve *name* as a Scala StorageLevel constant via the JVM.

        Raises AttributeError when the JVM has no StorageLevel constant
        with that name. (The previous behavior — printing a message and
        implicitly returning None — violated the __getattr__ contract and
        silently masked typos such as DISK_ONLYY.)
        """
        try:
            return self.sc._jvm.PythonRDD.getStorageLevel(name)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt /
            # SystemExit are no longer swallowed.
            raise AttributeError("Failed to find StorageLevel: %s" % name)