author     prabinb <prabin.banka@imaginea.com>    2014-03-11 23:57:05 -0700
committer  Patrick Wendell <pwendell@gmail.com>   2014-03-11 23:57:05 -0700
commit     af7f2f10902c7b42e08797f7467dd06e4803594c (patch)
tree       595b6989fc69b900d2d4f120d915d6f8a453c7d3 /python/pyspark/rdd.py
parent     2409af9dcf238e1ad87080a389e05a696c41dc72 (diff)
Spark-1163, Added missing Python RDD functions
Author: prabinb <prabin.banka@imaginea.com>

Closes #92 from prabinb/python-api-rdd and squashes the following commits:

51129ca [prabinb] Added missing Python RDD functions.
Added __repr__ function to StorageLevel class.
Added doctest for RDD.getStorageLevel().
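
For reference, a minimal usage sketch of the RDD methods this commit adds (the local SparkContext setup and app name are assumptions for illustration, not part of the commit):

    from pyspark import SparkContext

    sc = SparkContext("local", "rdd-name-example")  # assumed local setup
    rdd = sc.parallelize([1, 2])
    rdd.setName('RDD1')                  # attach a name to this RDD
    print(rdd.name())                    # 'RDD1'
    print(rdd.toDebugString())           # description of this RDD and its dependencies
    print(rdd.getStorageLevel())         # StorageLevel(False, False, False, 1) until persisted
    sc.stop()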
Diffstat (limited to 'python/pyspark/rdd.py')
-rw-r--r--  python/pyspark/rdd.py | 42
1 file changed, 42 insertions, 0 deletions
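
The diffstat above is limited to python/pyspark/rdd.py, so the StorageLevel.__repr__ addition mentioned in the commit message is not shown in this diff. A minimal sketch, assuming StorageLevel keeps its useDisk, useMemory, deserialized, and replication attributes, that would produce the StorageLevel(False, False, False, 1) doctest output seen below:

    # hypothetical sketch of the pyspark.storagelevel change referenced in the commit message
    def __repr__(self):
        return "StorageLevel(%s, %s, %s, %s)" % (
            self.useDisk, self.useMemory, self.deserialized, self.replication)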
diff --git a/python/pyspark/rdd.py b/python/pyspark/rdd.py
index 39916d21c7..0f28dbd6fc 100644
--- a/python/pyspark/rdd.py
+++ b/python/pyspark/rdd.py
@@ -36,6 +36,7 @@ from pyspark.join import python_join, python_left_outer_join, \
python_right_outer_join, python_cogroup
from pyspark.statcounter import StatCounter
from pyspark.rddsampler import RDDSampler
+from pyspark.storagelevel import StorageLevel
from py4j.java_collections import ListConverter, MapConverter
@@ -1119,6 +1120,47 @@ class RDD(object):
other._jrdd_deserializer)
return RDD(pairRDD, self.ctx, deserializer)
+ def name(self):
+ """
+ Return the name of this RDD.
+ """
+ name_ = self._jrdd.name()
+ if not name_:
+ return None
+ return name_.encode('utf-8')
+
+ def setName(self, name):
+ """
+ Assign a name to this RDD.
+ >>> rdd1 = sc.parallelize([1,2])
+ >>> rdd1.setName('RDD1')
+ >>> rdd1.name()
+ 'RDD1'
+ """
+ self._jrdd.setName(name)
+
+ def toDebugString(self):
+ """
+ A description of this RDD and its recursive dependencies for debugging.
+ """
+ debug_string = self._jrdd.toDebugString()
+ if not debug_string:
+ return None
+ return debug_string.encode('utf-8')
+
+ def getStorageLevel(self):
+ """
+ Get the RDD's current storage level.
+ >>> rdd1 = sc.parallelize([1,2])
+ >>> rdd1.getStorageLevel()
+ StorageLevel(False, False, False, 1)
+ """
+ java_storage_level = self._jrdd.getStorageLevel()
+ storage_level = StorageLevel(java_storage_level.useDisk(),
+ java_storage_level.useMemory(),
+ java_storage_level.deserialized(),
+ java_storage_level.replication())
+ return storage_level
# TODO: `lookup` is disabled because we can't make direct comparisons based
# on the key; we need to compare the hash of the key to the hash of the