about summary refs log tree commit diff
path: root/python/pyspark
diff options
context:
space:
mode:
authorMatthew Rocklin <mrocklin@gmail.com>2014-09-08 15:45:28 -0700
committerMichael Armbrust <michael@databricks.com>2014-09-08 15:45:36 -0700
commit939a322c85956eda150b10afb2ed1d8d959a7bdf (patch)
tree8e717dcb85af3e3970682cf0325c0800331221cb /python/pyspark
parent26bc7655de18ab0191ded3f75cb77bc756dc1c03 (diff)
downloadspark-939a322c85956eda150b10afb2ed1d8d959a7bdf.tar.gz
spark-939a322c85956eda150b10afb2ed1d8d959a7bdf.tar.bz2
spark-939a322c85956eda150b10afb2ed1d8d959a7bdf.zip
[SPARK-3417] Use new-style classes in PySpark
Tiny PR making SQLContext a new-style class. This allows various type logic to work more effectively.

```python
In [1]: import pyspark
In [2]: pyspark.sql.SQLContext.mro()
Out[2]: [pyspark.sql.SQLContext, object]
```

Author: Matthew Rocklin <mrocklin@gmail.com>

Closes #2288 from mrocklin/sqlcontext-new-style-class and squashes the following commits:

4aadab6 [Matthew Rocklin] update other old-style classes
a2dc02f [Matthew Rocklin] pyspark.sql.SQLContext is new-style class
Diffstat (limited to 'python/pyspark')
-rw-r--r--  python/pyspark/mllib/random.py  | 2
-rw-r--r--  python/pyspark/mllib/util.py    | 2
-rw-r--r--  python/pyspark/sql.py           | 2
-rw-r--r--  python/pyspark/storagelevel.py  | 2
4 files changed, 4 insertions, 4 deletions
diff --git a/python/pyspark/mllib/random.py b/python/pyspark/mllib/random.py
index 3e59c73db8..d53c95fd59 100644
--- a/python/pyspark/mllib/random.py
+++ b/python/pyspark/mllib/random.py
@@ -28,7 +28,7 @@ from pyspark.serializers import NoOpSerializer
__all__ = ['RandomRDDs', ]
-class RandomRDDs:
+class RandomRDDs(object):
"""
Generator methods for creating RDDs comprised of i.i.d samples from
some distribution.
diff --git a/python/pyspark/mllib/util.py b/python/pyspark/mllib/util.py
index 4962d05491..1c7b8c809a 100644
--- a/python/pyspark/mllib/util.py
+++ b/python/pyspark/mllib/util.py
@@ -25,7 +25,7 @@ from pyspark.rdd import RDD
from pyspark.serializers import NoOpSerializer
-class MLUtils:
+class MLUtils(object):
"""
Helper methods to load, save and pre-process data used in MLlib.
diff --git a/python/pyspark/sql.py b/python/pyspark/sql.py
index 004d4937cb..53eea6d6cf 100644
--- a/python/pyspark/sql.py
+++ b/python/pyspark/sql.py
@@ -899,7 +899,7 @@ def _create_cls(dataType):
return Row
-class SQLContext:
+class SQLContext(object):
"""Main entry point for Spark SQL functionality.
diff --git a/python/pyspark/storagelevel.py b/python/pyspark/storagelevel.py
index 2aa0fb9d2c..676aa0f714 100644
--- a/python/pyspark/storagelevel.py
+++ b/python/pyspark/storagelevel.py
@@ -18,7 +18,7 @@
__all__ = ["StorageLevel"]
-class StorageLevel:
+class StorageLevel(object):
"""
Flags for controlling the storage of an RDD. Each StorageLevel records whether to use memory,