diff options
author | Josh Rosen <joshrosen@eecs.berkeley.edu> | 2013-01-01 13:52:14 -0800 |
---|---|---|
committer | Josh Rosen <joshrosen@eecs.berkeley.edu> | 2013-01-01 13:52:14 -0800 |
commit | 170e451fbdd308ae77065bd9c0f2bd278abf0cb7 (patch) | |
tree | da3df59e2262dac4b381227d5bc712502249d746 /pyspark/pyspark/__init__.py | |
parent | 6f6a6b79c4c3f3555f8ff427c91e714d02afe8fa (diff) | |
download | spark-170e451fbdd308ae77065bd9c0f2bd278abf0cb7.tar.gz spark-170e451fbdd308ae77065bd9c0f2bd278abf0cb7.tar.bz2 spark-170e451fbdd308ae77065bd9c0f2bd278abf0cb7.zip |
Minor documentation and style fixes for PySpark.
Diffstat (limited to 'pyspark/pyspark/__init__.py')
-rw-r--r-- | pyspark/pyspark/__init__.py | 13 |
1 file changed, 12 insertions, 1 deletion
diff --git a/pyspark/pyspark/__init__.py b/pyspark/pyspark/__init__.py
index 8f8402b62b..1ab360a666 100644
--- a/pyspark/pyspark/__init__.py
+++ b/pyspark/pyspark/__init__.py
@@ -1,9 +1,20 @@
+"""
+PySpark is a Python API for Spark.
+
+Public classes:
+
+    - L{SparkContext<pyspark.context.SparkContext>}
+        Main entry point for Spark functionality.
+    - L{RDD<pyspark.rdd.RDD>}
+        A Resilient Distributed Dataset (RDD), the basic abstraction in Spark.
+"""
 import sys
 import os
 sys.path.insert(0, os.path.join(os.environ["SPARK_HOME"], "pyspark/lib/py4j0.7.egg"))
 
 from pyspark.context import SparkContext
+from pyspark.rdd import RDD
 
-__all__ = ["SparkContext"]
+__all__ = ["SparkContext", "RDD"]