diff options
author | Prashant Sharma <prashant.s@imaginea.com> | 2014-01-03 12:12:04 +0530 |
---|---|---|
committer | Prashant Sharma <prashant.s@imaginea.com> | 2014-01-03 12:12:04 +0530 |
commit | b4bb80002bbf0ac3642c78ae9e5c260b5da4a4cc (patch) | |
tree | 0987c307777ba5947b43aee59233df6f3568a783 /python/pyspark/__init__.py | |
parent | 08ec10de1767ca543047b79c40ab50a04ce5df2f (diff) | |
parent | 498a5f0a1c6e82a33c2ad8c48b68bbdb8da57a95 (diff) | |
download | spark-b4bb80002bbf0ac3642c78ae9e5c260b5da4a4cc.tar.gz spark-b4bb80002bbf0ac3642c78ae9e5c260b5da4a4cc.tar.bz2 spark-b4bb80002bbf0ac3642c78ae9e5c260b5da4a4cc.zip |
Merge branch 'master' into spark-1002-remove-jars
Diffstat (limited to 'python/pyspark/__init__.py')
-rw-r--r-- | python/pyspark/__init__.py | 32 |
1 file changed, 19 insertions, 13 deletions
diff --git a/python/pyspark/__init__.py b/python/pyspark/__init__.py index 1f35f6f939..2b2c3a061a 100644 --- a/python/pyspark/__init__.py +++ b/python/pyspark/__init__.py @@ -20,28 +20,34 @@ PySpark is the Python API for Spark. Public classes: - - L{SparkContext<pyspark.context.SparkContext>} - Main entry point for Spark functionality. - - L{RDD<pyspark.rdd.RDD>} - A Resilient Distributed Dataset (RDD), the basic abstraction in Spark. - - L{Broadcast<pyspark.broadcast.Broadcast>} - A broadcast variable that gets reused across tasks. - - L{Accumulator<pyspark.accumulators.Accumulator>} - An "add-only" shared variable that tasks can only add values to. - - L{SparkFiles<pyspark.files.SparkFiles>} - Access files shipped with jobs. - - L{StorageLevel<pyspark.storagelevel.StorageLevel>} - Finer-grained cache persistence levels. + - L{SparkContext<pyspark.context.SparkContext>} + Main entry point for Spark functionality. + - L{RDD<pyspark.rdd.RDD>} + A Resilient Distributed Dataset (RDD), the basic abstraction in Spark. + - L{Broadcast<pyspark.broadcast.Broadcast>} + A broadcast variable that gets reused across tasks. + - L{Accumulator<pyspark.accumulators.Accumulator>} + An "add-only" shared variable that tasks can only add values to. + - L{SparkConf<pyspark.conf.SparkConf>} + For configuring Spark. + - L{SparkFiles<pyspark.files.SparkFiles>} + Access files shipped with jobs. + - L{StorageLevel<pyspark.storagelevel.StorageLevel>} + Finer-grained cache persistence levels. """ + + + import sys import os sys.path.insert(0, os.path.join(os.environ["SPARK_HOME"], "python/lib/py4j0.7.egg")) +from pyspark.conf import SparkConf from pyspark.context import SparkContext from pyspark.rdd import RDD from pyspark.files import SparkFiles from pyspark.storagelevel import StorageLevel -__all__ = ["SparkContext", "RDD", "SparkFiles", "StorageLevel"] +__all__ = ["SparkConf", "SparkContext", "RDD", "SparkFiles", "StorageLevel"] |