about summary refs log tree commit diff
path: root/python/pyspark/sql/context.py
diff options
context:
space:
mode:
Diffstat (limited to 'python/pyspark/sql/context.py')
-rw-r--r--  python/pyspark/sql/context.py  5
1 files changed, 4 insertions, 1 deletions
diff --git a/python/pyspark/sql/context.py b/python/pyspark/sql/context.py
index e8e60c6412..486733a390 100644
--- a/python/pyspark/sql/context.py
+++ b/python/pyspark/sql/context.py
@@ -34,7 +34,10 @@ __all__ = ["SQLContext", "HiveContext", "UDFRegistration"]
class SQLContext(object):
- """Wrapper around :class:`SparkSession`, the main entry point to Spark SQL functionality.
+ """The entry point for working with structured data (rows and columns) in Spark, in Spark 1.x.
+
+ As of Spark 2.0, this is replaced by :class:`SparkSession`. However, we are keeping the class
+ here for backward compatibility.
A SQLContext can be used create :class:`DataFrame`, register :class:`DataFrame` as
tables, execute SQL over tables, cache tables, and read parquet files.