 python/docs/index.rst          |  1 -
 python/docs/pyspark.sql.rst    |  2 --
 python/pyspark/sql/__init__.py | 11 +++++------
 3 files changed, 5 insertions(+), 9 deletions(-)
diff --git a/python/docs/index.rst b/python/docs/index.rst
index 306ffdb0e0..421c8de86a 100644
--- a/python/docs/index.rst
+++ b/python/docs/index.rst
@@ -50,4 +50,3 @@ Indices and tables
 ==================
 
 * :ref:`search`
-
diff --git a/python/docs/pyspark.sql.rst b/python/docs/pyspark.sql.rst
index 3be9533c12..09848b8801 100644
--- a/python/docs/pyspark.sql.rst
+++ b/python/docs/pyspark.sql.rst
@@ -8,14 +8,12 @@ Module Context
     :members:
     :undoc-members:
 
-
 pyspark.sql.types module
 ------------------------
 .. automodule:: pyspark.sql.types
     :members:
     :undoc-members:
 
-
 pyspark.sql.functions module
 ----------------------------
 .. automodule:: pyspark.sql.functions
diff --git a/python/pyspark/sql/__init__.py b/python/pyspark/sql/__init__.py
index cff73ff192..22ec416f6c 100644
--- a/python/pyspark/sql/__init__.py
+++ b/python/pyspark/sql/__init__.py
@@ -18,7 +18,7 @@
 """
 Important classes of Spark SQL and DataFrames:
 
-    - :class:`pyspark.sql.SQLContext`
+    - :class:`pyspark.sql.SparkSession`
       Main entry point for :class:`DataFrame` and SQL functionality.
     - :class:`pyspark.sql.DataFrame`
       A distributed collection of data grouped into named columns.
@@ -26,8 +26,6 @@ Important classes of Spark SQL and DataFrames:
       A column expression in a :class:`DataFrame`.
     - :class:`pyspark.sql.Row`
       A row of data in a :class:`DataFrame`.
-    - :class:`pyspark.sql.HiveContext`
-      Main entry point for accessing data stored in Apache Hive.
     - :class:`pyspark.sql.GroupedData`
       Aggregation methods, returned by :func:`DataFrame.groupBy`.
     - :class:`pyspark.sql.DataFrameNaFunctions`
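
The docstring hunks above make SparkSession, rather than SQLContext or HiveContext, the documented entry point. As a hedged illustration of how the classes listed in that docstring fit together (the app name and sample rows are made up, and a working Spark installation is assumed):

    from pyspark.sql import SparkSession, Row

    # SparkSession is the unified entry point for DataFrame and SQL functionality.
    spark = SparkSession.builder.appName("docstring-example").getOrCreate()

    # A DataFrame is a distributed collection of data grouped into named columns;
    # each element here is a Row.
    df = spark.createDataFrame([Row(name="Alice", age=2), Row(name="Bob", age=5)])

    # Column expressions in a filter, and GroupedData returned by DataFrame.groupBy().
    df.filter(df.age > 1).groupBy("name").count().show()

    spark.stop()
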
@@ -45,7 +43,7 @@ from __future__ import absolute_import
 from pyspark.sql.types import Row
-from pyspark.sql.context import SQLContext, HiveContext
+from pyspark.sql.context import SQLContext, HiveContext, UDFRegistration
 from pyspark.sql.session import SparkSession
 from pyspark.sql.column import Column
 from pyspark.sql.dataframe import DataFrame, DataFrameNaFunctions, DataFrameStatFunctions
@@ -55,7 +53,8 @@ from pyspark.sql.window import Window, WindowSpec
 
 
 __all__ = [
-    'SparkSession', 'SQLContext', 'HiveContext', 'DataFrame', 'GroupedData', 'Column',
-    'Row', 'DataFrameNaFunctions', 'DataFrameStatFunctions', 'Window', 'WindowSpec',
+    'SparkSession', 'SQLContext', 'HiveContext', 'UDFRegistration',
+    'DataFrame', 'GroupedData', 'Column', 'Row',
+    'DataFrameNaFunctions', 'DataFrameStatFunctions', 'Window', 'WindowSpec',
     'DataFrameReader', 'DataFrameWriter'
 ]
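
With UDFRegistration now imported in __init__.py and listed in __all__, it can be referenced directly from pyspark.sql, for example to document or type-check the object returned by SparkSession.udf. A minimal sketch, assuming a Spark version in which spark.udf returns a UDFRegistration instance (the UDF name below is illustrative):

    from pyspark.sql import SparkSession, UDFRegistration

    spark = SparkSession.builder.getOrCreate()

    # SparkSession.udf exposes the UDFRegistration used to register Python UDFs.
    assert isinstance(spark.udf, UDFRegistration)

    # Register a simple UDF and call it from SQL; the default return type is StringType.
    spark.udf.register("plus_one", lambda x: x + 1)
    spark.sql("SELECT plus_one(1) AS result").show()

    spark.stop()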