aboutsummaryrefslogtreecommitdiff
path: root/python/pyspark/sql/session.py
diff options
context:
space:
mode:
authorTathagata Das <tathagata.das1565@gmail.com>2016-06-28 22:07:11 -0700
committerShixiong Zhu <shixiong@databricks.com>2016-06-28 22:07:11 -0700
commitf454a7f9f03807dd768319798daa1351bbfc7288 (patch)
treed6a8faadf29cfb5d2496f89187b5316375257fa2 /python/pyspark/sql/session.py
parent153c2f9ac12846367a09684fd875c496d350a603 (diff)
downloadspark-f454a7f9f03807dd768319798daa1351bbfc7288.tar.gz
spark-f454a7f9f03807dd768319798daa1351bbfc7288.tar.bz2
spark-f454a7f9f03807dd768319798daa1351bbfc7288.zip
[SPARK-16266][SQL][STREAMING] Moved DataStreamReader/Writer from pyspark.sql to pyspark.sql.streaming
## What changes were proposed in this pull request? - Moved DataStreamReader/Writer from pyspark.sql to pyspark.sql.streaming to make them consistent with Scala packaging. - Exposed the necessary classes in the sql.streaming package so that they appear in the docs. - Added the pyspark.sql.streaming module to the docs. ## How was this patch tested? - Updated unit tests. - Generated docs for testing visibility of pyspark.sql.streaming classes. Author: Tathagata Das <tathagata.das1565@gmail.com> Closes #13955 from tdas/SPARK-16266.
Diffstat (limited to 'python/pyspark/sql/session.py')
-rw-r--r--python/pyspark/sql/session.py3
1 file changed, 2 insertions, 1 deletion
diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index b4152a34ad..55f86a16f5 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -31,7 +31,8 @@ from pyspark.rdd import RDD, ignore_unicode_prefix
from pyspark.sql.catalog import Catalog
from pyspark.sql.conf import RuntimeConfig
from pyspark.sql.dataframe import DataFrame
-from pyspark.sql.readwriter import DataFrameReader, DataStreamReader
+from pyspark.sql.readwriter import DataFrameReader
+from pyspark.sql.streaming import DataStreamReader
from pyspark.sql.types import Row, DataType, StringType, StructType, _verify_type, \
_infer_schema, _has_nulltype, _merge_type, _create_converter, _parse_datatype_string
from pyspark.sql.utils import install_exception_handler