From f454a7f9f03807dd768319798daa1351bbfc7288 Mon Sep 17 00:00:00 2001 From: Tathagata Das Date: Tue, 28 Jun 2016 22:07:11 -0700 Subject: [SPARK-16266][SQL][STREAMING] Moved DataStreamReader/Writer from pyspark.sql to pyspark.sql.streaming ## What changes were proposed in this pull request? - Moved DataStreamReader/Writer from pyspark.sql to pyspark.sql.streaming to make them consistent with scala packaging - Exposed the necessary classes in sql.streaming package so that they appear in the docs - Added pyspark.sql.streaming module to the docs ## How was this patch tested? - updated unit tests. - generated docs for testing visibility of pyspark.sql.streaming classes. Author: Tathagata Das Closes #13955 from tdas/SPARK-16266. --- python/pyspark/sql/context.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'python/pyspark/sql/context.py') diff --git a/python/pyspark/sql/context.py b/python/pyspark/sql/context.py index b5dde13ed7..3503fb90c3 100644 --- a/python/pyspark/sql/context.py +++ b/python/pyspark/sql/context.py @@ -26,7 +26,8 @@ from pyspark import since from pyspark.rdd import ignore_unicode_prefix from pyspark.sql.session import _monkey_patch_RDD, SparkSession from pyspark.sql.dataframe import DataFrame -from pyspark.sql.readwriter import DataFrameReader, DataStreamReader +from pyspark.sql.readwriter import DataFrameReader +from pyspark.sql.streaming import DataStreamReader from pyspark.sql.types import Row, StringType from pyspark.sql.utils import install_exception_handler -- cgit v1.2.3