about summary refs log tree commit diff
path: root/python
diff options
context:
space:
mode:
authorReynold Xin <rxin@databricks.com>2016-06-01 10:14:40 -0700
committerMichael Armbrust <michael@databricks.com>2016-06-01 10:14:40 -0700
commita71d1364ae87aa388128da34dd0b9b02ff85e458 (patch)
tree01ea1e6aae1a2d7f76996374d7ac77bb5bddb2b6 /python
parentd5012c274036463c47a751cfe9977ade3a68b668 (diff)
downloadspark-a71d1364ae87aa388128da34dd0b9b02ff85e458.tar.gz
spark-a71d1364ae87aa388128da34dd0b9b02ff85e458.tar.bz2
spark-a71d1364ae87aa388128da34dd0b9b02ff85e458.zip
[SPARK-15686][SQL] Move user-facing streaming classes into sql.streaming
## What changes were proposed in this pull request?

This patch moves all user-facing structured streaming classes into sql.streaming. As part of this, I also added some since version annotation to methods and classes that don't have them.

## How was this patch tested?

Updated tests to reflect the moves.

Author: Reynold Xin <rxin@databricks.com>

Closes #13429 from rxin/SPARK-15686.
Diffstat (limited to 'python')
-rw-r--r--  python/pyspark/sql/streaming.py | 3
-rw-r--r--  python/pyspark/sql/utils.py     | 2
2 files changed, 3 insertions, 2 deletions
diff --git a/python/pyspark/sql/streaming.py b/python/pyspark/sql/streaming.py
index 8238b8e7cd..cd75622ced 100644
--- a/python/pyspark/sql/streaming.py
+++ b/python/pyspark/sql/streaming.py
@@ -201,7 +201,8 @@ class ProcessingTime(Trigger):
self.interval = interval
def _to_java_trigger(self, sqlContext):
- return sqlContext._sc._jvm.org.apache.spark.sql.ProcessingTime.create(self.interval)
+ return sqlContext._sc._jvm.org.apache.spark.sql.streaming.ProcessingTime.create(
+ self.interval)
def _test():
diff --git a/python/pyspark/sql/utils.py b/python/pyspark/sql/utils.py
index 8c8768f50b..9ddaf78acf 100644
--- a/python/pyspark/sql/utils.py
+++ b/python/pyspark/sql/utils.py
@@ -71,7 +71,7 @@ def capture_sql_exception(f):
raise AnalysisException(s.split(': ', 1)[1], stackTrace)
if s.startswith('org.apache.spark.sql.catalyst.parser.ParseException: '):
raise ParseException(s.split(': ', 1)[1], stackTrace)
- if s.startswith('org.apache.spark.sql.ContinuousQueryException: '):
+ if s.startswith('org.apache.spark.sql.streaming.ContinuousQueryException: '):
raise ContinuousQueryException(s.split(': ', 1)[1], stackTrace)
if s.startswith('org.apache.spark.sql.execution.QueryExecutionException: '):
raise QueryExecutionException(s.split(': ', 1)[1], stackTrace)