Diffstat (limited to 'python/pyspark')
-rw-r--r--  python/pyspark/sql/streaming.py | 3 ++-
-rw-r--r--  python/pyspark/sql/utils.py     | 2 +-
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/python/pyspark/sql/streaming.py b/python/pyspark/sql/streaming.py
index 8238b8e7cd..cd75622ced 100644
--- a/python/pyspark/sql/streaming.py
+++ b/python/pyspark/sql/streaming.py
@@ -201,7 +201,8 @@ class ProcessingTime(Trigger):
         self.interval = interval
 
     def _to_java_trigger(self, sqlContext):
-        return sqlContext._sc._jvm.org.apache.spark.sql.ProcessingTime.create(self.interval)
+        return sqlContext._sc._jvm.org.apache.spark.sql.streaming.ProcessingTime.create(
+            self.interval)
 
 
 def _test():
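
For context, ProcessingTime._to_java_trigger only translates the Python-side trigger object into its JVM counterpart through py4j; user code normally never calls it directly. Below is a minimal usage sketch against the Structured Streaming writer API, assuming a running SparkSession; the "rate" source and parts of the writer API shown here may be newer than the tree this diff targets, so treat it as illustrative only.

from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("processing-time-trigger").getOrCreate()

# Any streaming DataFrame works; the built-in "rate" source is used purely
# for illustration and may not exist in the tree this diff targets.
stream_df = spark.readStream.format("rate").load()

# trigger(processingTime=...) builds a ProcessingTime trigger under the hood;
# when the query starts, _to_java_trigger converts it into
# org.apache.spark.sql.streaming.ProcessingTime on the JVM side.
query = (stream_df.writeStream
         .format("console")
         .trigger(processingTime="5 seconds")
         .start())

query.awaitTermination(10)
query.stop()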
diff --git a/python/pyspark/sql/utils.py b/python/pyspark/sql/utils.py
index 8c8768f50b..9ddaf78acf 100644
--- a/python/pyspark/sql/utils.py
+++ b/python/pyspark/sql/utils.py
@@ -71,7 +71,7 @@ def capture_sql_exception(f):
                 raise AnalysisException(s.split(': ', 1)[1], stackTrace)
             if s.startswith('org.apache.spark.sql.catalyst.parser.ParseException: '):
                 raise ParseException(s.split(': ', 1)[1], stackTrace)
-            if s.startswith('org.apache.spark.sql.ContinuousQueryException: '):
+            if s.startswith('org.apache.spark.sql.streaming.ContinuousQueryException: '):
                 raise ContinuousQueryException(s.split(': ', 1)[1], stackTrace)
             if s.startswith('org.apache.spark.sql.execution.QueryExecutionException: '):
                 raise QueryExecutionException(s.split(': ', 1)[1], stackTrace)
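
The utils.py hunk only updates the JVM class name that capture_sql_exception matches against; the surrounding code converts Py4J errors into PySpark exceptions by string-prefix dispatch on the JVM exception's toString(). Below is a condensed, self-contained sketch of that dispatch pattern; the class names mirror the real module, but convert_exception is an illustrative helper, not the actual implementation.

class CapturedException(Exception):
    """Mirrors the base exception class in pyspark.sql.utils."""
    def __init__(self, desc, stackTrace):
        self.desc = desc
        self.stackTrace = stackTrace

    def __str__(self):
        return self.desc


class ContinuousQueryException(CapturedException):
    """Raised when a continuous (streaming) query fails on the JVM side."""


def convert_exception(java_message, stack_trace):
    # Illustrative helper (not part of pyspark.sql.utils): dispatch on the
    # fully qualified JVM class name, using the post-rename prefix from
    # this diff.
    prefix = 'org.apache.spark.sql.streaming.ContinuousQueryException: '
    if java_message.startswith(prefix):
        return ContinuousQueryException(java_message.split(': ', 1)[1], stack_trace)
    return None


err = convert_exception(
    'org.apache.spark.sql.streaming.ContinuousQueryException: query terminated',
    '<jvm stack trace>')
print(type(err).__name__, '-', err)  # ContinuousQueryException - query terminated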