about summary refs log tree commit diff
path: root/python/pyspark/streaming/tests.py
diff options
context:
space:
mode:
author: Davies Liu <davies@databricks.com> 2015-06-01 14:40:08 -0700
committer: Tathagata Das <tathagata.das1565@gmail.com> 2015-06-01 14:40:08 -0700
commit: b7ab0299b03ae833d5811f380e4594837879f8ae (patch)
tree: 1f2db456f4609be0e06f1eb232b65305181801c1 /python/pyspark/streaming/tests.py
parent: e7c7e51f2ec158d12a8429f753225c746f92d513 (diff)
download: spark-b7ab0299b03ae833d5811f380e4594837879f8ae.tar.gz
          spark-b7ab0299b03ae833d5811f380e4594837879f8ae.tar.bz2
          spark-b7ab0299b03ae833d5811f380e4594837879f8ae.zip
[SPARK-7497] [PYSPARK] [STREAMING] fix streaming flaky tests
Increase the duration and timeout in streaming python tests.

Author: Davies Liu <davies@databricks.com>

Closes #6239 from davies/flaky_tests and squashes the following commits:

d6aee8f [Davies Liu] fix window tests
26317f7 [Davies Liu] Merge branch 'master' of github.com:apache/spark into flaky_tests
7947db6 [Davies Liu] fix streaming flaky tests
Diffstat (limited to 'python/pyspark/streaming/tests.py')
-rw-r--r-- python/pyspark/streaming/tests.py | 16
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/python/pyspark/streaming/tests.py b/python/pyspark/streaming/tests.py
index 33ea8c9293..46cb18b2e8 100644
--- a/python/pyspark/streaming/tests.py
+++ b/python/pyspark/streaming/tests.py
@@ -41,8 +41,8 @@ from pyspark.streaming.kafka import Broker, KafkaUtils, OffsetRange, TopicAndPar
class PySparkStreamingTestCase(unittest.TestCase):
- timeout = 4 # seconds
- duration = .2
+ timeout = 10 # seconds
+ duration = .5
@classmethod
def setUpClass(cls):
@@ -379,13 +379,13 @@ class BasicOperationTests(PySparkStreamingTestCase):
class WindowFunctionTests(PySparkStreamingTestCase):
- timeout = 5
+ timeout = 15
def test_window(self):
input = [range(1), range(2), range(3), range(4), range(5)]
def func(dstream):
- return dstream.window(.6, .2).count()
+ return dstream.window(1.5, .5).count()
expected = [[1], [3], [6], [9], [12], [9], [5]]
self._test_func(input, func, expected)
@@ -394,7 +394,7 @@ class WindowFunctionTests(PySparkStreamingTestCase):
input = [range(1), range(2), range(3), range(4), range(5)]
def func(dstream):
- return dstream.countByWindow(.6, .2)
+ return dstream.countByWindow(1.5, .5)
expected = [[1], [3], [6], [9], [12], [9], [5]]
self._test_func(input, func, expected)
@@ -403,7 +403,7 @@ class WindowFunctionTests(PySparkStreamingTestCase):
input = [range(1), range(2), range(3), range(4), range(5), range(6)]
def func(dstream):
- return dstream.countByWindow(1, .2)
+ return dstream.countByWindow(2.5, .5)
expected = [[1], [3], [6], [10], [15], [20], [18], [15], [11], [6]]
self._test_func(input, func, expected)
@@ -412,7 +412,7 @@ class WindowFunctionTests(PySparkStreamingTestCase):
input = [range(1), range(2), range(3), range(4), range(5), range(6)]
def func(dstream):
- return dstream.countByValueAndWindow(1, .2)
+ return dstream.countByValueAndWindow(2.5, .5)
expected = [[1], [2], [3], [4], [5], [6], [6], [6], [6], [6]]
self._test_func(input, func, expected)
@@ -421,7 +421,7 @@ class WindowFunctionTests(PySparkStreamingTestCase):
input = [[('a', i)] for i in range(5)]
def func(dstream):
- return dstream.groupByKeyAndWindow(.6, .2).mapValues(list)
+ return dstream.groupByKeyAndWindow(1.5, .5).mapValues(list)
expected = [[('a', [0])], [('a', [0, 1])], [('a', [0, 1, 2])], [('a', [1, 2, 3])],
[('a', [2, 3, 4])], [('a', [3, 4])], [('a', [4])]]