aboutsummaryrefslogtreecommitdiff
path: root/python/pyspark/streaming/tests.py
diff options
context:
space:
mode:
authorDavies Liu <davies@databricks.com>2015-04-21 00:08:18 -0700
committerReynold Xin <rxin@databricks.com>2015-04-21 00:08:18 -0700
commitab9128fb7ec7ca479dc91e7cc7c775e8d071eafa (patch)
tree88b7b9582617ef0fda39de8c04e9b0fdde030533 /python/pyspark/streaming/tests.py
parent8136810dfad12008ac300116df7bc8448740f1ae (diff)
downloadspark-ab9128fb7ec7ca479dc91e7cc7c775e8d071eafa.tar.gz
spark-ab9128fb7ec7ca479dc91e7cc7c775e8d071eafa.tar.bz2
spark-ab9128fb7ec7ca479dc91e7cc7c775e8d071eafa.zip
[SPARK-6949] [SQL] [PySpark] Support Date/Timestamp in Column expression
This PR enable auto_convert in JavaGateway, then we could register a converter for a given types, for example, date and datetime. There are two bugs related to auto_convert, see [1] and [2], we workaround it in this PR. [1] https://github.com/bartdag/py4j/issues/160 [2] https://github.com/bartdag/py4j/issues/161 cc rxin JoshRosen Author: Davies Liu <davies@databricks.com> Closes #5570 from davies/py4j_date and squashes the following commits: eb4fa53 [Davies Liu] fix tests in python 3 d17d634 [Davies Liu] rollback changes in mllib 2e7566d [Davies Liu] convert tuple into ArrayList ceb3779 [Davies Liu] Update rdd.py 3c373f3 [Davies Liu] support date and datetime by auto_convert cb094ff [Davies Liu] enable auto convert
Diffstat (limited to 'python/pyspark/streaming/tests.py')
-rw-r--r--python/pyspark/streaming/tests.py6
1 file changed, 1 insertion, 5 deletions
diff --git a/python/pyspark/streaming/tests.py b/python/pyspark/streaming/tests.py
index 06d2215437..33f958a601 100644
--- a/python/pyspark/streaming/tests.py
+++ b/python/pyspark/streaming/tests.py
@@ -24,8 +24,6 @@ import tempfile
import struct
from functools import reduce
-from py4j.java_collections import MapConverter
-
from pyspark.context import SparkConf, SparkContext, RDD
from pyspark.streaming.context import StreamingContext
from pyspark.streaming.kafka import KafkaUtils
@@ -581,11 +579,9 @@ class KafkaStreamTests(PySparkStreamingTestCase):
"""Test the Python Kafka stream API."""
topic = "topic1"
sendData = {"a": 3, "b": 5, "c": 10}
- jSendData = MapConverter().convert(sendData,
- self.ssc.sparkContext._gateway._gateway_client)
self._kafkaTestUtils.createTopic(topic)
- self._kafkaTestUtils.sendMessages(topic, jSendData)
+ self._kafkaTestUtils.sendMessages(topic, sendData)
stream = KafkaUtils.createStream(self.ssc, self._kafkaTestUtils.zkAddress(),
"test-streaming-consumer", {topic: 1},