diff options
-rw-r--r-- | python/pyspark/sql/tests.py | 6 |
-rw-r--r-- | python/pyspark/sql/types.py | 2 |
2 files changed, 1 insertion(+), 7 deletions(-)
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py index 4d48ef694d..81f3d1d36a 100644 --- a/python/pyspark/sql/tests.py +++ b/python/pyspark/sql/tests.py @@ -1555,12 +1555,6 @@ class SQLTests(ReusedPySparkTestCase): self.assertEqual(now, now1) self.assertEqual(now, utcnow1) - # regression test for SPARK-19561 - def test_datetime_at_epoch(self): - epoch = datetime.datetime.fromtimestamp(0) - df = self.spark.createDataFrame([Row(date=epoch)]) - self.assertEqual(df.first()['date'], epoch) - def test_decimal(self): from decimal import Decimal schema = StructType([StructField("decimal", DecimalType(10, 5))]) diff --git a/python/pyspark/sql/types.py b/python/pyspark/sql/types.py index 1d31f25efa..26b54a7fb3 100644 --- a/python/pyspark/sql/types.py +++ b/python/pyspark/sql/types.py @@ -189,7 +189,7 @@ class TimestampType(AtomicType): if dt is not None: seconds = (calendar.timegm(dt.utctimetuple()) if dt.tzinfo else time.mktime(dt.timetuple())) - return long(seconds) * 1000000 + dt.microsecond + return int(seconds) * 1000000 + dt.microsecond def fromInternal(self, ts): if ts is not None: |