diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index 520b09d9c6f167c075a3d2da9c8216b3d123001c..fc41701b5922dbf5168ed0a3aa84a09ac1b3ce24 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -178,6 +178,11 @@ class DataTypeTests(unittest.TestCase):
         dt = DateType()
         self.assertEqual(dt.fromInternal(0), datetime.date(1970, 1, 1))
 
+    # regression test for SPARK-17035
+    def test_timestamp_microsecond(self):
+        tst = TimestampType()
+        self.assertEqual(tst.toInternal(datetime.datetime.max) % 1000000, 999999)
+
     def test_empty_row(self):
         row = Row()
         self.assertEqual(len(row), 0)
diff --git a/python/pyspark/sql/types.py b/python/pyspark/sql/types.py
index b765472d6edbc1be9a2be23de7a2d1d9bac001f3..11b1e60ee74f4b0369c69eac07272f673953fac6 100644
--- a/python/pyspark/sql/types.py
+++ b/python/pyspark/sql/types.py
@@ -189,7 +189,7 @@ class TimestampType(AtomicType):
         if dt is not None:
             seconds = (calendar.timegm(dt.utctimetuple()) if dt.tzinfo
                        else time.mktime(dt.timetuple()))
-            return int(seconds * 1e6 + dt.microsecond)
+            return int(seconds) * 1000000 + dt.microsecond
 
     def fromInternal(self, ts):
         if ts is not None:
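
Note (not part of the patch): a minimal sketch of why the switch to pure integer arithmetic matters. The constant 253402300799 below is assumed to be roughly datetime.datetime.max expressed as whole seconds since the Unix epoch; at that magnitude the old expression's seconds * 1e6 is a double beyond the 2**53 range where every integer is representable, so the microsecond component gets rounded away before int() is applied.

seconds = 253402300799      # roughly datetime.datetime.max as whole seconds since the epoch (assumed value)
microsecond = 999999

# Old expression: float multiplication, then addition, then truncation.
# The double rounds to the nearest representable value, losing the microseconds.
old = int(seconds * 1e6 + microsecond)

# New expression: arbitrary-precision integer arithmetic, exact at any magnitude.
new = int(seconds) * 1000000 + microsecond

print(old % 1000000)   # 0 on a typical 64-bit double: the microseconds are lost
print(new % 1000000)   # 999999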