diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index 81f3d1d36a342832a61c4073cd52f9daadf65f98..4d48ef694d68fb97a370412e90c232e3515bc184 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -1555,6 +1555,12 @@ class SQLTests(ReusedPySparkTestCase):
         self.assertEqual(now, now1)
         self.assertEqual(now, utcnow1)
 
+    # regression test for SPARK-19561: a timestamp at the epoch must round-trip intact
+    def test_datetime_at_epoch(self):
+        epoch = datetime.datetime.fromtimestamp(0)
+        df = self.spark.createDataFrame([Row(date=epoch)])
+        self.assertEqual(df.first()['date'], epoch)
+
     def test_decimal(self):
         from decimal import Decimal
         schema = StructType([StructField("decimal", DecimalType(10, 5))])
diff --git a/python/pyspark/sql/types.py b/python/pyspark/sql/types.py
index 26b54a7fb370965312c66e4a477319f798364a0e..1d31f25efad5210c0be8d114e8c8f0420b380f73 100644
--- a/python/pyspark/sql/types.py
+++ b/python/pyspark/sql/types.py
@@ -189,7 +189,7 @@ class TimestampType(AtomicType):
         if dt is not None:
             seconds = (calendar.timegm(dt.utctimetuple()) if dt.tzinfo
                        else time.mktime(dt.timetuple()))
-            return int(seconds) * 1000000 + dt.microsecond
+            return long(seconds) * 1000000 + dt.microsecond
 
     def fromInternal(self, ts):
         if ts is not None:
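
Why the one-word cast fixes SPARK-19561 (editor's note, not part of the patch):
in Python 2, an `int` that fits in 32 bits is pickled with a BININT-family
opcode, which Pyrolite unpickles as a java.lang.Integer on the JVM side, where
TimestampType expects a Long. Timestamps within roughly 2^31 microseconds
(about 36 minutes) of the epoch hit that path; casting to `long` forces the
LONG1 opcode regardless of magnitude. A minimal Python 2 sketch of the
difference (the variable name `micros` is illustrative):

    import pickle

    micros = 0  # a timestamp at the epoch, in microseconds
    print(repr(pickle.dumps(int(micros), 2)))   # '\x80\x02K\x00.'   -> BININT1, Integer on the JVM
    print(repr(pickle.dumps(long(micros), 2)))  # '\x80\x02\x8a\x00.' -> LONG1, Long on the JVM

Under Python 3 the cast stays valid because pyspark/sql/types.py aliases
`long = int` for compatibility.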