diff --git a/python/pyspark/sql/dataframe.py b/python/pyspark/sql/dataframe.py
index b4fa8368936a48430b124e0faafa10c17d4f134e..328bda6601565124e4d254aa56033fec4db8c877 100644
--- a/python/pyspark/sql/dataframe.py
+++ b/python/pyspark/sql/dataframe.py
@@ -326,9 +326,11 @@ class DataFrame(object):
         return self
 
     @since(1.3)
-    def unpersist(self, blocking=True):
+    def unpersist(self, blocking=False):
         """Marks the :class:`DataFrame` as non-persistent, and remove all blocks for it from
         memory and disk.
+
+        .. note:: The default value of ``blocking`` was changed to ``False`` in 2.0 to match the Scala API.
         """
         self.is_cached = False
         self._jdf.unpersist(blocking)
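
For context, a minimal sketch of how the new default looks from the caller's side
(illustrative only; assumes a local SparkSession on Spark 2.0+):

    from pyspark.sql import SparkSession

    spark = SparkSession.builder.master("local[2]").getOrCreate()
    df = spark.range(10)

    df.persist()
    df.unpersist()       # returns immediately; blocks are removed asynchronously

    df.persist()
    df.unpersist(True)   # pass True explicitly to restore the old blocking behavior
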
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index e4f79c911c0d90c79bef2f17e2da072ea97bfd5c..d4c221d7125ca808cea3b36cb8d6c58189fabf86 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -362,7 +362,7 @@ class SQLTests(ReusedPySparkTestCase):
         # cache and checkpoint
         self.assertFalse(df.is_cached)
         df.persist()
-        df.unpersist()
+        df.unpersist(True)
         df.cache()
         self.assertTrue(df.is_cached)
         self.assertEqual(2, df.count())
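
The test now passes blocking=True so the unpersist completes before the DataFrame is
cached again, keeping the assertions deterministic. A condensed sketch of the sequence
(test scaffolding omitted; reuses the spark session from the sketch above; the two-row
DataFrame is illustrative):

    df = spark.range(2)

    assert not df.is_cached
    df.persist()
    df.unpersist(True)   # blocking=True: wait until the blocks are actually freed
    df.cache()
    assert df.is_cached
    assert df.count() == 2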