diff --git a/python/pyspark/conf.py b/python/pyspark/conf.py
index b50590ab3b444709d0f36e1645e885f03df1f5e9..b4c82f519bd53605c98edfc66b5be42ab0081b30 100644
--- a/python/pyspark/conf.py
+++ b/python/pyspark/conf.py
@@ -100,6 +100,17 @@ class SparkConf(object):
         self._jconf.set(key, unicode(value))
         return self
 
+    def setIfMissing(self, key, value):
+        """Set a configuration property, if not already set."""
+        if self.get(key) is None:
+            self.set(key, value)
+        return self
+
     def setMaster(self, value):
         """Set master URL to connect to."""
         self._jconf.setMaster(value)
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index e21be0e10a3f786905989636f11c4894d1061516..024fb881877c9ffafbfdc52aa53bfe520c99ad6e 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -101,7 +101,8 @@ class SparkContext(object):
         else:
             self.serializer = BatchedSerializer(self._unbatched_serializer,
                                                 batchSize)
-
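+        # Compress serialized RDD data by default; a user-set value wins.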
+        self._conf.setIfMissing("spark.rdd.compress", "true")
         # Set any parameters passed directly to us on the conf
         if master:
             self._conf.setMaster(master)
diff --git a/python/pyspark/rdd.py b/python/pyspark/rdd.py
index 94ba22306afbddce6cbd98dbaadae7e0445d3a1a..a38dd0b9237c5c29c56ca36370d7cbaa14419ec0 100644
--- a/python/pyspark/rdd.py
+++ b/python/pyspark/rdd.py
@@ -231,10 +231,11 @@ class RDD(object):
 
     def cache(self):
         """
-        Persist this RDD with the default storage level (C{MEMORY_ONLY}).
+        Persist this RDD with the default storage level (C{MEMORY_ONLY_SER}).
         """
         self.is_cached = True
-        self._jrdd.cache()
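+        # Serialized in-memory blocks honor the spark.rdd.compress setting.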
+        self.persist(StorageLevel.MEMORY_ONLY_SER)
         return self
 
     def persist(self, storageLevel):