diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 5a30431568b1642b78af1c4eee678af54d770f24..84bc0a3b7ccd0bd9b969419532017290195185be 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -232,6 +232,20 @@ class SparkContext(object):
                 else:
                     SparkContext._active_spark_context = instance
 
+    def __enter__(self):
+        """
+        Enable 'with SparkContext(...) as sc: app(sc)' syntax.
+        """
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        """
+        Enable 'with SparkContext(...) as sc: app(sc)' syntax.
+
+        Specifically stop the context on exit of the with block.
+        """
+        self.stop()
+
     @classmethod
     def setSystemProperty(cls, key, value):
         """
diff --git a/python/pyspark/tests.py b/python/pyspark/tests.py
index 0bd2a9e6c507d136999c7a591bd2a3d972cedca7..bb84ebe72cb24ea7dc2536150f142e94f1360ca4 100644
--- a/python/pyspark/tests.py
+++ b/python/pyspark/tests.py
@@ -1254,6 +1254,35 @@ class TestSparkSubmit(unittest.TestCase):
         self.assertIn("[2, 4, 6]", out)
 
 
+class ContextStopTests(unittest.TestCase):
+
+    def test_stop(self):
+        sc = SparkContext()
+        self.assertNotEqual(SparkContext._active_spark_context, None)
+        sc.stop()
+        self.assertEqual(SparkContext._active_spark_context, None)
+
+    def test_with(self):
+        with SparkContext() as sc:
+            self.assertNotEqual(SparkContext._active_spark_context, None)
+        self.assertEqual(SparkContext._active_spark_context, None)
+
+    def test_with_exception(self):
+        try:
+            with SparkContext() as sc:
+                self.assertNotEqual(SparkContext._active_spark_context, None)
+                raise Exception()
+        except Exception:
+            pass
+        self.assertEqual(SparkContext._active_spark_context, None)
+
+    def test_with_stop(self):
+        with SparkContext() as sc:
+            self.assertNotEqual(SparkContext._active_spark_context, None)
+            sc.stop()
+        self.assertEqual(SparkContext._active_spark_context, None)
+
+
 @unittest.skipIf(not _have_scipy, "SciPy not installed")
 class SciPyTests(PySparkTestCase):