diff --git a/python/pyspark/sql.py b/python/pyspark/sql.py
index 950e275adbf01d4c2c044979428251b57eff0e4c..36040463e62a96019c0817a9e5aa990f407cd725 100644
--- a/python/pyspark/sql.py
+++ b/python/pyspark/sql.py
@@ -912,6 +912,8 @@ class SQLContext:
         """Create a new SQLContext.
 
         @param sparkContext: The SparkContext to wrap.
+        @param sqlContext: An optional JVM Scala SQLContext. If set, we do not instantiate a new
+        SQLContext in the JVM; instead we make all calls to this object.
 
         >>> srdd = sqlCtx.inferSchema(rdd)
         >>> sqlCtx.inferSchema(srdd) # doctest: +IGNORE_EXCEPTION_DETAIL
@@ -1315,6 +1317,18 @@ class HiveContext(SQLContext):
     It supports running both SQL and HiveQL commands.
     """
 
+    def __init__(self, sparkContext, hiveContext=None):
+        """Create a new HiveContext.
+
+        @param sparkContext: The SparkContext to wrap.
+        @param hiveContext: An optional JVM Scala HiveContext. If set, we do not instantiate a new
+        HiveContext in the JVM; instead we make all calls to this object.
+        """
+        SQLContext.__init__(self, sparkContext)
+
+        if hiveContext:
+            self._scala_HiveContext = hiveContext
+
     @property
     def _ssql_ctx(self):
         try: