diff --git a/python/pyspark/sql.py b/python/pyspark/sql.py
index 6789d7002b3b716874b3b63e176b73a3b5af07df..bbe69e7d8f89b7510d5c5ef93bb7d9e61cc27d01 100644
--- a/python/pyspark/sql.py
+++ b/python/pyspark/sql.py
@@ -28,7 +28,7 @@ class SQLContext:
     register L{SchemaRDD}s as tables, execute SQL over tables, cache tables, and read parquet files.
     """
 
-    def __init__(self, sparkContext):
+    def __init__(self, sparkContext, sqlContext=None):
         """
         Create a new SQLContext.
 
@@ -58,10 +58,13 @@ class SQLContext:
         self._jvm = self._sc._jvm
         self._pythonToJavaMap = self._jvm.PythonRDD.pythonToJavaMap
 
+        if sqlContext:
+            self._scala_SQLContext = sqlContext
+
     @property
     def _ssql_ctx(self):
         """
-        Accessor for the JVM SparkSQL context.  Subclasses can overrite this property to provide
+        Accessor for the JVM SparkSQL context.  Subclasses can override this property to provide
         their own JVM Contexts.
         """
         if not hasattr(self, '_scala_SQLContext'):
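
With this change a Python SQLContext can wrap a JVM SQLContext supplied by the
caller instead of letting the _ssql_ctx property build one lazily on first
access. A minimal sketch of both paths, assuming a local SparkContext; the
py4j construction of jvm_ctx through sc._jvm is illustrative only and not part
of this patch:

    from pyspark import SparkContext
    from pyspark.sql import SQLContext

    sc = SparkContext("local", "sqlctx-example")

    # Default path: no JVM context is supplied, so _ssql_ctx constructs
    # a new org.apache.spark.sql.SQLContext the first time it is read.
    ctx = SQLContext(sc)

    # Injection path: hand over a JVM SQLContext that already exists,
    # e.g. one built directly through the py4j gateway (illustrative).
    jvm_ctx = sc._jvm.org.apache.spark.sql.SQLContext(sc._jsc.sc())
    shared = SQLContext(sc, jvm_ctx)

One apparent use is letting tests or subclasses share a single JVM context
between Python and Scala code rather than overriding _ssql_ctx.
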
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
index 57facbe10fc96826ed961c6324891a96e06866e8..6f7d431b9a8190d51089af11be928898350d23e8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
@@ -33,9 +33,9 @@ import org.apache.spark.util.Utils
 /**
  * The entry point for executing Spark SQL queries from a Java program.
  */
-class JavaSQLContext(sparkContext: JavaSparkContext) {
+class JavaSQLContext(val sqlContext: SQLContext) {
 
-  val sqlContext = new SQLContext(sparkContext.sc)
+  def this(sparkContext: JavaSparkContext) = this(new SQLContext(sparkContext.sc))
 
   /**
    * Executes a query expressed in SQL, returning the result as a JavaSchemaRDD
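
On the Scala side, JavaSQLContext's primary constructor now takes the
SQLContext itself, and the old JavaSparkContext signature survives as an
auxiliary constructor. A short sketch of both entry points, assuming a local
context (the example object and value names are illustrative):

    import org.apache.spark.api.java.JavaSparkContext
    import org.apache.spark.sql.SQLContext
    import org.apache.spark.sql.api.java.JavaSQLContext

    object JavaSQLContextExample {
      def main(args: Array[String]): Unit = {
        val jsc = new JavaSparkContext("local", "javasqlctx-example")

        // Old entry point, unchanged for callers: builds a fresh
        // SQLContext internally via the auxiliary constructor.
        val fresh = new JavaSQLContext(jsc)

        // New entry point: wrap an existing SQLContext so Java-API and
        // Scala code can share one context and its registered tables.
        val scalaCtx = new SQLContext(jsc.sc)
        val shared = new JavaSQLContext(scalaCtx)
      }
    }

Exposing sqlContext as a val also makes the wrapped Scala context reachable
from callers that need to drop down to the Scala API.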