Commit 44233865 authored by Michael Armbrust, committed by Reynold Xin

[SQL] Make it possible to create Java/Python SQLContexts from an existing Scala SQLContext.

Author: Michael Armbrust <michael@databricks.com>

Closes #761 from marmbrus/existingContext and squashes the following commits:

4651051 [Michael Armbrust] Make it possible to create Java/Python SQLContexts from an existing Scala SQLContext.
parent 753b04de
@@ -28,7 +28,7 @@ class SQLContext:
     register L{SchemaRDD}s as tables, execute sql over tables, cache tables, and read parquet files.
     """
 
-    def __init__(self, sparkContext):
+    def __init__(self, sparkContext, sqlContext = None):
         """
         Create a new SQLContext.
 
@@ -58,10 +58,13 @@ class SQLContext:
         self._jvm = self._sc._jvm
         self._pythonToJavaMap = self._jvm.PythonRDD.pythonToJavaMap
 
+        if sqlContext:
+            self._scala_SQLContext = sqlContext
+
     @property
     def _ssql_ctx(self):
         """
-        Accessor for the JVM SparkSQL context. Subclasses can overrite this property to provide
+        Accessor for the JVM SparkSQL context. Subclasses can override this property to provide
         their own JVM Contexts.
         """
         if not hasattr(self, '_scala_SQLContext'):
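The Python hunk above only stores a Py4J handle to an existing JVM SQLContext and makes _ssql_ctx return it instead of constructing a fresh one. The object being handed over is an ordinary Scala SQLContext; a minimal, hypothetical Scala sketch of creating that JVM-side context (the SparkContext name and app name are illustrative):

import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

// Hypothetical setup: any already-running SparkContext would do.
val sc = new SparkContext("local", "ExistingContextExample")

// This is the kind of object whose Py4J handle the new optional
// `sqlContext` argument accepts; `_ssql_ctx` then returns it rather
// than creating a new JVM SQLContext of its own.
val existingSqlContext = new SQLContext(sc)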
@@ -33,9 +33,9 @@ import org.apache.spark.util.Utils
 /**
  * The entry point for executing Spark SQL queries from a Java program.
  */
-class JavaSQLContext(sparkContext: JavaSparkContext) {
+class JavaSQLContext(val sqlContext: SQLContext) {
 
-  val sqlContext = new SQLContext(sparkContext.sc)
+  def this(sparkContext: JavaSparkContext) = this(new SQLContext(sparkContext.sc))
 
   /**
    * Executes a query expressed in SQL, returning the result as a JavaSchemaRDD
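With this change, JavaSQLContext wraps whatever SQLContext it is given, while the old JavaSparkContext-based usage is preserved through the auxiliary constructor. A minimal sketch of both paths (the context and value names are illustrative, not from the patch):

import org.apache.spark.SparkContext
import org.apache.spark.api.java.JavaSparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.api.java.JavaSQLContext

val sc = new SparkContext("local", "JavaSQLContextExample")
val existingSqlContext = new SQLContext(sc)

// New primary constructor: share an existing Scala SQLContext, so both
// APIs operate on the same catalog of registered tables.
val javaSqlCtx = new JavaSQLContext(existingSqlContext)

// The previous usage still works via the auxiliary constructor, which
// builds a fresh SQLContext from the JavaSparkContext.
val standaloneJavaSqlCtx = new JavaSQLContext(new JavaSparkContext(sc))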