Skip to content
Snippets Groups Projects
Commit db573fc7 authored by Sandeep Singh's avatar Sandeep Singh Committed by Andrew Or
Browse files

[SPARK-15072][SQL][PYSPARK] FollowUp: Remove SparkSession.withHiveSupport in PySpark

## What changes were proposed in this pull request?
This is a followup of https://github.com/apache/spark/pull/12851
Remove `SparkSession.withHiveSupport` in PySpark and instead use `SparkSession.builder.enableHiveSupport`

## How was this patch tested?
Existing tests.

Author: Sandeep Singh <sandeep@techaddict.me>

Closes #13063 from techaddict/SPARK-15072-followup.
parent 603f4453
No related branches found
No related tags found
No related merge requests found
......@@ -33,7 +33,9 @@ object SparkSqlExample {
case None => new SparkConf().setAppName("Simple Sql App")
}
val sc = new SparkContext(conf)
val sparkSession = SparkSession.withHiveSupport(sc)
val sparkSession = SparkSession.builder
.enableHiveSupport()
.getOrCreate()
import sparkSession._
sql("DROP TABLE IF EXISTS src")
......@@ -41,14 +43,14 @@ object SparkSqlExample {
sql("LOAD DATA LOCAL INPATH 'data.txt' INTO TABLE src")
val results = sql("FROM src SELECT key, value WHERE key >= 0 AND KEY < 5").collect()
results.foreach(println)
/** Fail-fast assertion helper for this example: evaluates the by-name
  * condition `f` once; when it is false, prints `failureMsg` and
  * terminates the JVM with exit status -1. Does nothing on success.
  */
def test(f: => Boolean, failureMsg: String) = {
  if (f) () else { println(failureMsg); System.exit(-1) }
}
test(results.size == 5, "Unexpected number of selected elements: " + results)
println("Test succeeded")
sc.stop()
......
......@@ -41,7 +41,9 @@ atexit.register(lambda: sc.stop())
try:
# Try to access HiveConf, it will raise exception if Hive is not added
sc._jvm.org.apache.hadoop.hive.conf.HiveConf()
spark = SparkSession.withHiveSupport(sc)
spark = SparkSession.builder\
.enableHiveSupport()\
.getOrCreate()
except py4j.protocol.Py4JError:
spark = SparkSession(sc)
except TypeError:
......
......@@ -182,16 +182,6 @@ class SparkSession(object):
if SparkSession._instantiatedContext is None:
SparkSession._instantiatedContext = self
# NOTE(review): this is the classmethod removed by this commit; callers should
# migrate to SparkSession.builder.enableHiveSupport().getOrCreate() instead.
# It builds a JVM-side Hive-backed SparkSession via py4j and wraps it.
@classmethod
@since(2.0)
def withHiveSupport(cls, sparkContext):
"""Returns a new SparkSession with a catalog backed by Hive.
:param sparkContext: The underlying :class:`SparkContext`.
"""
# Delegates to the JVM SparkSession.withHiveSupport through the py4j gateway.
jsparkSession = sparkContext._jvm.SparkSession.withHiveSupport(sparkContext._jsc.sc())
return cls(sparkContext, jsparkSession)
@since(2.0)
def newSession(self):
"""
......
......@@ -27,7 +27,7 @@ import org.apache.spark.sql.{SparkSession, SQLContext}
* An instance of the Spark SQL execution engine that integrates with data stored in Hive.
* Configuration for Hive is read from hive-site.xml on the classpath.
*/
@deprecated("Use SparkSession.withHiveSupport instead", "2.0.0")
@deprecated("Use SparkSession.builder.enableHiveSupport instead", "2.0.0")
class HiveContext private[hive](
_sparkSession: SparkSession,
isRootContext: Boolean)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment