Skip to content
Snippets Groups Projects
Commit 261c55dd authored by gatorsmile's avatar gatorsmile Committed by Yin Huai
Browse files

[SPARK-17250][SQL] Remove HiveClient and setCurrentDatabase from HiveSessionCatalog

### What changes were proposed in this pull request?
This is the first step to remove `HiveClient` from `HiveSessionState`. In the metastore interaction, we always use the fully qualified table name when accessing/operating a table. That means, we always specify the database. Thus, it is not necessary to use `HiveClient` to change the active database in Hive metastore.

In `HiveSessionCatalog`, `setCurrentDatabase` is the only function that uses `HiveClient`. Thus, we can remove `HiveClient` from `HiveSessionCatalog` after removing `setCurrentDatabase`.

### How was this patch tested?
The existing test cases.

Author: gatorsmile <gatorsmile@gmail.com>

Closes #14821 from gatorsmile/setCurrentDB.
parent fd4ba3f6
No related branches found
No related tags found
No related merge requests found
...@@ -34,7 +34,6 @@ import org.apache.spark.sql.catalyst.expressions.{Cast, Expression, ExpressionIn ...@@ -34,7 +34,6 @@ import org.apache.spark.sql.catalyst.expressions.{Cast, Expression, ExpressionIn
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias} import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
import org.apache.spark.sql.catalyst.rules.Rule import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.hive.HiveShim.HiveFunctionWrapper import org.apache.spark.sql.hive.HiveShim.HiveFunctionWrapper
import org.apache.spark.sql.hive.client.HiveClient
import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{DecimalType, DoubleType} import org.apache.spark.sql.types.{DecimalType, DoubleType}
import org.apache.spark.util.Utils import org.apache.spark.util.Utils
...@@ -42,7 +41,6 @@ import org.apache.spark.util.Utils ...@@ -42,7 +41,6 @@ import org.apache.spark.util.Utils
private[sql] class HiveSessionCatalog( private[sql] class HiveSessionCatalog(
externalCatalog: HiveExternalCatalog, externalCatalog: HiveExternalCatalog,
client: HiveClient,
sparkSession: SparkSession, sparkSession: SparkSession,
functionResourceLoader: FunctionResourceLoader, functionResourceLoader: FunctionResourceLoader,
functionRegistry: FunctionRegistry, functionRegistry: FunctionRegistry,
...@@ -55,11 +53,6 @@ private[sql] class HiveSessionCatalog( ...@@ -55,11 +53,6 @@ private[sql] class HiveSessionCatalog(
conf, conf,
hadoopConf) { hadoopConf) {
// Keeps the Hive metastore client's notion of the "current database" in sync with
// Spark's own session catalog: first updates the Spark-side state via the superclass,
// then propagates the same database name to the underlying `client`.
// NOTE(review): this override was removed by SPARK-17250 because metastore access
// always uses fully qualified table names, making the client-side sync unnecessary.
override def setCurrentDatabase(db: String): Unit = {
  super.setCurrentDatabase(db)
  // Side-effecting call ordered after the super call; presumably so Spark-side
  // validation (e.g. database existence) runs before mutating Hive client state
  // — TODO confirm against SessionCatalog.setCurrentDatabase.
  client.setCurrentDatabase(db)
}
override def lookupRelation(name: TableIdentifier, alias: Option[String]): LogicalPlan = { override def lookupRelation(name: TableIdentifier, alias: Option[String]): LogicalPlan = {
val table = formatTableName(name.table) val table = formatTableName(name.table)
if (name.database.isDefined || !tempTables.contains(table)) { if (name.database.isDefined || !tempTables.contains(table)) {
......
...@@ -45,7 +45,6 @@ private[hive] class HiveSessionState(sparkSession: SparkSession) ...@@ -45,7 +45,6 @@ private[hive] class HiveSessionState(sparkSession: SparkSession)
override lazy val catalog = { override lazy val catalog = {
new HiveSessionCatalog( new HiveSessionCatalog(
sparkSession.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog], sparkSession.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog],
metadataHive,
sparkSession, sparkSession,
functionResourceLoader, functionResourceLoader,
functionRegistry, functionRegistry,
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment