Commit 3a21e8d5 authored by Reynold Xin

[SPARK-14795][SQL] Remove the use of Hive's variable substitution

## What changes were proposed in this pull request?
This patch builds on #12556 and completely removes the use of Hive's variable substitution.

## How was this patch tested?
Covered by existing tests.

Author: Reynold Xin <rxin@databricks.com>

Closes #12561 from rxin/SPARK-14795.
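
In practice, `${...}` references in query text are now expanded by Spark's own `org.apache.spark.sql.internal.VariableSubstitution` (added in #12556) instead of Hive's `org.apache.hadoop.hive.ql.parse.VariableSubstitution`, so no `HiveConf` has to be threaded through the parser. A minimal sketch of the new substitutor as this patch wires it up; the variable name and query are illustrative, and `setConfString` is assumed from the Spark 2.0-era `SQLConf` API:

```scala
import org.apache.spark.sql.internal.{SQLConf, VariableSubstitution}

// Note: SQLConf is internal API; this sketch assumes package-internal access.
val conf = new SQLConf
conf.setConfString("tbl", "src") // illustrative variable, mirroring the test below

// Spark's substitutor reads everything it needs from SQLConf,
// so substitute() takes only the SQL text.
val substitutor = new VariableSubstitution(conf)
val expanded = substitutor.substitute("SELECT key FROM ${hiveconf:tbl} LIMIT 1")
// expanded == "SELECT key FROM src LIMIT 1"
```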
parent 79008e6c
`HiveSessionState.scala` (file names here are inferred from the class names in the hunks):

```diff
@@ -21,7 +21,6 @@ import java.util.regex.Pattern
 import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-import org.apache.hadoop.hive.ql.parse.VariableSubstitution
 
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.analysis.Analyzer
@@ -109,7 +108,7 @@ private[hive] class HiveSessionState(ctx: SQLContext) extends SessionState(ctx)
   /**
    * Parser for HiveQl query texts.
    */
-  override lazy val sqlParser: ParserInterface = new HiveSqlParser(conf, hiveconf)
+  override lazy val sqlParser: ParserInterface = new HiveSqlParser(conf)
 
   /**
    * Planner that takes into account Hive-specific strategies.
```
`HiveSqlParser.scala`:

```diff
@@ -20,8 +20,6 @@ package org.apache.spark.sql.hive.execution
 import scala.util.Try
 
 import org.antlr.v4.runtime.Token
-import org.apache.hadoop.hive.conf.HiveConf
-import org.apache.hadoop.hive.ql.parse.VariableSubstitution
 import org.apache.hadoop.hive.serde.serdeConstants
 
 import org.apache.spark.sql.catalyst.catalog._
@@ -29,23 +27,23 @@ import org.apache.spark.sql.catalyst.parser._
 import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.SparkSqlAstBuilder
-import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.internal.{SQLConf, VariableSubstitution}
 
 /**
  * Concrete parser for HiveQl statements.
  */
-class HiveSqlParser(conf: SQLConf, hiveconf: HiveConf) extends AbstractSqlParser {
+class HiveSqlParser(conf: SQLConf) extends AbstractSqlParser {
 
   val astBuilder = new HiveSqlAstBuilder(conf)
 
-  lazy val substitutor = new VariableSubstitution
+  private val substitutor = new VariableSubstitution(conf)
 
   protected override def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
-    super.parse(substitutor.substitute(hiveconf, command))(toResult)
+    super.parse(substitutor.substitute(command))(toResult)
   }
 
   protected override def nativeCommand(sqlText: String): LogicalPlan = {
-    HiveNativeCommand(substitutor.substitute(hiveconf, sqlText))
+    HiveNativeCommand(substitutor.substitute(sqlText))
   }
 }
```
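
For orientation, a sketch of how the slimmed-down parser is driven; `parsePlan` is the standard `ParserInterface` entry point, and the `conf` value and query are illustrative:

```scala
// Hypothetical call site: substitution now runs inside parse(),
// against SQLConf, before the ANTLR parser ever sees the text.
val parser = new HiveSqlParser(conf)
val plan = parser.parsePlan("SELECT key FROM ${hiveconf:tbl} ORDER BY key LIMIT 1")
```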
`SQLQuerySuite.scala`:

```diff
@@ -512,13 +512,13 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       sql("SELECT key FROM ${hiveconf:tbl} ORDER BY key, value limit 1"),
       sql("SELECT key FROM src ORDER BY key, value limit 1").collect().toSeq)
 
-    sql("set hive.variable.substitute=false") // disable the substitution
+    sql("set spark.sql.variable.substitute=false") // disable the substitution
     sql("set tbl2=src")
     intercept[Exception] {
       sql("SELECT key FROM ${hiveconf:tbl2} ORDER BY key, value limit 1").collect()
     }
 
-    sql("set hive.variable.substitute=true") // enable the substitution
+    sql("set spark.sql.variable.substitute=true") // enable the substitution
     checkAnswer(
       sql("SELECT key FROM ${hiveconf:tbl2} ORDER BY key, value limit 1"),
       sql("SELECT key FROM src ORDER BY key, value limit 1").collect().toSeq)
```
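
The test change also captures the user-visible rename: substitution is now toggled by `spark.sql.variable.substitute` rather than Hive's `hive.variable.substitute`. A sketch of the behavior the updated test asserts, written as if inside the suite (queries illustrative):

```scala
sql("set tbl=src")
sql("set spark.sql.variable.substitute=false")
// ${hiveconf:tbl} is left verbatim, so using it as a table name fails.
sql("set spark.sql.variable.substitute=true")
// ${hiveconf:tbl} expands to src and the query succeeds.
sql("SELECT key FROM ${hiveconf:tbl} ORDER BY key LIMIT 1")
```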