diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
index 171def43b570583e081edcbb164dddd5d1cd2556..6f4332c65f934439fe2ed0bcbad9724ad947b5e6 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
@@ -21,7 +21,6 @@ import java.util.regex.Pattern
 
 import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-import org.apache.hadoop.hive.ql.parse.VariableSubstitution
 
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.analysis.Analyzer
@@ -109,7 +108,7 @@ private[hive] class HiveSessionState(ctx: SQLContext) extends SessionState(ctx)
   /**
    * Parser for HiveQl query texts.
    */
-  override lazy val sqlParser: ParserInterface = new HiveSqlParser(conf, hiveconf)
+  override lazy val sqlParser: ParserInterface = new HiveSqlParser(conf)
 
   /**
    * Planner that takes into account Hive-specific strategies.
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
index 1c4cda7109bc353aaa3ba7f203cf479f96d79fc8..989da92bc71d4a13b276345e461506986a0e651f 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
@@ -20,8 +20,6 @@ package org.apache.spark.sql.hive.execution
 import scala.util.Try
 
 import org.antlr.v4.runtime.Token
-import org.apache.hadoop.hive.conf.HiveConf
-import org.apache.hadoop.hive.ql.parse.VariableSubstitution
 import org.apache.hadoop.hive.serde.serdeConstants
 
 import org.apache.spark.sql.catalyst.catalog._
@@ -29,23 +27,23 @@ import org.apache.spark.sql.catalyst.parser._
 import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.SparkSqlAstBuilder
-import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.internal.{SQLConf, VariableSubstitution}
 
 /**
  * Concrete parser for HiveQl statements.
 */
-class HiveSqlParser(conf: SQLConf, hiveconf: HiveConf) extends AbstractSqlParser {
+class HiveSqlParser(conf: SQLConf) extends AbstractSqlParser {
   val astBuilder = new HiveSqlAstBuilder(conf)
 
-  lazy val substitutor = new VariableSubstitution
+  private val substitutor = new VariableSubstitution(conf)
 
   protected override def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
-    super.parse(substitutor.substitute(hiveconf, command))(toResult)
+    super.parse(substitutor.substitute(command))(toResult)
   }
 
   protected override def nativeCommand(sqlText: String): LogicalPlan = {
-    HiveNativeCommand(substitutor.substitute(hiveconf, sqlText))
+    HiveNativeCommand(substitutor.substitute(sqlText))
   }
 }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index b97e9fe35c48717d1f7e4608cc1654ebb80521c0..345ee8ef28eaed5fc3a1a3ed932f98bc3aa0a787 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -512,13 +512,13 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       sql("SELECT key FROM ${hiveconf:tbl} ORDER BY key, value limit 1"),
       sql("SELECT key FROM src ORDER BY key, value limit 1").collect().toSeq)
 
-    sql("set hive.variable.substitute=false") // disable the substitution
+    sql("set spark.sql.variable.substitute=false") // disable the substitution
     sql("set tbl2=src")
     intercept[Exception] {
      sql("SELECT key FROM ${hiveconf:tbl2} ORDER BY key, value limit 1").collect()
     }
 
-    sql("set hive.variable.substitute=true") // enable the substitution
+    sql("set spark.sql.variable.substitute=true") // enable the substitution
     checkAnswer(
       sql("SELECT key FROM ${hiveconf:tbl2} ORDER BY key, value limit 1"),
       sql("SELECT key FROM src ORDER BY key, value limit 1").collect().toSeq)