Skip to content
Snippets Groups Projects
Commit 9c2a376e authored by gatorsmile's avatar gatorsmile Committed by Wenchen Fan
Browse files

[SPARK-15297][SQL] Fix Set -V Command

#### What changes were proposed in this pull request?
The command `SET -v` always outputs the default values, even when the user has overridden a parameter. This behavior is incorrect: when a configuration has been overridden, `SET -v` should output the user-specified value instead.

In addition, the output schema of `SET -v` is wrong. We should use the column `value` instead of `default` for the parameter value.

This PR is to fix the above two issues.

#### How was this patch tested?
Added a test case.

Author: gatorsmile <gatorsmile@gmail.com>

Closes #13081 from gatorsmile/setVcommand.
parent ebfe3a1f
No related branches found
No related tags found
No related merge requests found
......@@ -17,8 +17,6 @@
package org.apache.spark.sql.execution.command
import java.util.NoSuchElementException
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.catalyst.expressions.Attribute
......@@ -88,7 +86,7 @@ case class SetCommand(kv: Option[(String, Option[String])]) extends RunnableComm
}
val schema = StructType(
StructField("key", StringType, nullable = false) ::
StructField("default", StringType, nullable = false) ::
StructField("value", StringType, nullable = false) ::
StructField("meaning", StringType, nullable = false) :: Nil)
(schema.toAttributes, runFunc)
......
......@@ -752,7 +752,7 @@ private[sql] class SQLConf extends Serializable with CatalystConf with Logging {
*/
def getAllDefinedConfs: Seq[(String, String, String)] = sqlConfEntries.synchronized {
sqlConfEntries.values.asScala.filter(_.isPublic).map { entry =>
// SPARK-15297: report the current effective value — the user-specified
// setting when one exists — falling back to the entry's default string
// only when the key has never been set.
(entry.key, getConfString(entry.key, entry.defaultValueString), entry.doc)
}.toSeq
}
......
......@@ -17,7 +17,7 @@
package org.apache.spark.sql.internal
import org.apache.spark.sql.{QueryTest, SparkSession, SQLContext}
import org.apache.spark.sql.{QueryTest, Row, SparkSession, SQLContext}
import org.apache.spark.sql.test.{SharedSQLContext, TestSQLContext}
class SQLConfSuite extends QueryTest with SharedSQLContext {
......@@ -75,6 +75,27 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
spark.wrapped.conf.clear()
}
// Regression test for SPARK-15297: once a conf key is overridden, `SET -v`
// must report the user-specified value instead of the compile-time default,
// and its output schema must expose the column `value` (not `default`).
test("set command for display") {
spark.wrapped.conf.clear()
// With no overrides, plain `SET` lists only explicitly-set confs, so the
// key must be absent from its output...
checkAnswer(
sql("SET").where("key = 'spark.sql.groupByOrdinal'").select("key", "value"),
Nil)
// ...while `SET -v` lists every public conf with its current value
// (here still the default, "true").
checkAnswer(
sql("SET -v").where("key = 'spark.sql.groupByOrdinal'").select("key", "value"),
Row("spark.sql.groupByOrdinal", "true"))
sql("SET spark.sql.groupByOrdinal=false")
// After the override, both `SET` and `SET -v` must show the user value.
checkAnswer(
sql("SET").where("key = 'spark.sql.groupByOrdinal'").select("key", "value"),
Row("spark.sql.groupByOrdinal", "false"))
checkAnswer(
sql("SET -v").where("key = 'spark.sql.groupByOrdinal'").select("key", "value"),
Row("spark.sql.groupByOrdinal", "false"))
}
test("deprecated property") {
spark.wrapped.conf.clear()
val original = spark.conf.get(SQLConf.SHUFFLE_PARTITIONS)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment