From 5572ccf86b084eb5938fe62fd5d9973ec14d555d Mon Sep 17 00:00:00 2001 From: jiangxingbo <jiangxb1987@gmail.com> Date: Tue, 13 Dec 2016 19:04:34 +0100 Subject: [PATCH] [SPARK-17932][SQL][FOLLOWUP] Change statement `SHOW TABLES EXTENDED` to `SHOW TABLE EXTENDED` ## What changes were proposed in this pull request? Change the statement `SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'] [PARTITION(partition_spec)]` to the following statements: - SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'] - SHOW TABLE EXTENDED [(IN|FROM) database_name] LIKE 'identifier_with_wildcards' [PARTITION(partition_spec)] After this change, the statements `SHOW TABLE/SHOW TABLES` have the same syntax as HIVE has. ## How was this patch tested? Modified the test sql file `show-tables.sql`; Modified the test suite `DDLSuite`. Author: jiangxingbo <jiangxb1987@gmail.com> Closes #16262 from jiangxb1987/show-table-extended. --- .../spark/sql/catalyst/parser/SqlBase.g4 | 6 +++-- .../spark/sql/execution/SparkSqlParser.scala | 26 +++++++++++++------ .../spark/sql/execution/command/tables.scala | 7 ++--- .../sql-tests/inputs/show-tables.sql | 8 +++--- .../sql-tests/results/show-tables.sql.out | 14 +++++----- .../sql/execution/command/DDLSuite.scala | 6 ++--- 6 files changed, 41 insertions(+), 26 deletions(-) diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 index 075c73d7a3..63055b62d2 100644 --- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 +++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 @@ -120,8 +120,10 @@ statement (USING resource (',' resource)*)? #createFunction | DROP TEMPORARY? FUNCTION (IF EXISTS)? qualifiedName #dropFunction | EXPLAIN (LOGICAL | FORMATTED | EXTENDED | CODEGEN)? statement #explain - | SHOW TABLES EXTENDED? 
((FROM | IN) db=identifier)? - (LIKE? pattern=STRING)? partitionSpec? #showTables + | SHOW TABLES ((FROM | IN) db=identifier)? + (LIKE? pattern=STRING)? #showTables + | SHOW TABLE EXTENDED ((FROM | IN) db=identifier)? + LIKE pattern=STRING partitionSpec? #showTable | SHOW DATABASES (LIKE pattern=STRING)? #showDatabases | SHOW TBLPROPERTIES table=tableIdentifier ('(' key=tablePropertyKey ')')? #showTblProperties diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala index 4400174e92..cab1b22c99 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala @@ -126,23 +126,33 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder { * Create a [[ShowTablesCommand]] logical plan. * Example SQL : * {{{ - * SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'] - * [PARTITION(partition_spec)]; + * SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards']; * }}} */ override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = withOrigin(ctx) { + ShowTablesCommand( + Option(ctx.db).map(_.getText), + Option(ctx.pattern).map(string), + isExtended = false) + } + + /** + * Create a [[ShowTablesCommand]] logical plan. + * Example SQL : + * {{{ + * SHOW TABLE EXTENDED [(IN|FROM) database_name] LIKE 'identifier_with_wildcards' + * [PARTITION(partition_spec)]; + * }}} + */ + override def visitShowTable(ctx: ShowTableContext): LogicalPlan = withOrigin(ctx) { if (ctx.partitionSpec != null) { - operationNotAllowed("SHOW TABLES [EXTENDED] ... PARTITION", ctx) - } - if (ctx.EXTENDED != null && ctx.pattern == null) { - throw new AnalysisException( - s"SHOW TABLES EXTENDED must have identifier_with_wildcards specified.") + operationNotAllowed("SHOW TABLE EXTENDED ... 
PARTITION", ctx) } ShowTablesCommand( Option(ctx.db).map(_.getText), Option(ctx.pattern).map(string), - ctx.EXTENDED != null) + isExtended = true) } /** diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala index d2a7556476..012b6ea4c5 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala @@ -592,7 +592,8 @@ case class DescribeTableCommand( * If a databaseName is not given, the current database will be used. * The syntax of using this command in SQL is: * {{{ - * SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards']; + * SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards']; + * SHOW TABLE EXTENDED [(IN|FROM) database_name] LIKE 'identifier_with_wildcards'; * }}} */ case class ShowTablesCommand( @@ -600,8 +601,8 @@ case class ShowTablesCommand( tableIdentifierPattern: Option[String], isExtended: Boolean = false) extends RunnableCommand { - // The result of SHOW TABLES has three basic columns: database, tableName and isTemporary. - // If `isExtended` is true, append column `information` to the output columns. + // The result of SHOW TABLES/SHOW TABLE has three basic columns: database, tableName and + // isTemporary. If `isExtended` is true, append column `information` to the output columns. 
override val output: Seq[Attribute] = { val tableExtendedInfo = if (isExtended) { AttributeReference("information", StringType, nullable = false)() :: Nil diff --git a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql index a16c39819a..18d02e150e 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql @@ -16,11 +16,11 @@ SHOW TABLES 'show_t*'; SHOW TABLES LIKE 'show_t1*|show_t2*'; SHOW TABLES IN showdb 'show_t*'; --- SHOW TABLES EXTENDED +-- SHOW TABLE EXTENDED -- Ignore these because there exist timestamp results, e.g. `Created`. --- SHOW TABLES EXTENDED LIKE 'show_t*'; -SHOW TABLES EXTENDED; -SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us'); +-- SHOW TABLE EXTENDED LIKE 'show_t*'; +SHOW TABLE EXTENDED; +SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us'); -- Clean Up DROP TABLE show_t1; diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out index a4f411258d..904601bf11 100644 --- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out @@ -114,28 +114,30 @@ show_t3 -- !query 12 -SHOW TABLES EXTENDED +SHOW TABLE EXTENDED -- !query 12 schema struct<> -- !query 12 output org.apache.spark.sql.catalyst.parser.ParseException -SHOW TABLES EXTENDED must have identifier_with_wildcards specified. +mismatched input '<EOF>' expecting 'LIKE'(line 1, pos 19) + == SQL == -SHOW TABLES EXTENDED +SHOW TABLE EXTENDED +-------------------^^^ -- !query 13 -SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us') +SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us') -- !query 13 schema struct<> -- !query 13 output org.apache.spark.sql.catalyst.parser.ParseException -Operation not allowed: SHOW TABLES [EXTENDED] ... 
PARTITION(line 1, pos 0) +Operation not allowed: SHOW TABLE EXTENDED ... PARTITION(line 1, pos 0) == SQL == -SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us') +SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us') ^^^ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index e61beb49e4..4c0e0fa990 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -885,7 +885,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { testRenamePartitions(isDatasourceTable = true) } - test("show tables") { + test("show table extended") { withTempView("show1a", "show2b") { sql( """ @@ -909,9 +909,9 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { |) """.stripMargin) assert( - sql("SHOW TABLES EXTENDED LIKE 'show*'").count() >= 2) + sql("SHOW TABLE EXTENDED LIKE 'show*'").count() >= 2) assert( - sql("SHOW TABLES EXTENDED LIKE 'show*'").schema == + sql("SHOW TABLE EXTENDED LIKE 'show*'").schema == StructType(StructField("database", StringType, false) :: StructField("tableName", StringType, false) :: StructField("isTemporary", BooleanType, false) :: -- GitLab