diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 075c73d7a320d05b57ebebbe6b758704a4ed23ba..63055b62d2e70f5ec4047a013a77b538381ec30d 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -120,8 +120,10 @@ statement
         (USING resource (',' resource)*)?                              #createFunction
     | DROP TEMPORARY? FUNCTION (IF EXISTS)? qualifiedName              #dropFunction
     | EXPLAIN (LOGICAL | FORMATTED | EXTENDED | CODEGEN)? statement    #explain
-    | SHOW TABLES EXTENDED? ((FROM | IN) db=identifier)?
-        (LIKE? pattern=STRING)? partitionSpec?                         #showTables
+    | SHOW TABLES ((FROM | IN) db=identifier)?
+        (LIKE? pattern=STRING)?                                        #showTables
+    | SHOW TABLE EXTENDED ((FROM | IN) db=identifier)?
+        LIKE pattern=STRING partitionSpec?                             #showTable
     | SHOW DATABASES (LIKE pattern=STRING)?                            #showDatabases
     | SHOW TBLPROPERTIES table=tableIdentifier
         ('(' key=tablePropertyKey ')')?                                #showTblProperties
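For orientation, a minimal sketch (not part of the patch) of the two statement shapes the split grammar rules accept, run through the ordinary SQL entry point; it assumes a SparkSession named `spark` and a database `showdb` containing `show_t*` tables:

    // SHOW TABLES keeps the optional database and optional LIKE pattern, but no longer takes
    // EXTENDED or a partitionSpec.
    spark.sql("SHOW TABLES IN showdb LIKE 'show_t*'").show()

    // SHOW TABLE EXTENDED is the new rule: LIKE <pattern> is mandatory, and a partitionSpec is
    // parsed even though the command itself still rejects it (see the SparkSqlParser change below).
    spark.sql("SHOW TABLE EXTENDED IN showdb LIKE 'show_t*'").show()
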
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 4400174e92727caf1197a595e83aa9c6f475dfda..cab1b22c99ced55f60e425c9e9b2e2fb6f9c97be 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -126,23 +126,33 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
    * Create a [[ShowTablesCommand]] logical plan.
    * Example SQL :
    * {{{
-   *   SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards']
-   *   [PARTITION(partition_spec)];
+   *   SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
    * }}}
    */
   override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = withOrigin(ctx) {
+    ShowTablesCommand(
+      Option(ctx.db).map(_.getText),
+      Option(ctx.pattern).map(string),
+      isExtended = false)
+  }
+
+  /**
+   * Create a [[ShowTablesCommand]] logical plan.
+   * Example SQL :
+   * {{{
+   *   SHOW TABLE EXTENDED [(IN|FROM) database_name] LIKE 'identifier_with_wildcards'
+   *   [PARTITION(partition_spec)];
+   * }}}
+   */
+  override def visitShowTable(ctx: ShowTableContext): LogicalPlan = withOrigin(ctx) {
     if (ctx.partitionSpec != null) {
-      operationNotAllowed("SHOW TABLES [EXTENDED] ... PARTITION", ctx)
-    }
-    if (ctx.EXTENDED != null && ctx.pattern == null) {
-      throw new AnalysisException(
-        s"SHOW TABLES EXTENDED must have identifier_with_wildcards specified.")
+      operationNotAllowed("SHOW TABLE EXTENDED ... PARTITION", ctx)
     }
 
     ShowTablesCommand(
       Option(ctx.db).map(_.getText),
       Option(ctx.pattern).map(string),
-      ctx.EXTENDED != null)
+      isExtended = true)
   }
 
   /**
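A sketch, in the style of Spark's own parser suites, of the plans the two visitor methods should now build; the parser construction mirrors existing test code and the database/pattern names are illustrative:

    import org.apache.spark.sql.execution.SparkSqlParser
    import org.apache.spark.sql.execution.command.ShowTablesCommand
    import org.apache.spark.sql.internal.SQLConf

    val parser = new SparkSqlParser(new SQLConf)

    // visitShowTables: no EXTENDED, no partitionSpec, isExtended is always false.
    assert(parser.parsePlan("SHOW TABLES IN showdb 'show_t*'") ==
      ShowTablesCommand(Some("showdb"), Some("show_t*"), isExtended = false))

    // visitShowTable: LIKE is mandatory and isExtended is always true.
    assert(parser.parsePlan("SHOW TABLE EXTENDED LIKE 'show_t*'") ==
      ShowTablesCommand(None, Some("show_t*"), isExtended = true))
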
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index d2a7556476a8177cc510a0049369cd0114717488..012b6ea4c5bd8f54602a70ea7139e1dfb99c9be9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -592,7 +592,8 @@ case class DescribeTableCommand(
  * If a databaseName is not given, the current database will be used.
  * The syntax of using this command in SQL is:
  * {{{
- *   SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
+ *   SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
+ *   SHOW TABLE EXTENDED [(IN|FROM) database_name] LIKE 'identifier_with_wildcards';
  * }}}
  */
 case class ShowTablesCommand(
@@ -600,8 +601,8 @@ case class ShowTablesCommand(
     tableIdentifierPattern: Option[String],
     isExtended: Boolean = false) extends RunnableCommand {
 
-  // The result of SHOW TABLES has three basic columns: database, tableName and isTemporary.
-  // If `isExtended` is true, append column `information` to the output columns.
+  // The result of SHOW TABLES / SHOW TABLE EXTENDED has three basic columns: database, tableName
+  // and isTemporary. If `isExtended` is true, append column `information` to the output columns.
   override val output: Seq[Attribute] = {
     val tableExtendedInfo = if (isExtended) {
       AttributeReference("information", StringType, nullable = false)() :: Nil
diff --git a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
index a16c39819afe8590070fe74a2f7f5ce48d0a632d..18d02e150ec67de32975cbb43759a00c86098371 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
@@ -16,11 +16,11 @@ SHOW TABLES 'show_t*';
 SHOW TABLES LIKE 'show_t1*|show_t2*';
 SHOW TABLES IN showdb 'show_t*';
 
--- SHOW TABLES EXTENDED
+-- SHOW TABLE EXTENDED
 -- Ignore these because there exist timestamp results, e.g. `Created`.
--- SHOW TABLES EXTENDED LIKE 'show_t*';
-SHOW TABLES EXTENDED;
-SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us');
+-- SHOW TABLE EXTENDED LIKE 'show_t*';
+SHOW TABLE EXTENDED;
+SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us');
 
 -- Clean Up
 DROP TABLE show_t1;
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
index a4f411258dabec4943c55d7793790d14b5edea1c..904601bf1133c73482b6878bf59e80b9aa3214a2 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -114,28 +114,30 @@ show_t3
 
 
 -- !query 12
-SHOW TABLES EXTENDED
+SHOW TABLE EXTENDED
 -- !query 12 schema
 struct<>
 -- !query 12 output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-SHOW TABLES EXTENDED must have identifier_with_wildcards specified.
+mismatched input '<EOF>' expecting 'LIKE'(line 1, pos 19)
+
 == SQL ==
-SHOW TABLES EXTENDED
+SHOW TABLE EXTENDED
+-------------------^^^
 
 
 -- !query 13
-SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us')
+SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us')
 -- !query 13 schema
 struct<>
 -- !query 13 output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-Operation not allowed: SHOW TABLES [EXTENDED] ... PARTITION(line 1, pos 0)
+Operation not allowed: SHOW TABLE EXTENDED ... PARTITION(line 1, pos 0)
 
 == SQL ==
-SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us')
+SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us')
 ^^^
 
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index e61beb49e47f4a53fc0b34714dc2e95a476a41da..4c0e0fa990f68f1c11326d2994bce67ab3aee4cb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -885,7 +885,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     testRenamePartitions(isDatasourceTable = true)
   }
 
-  test("show tables") {
+  test("show table extended") {
     withTempView("show1a", "show2b") {
       sql(
         """
@@ -909,9 +909,9 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
           |)
         """.stripMargin)
       assert(
-        sql("SHOW TABLES EXTENDED LIKE 'show*'").count() >= 2)
+        sql("SHOW TABLE EXTENDED LIKE 'show*'").count() >= 2)
       assert(
-        sql("SHOW TABLES EXTENDED LIKE 'show*'").schema ==
+        sql("SHOW TABLE EXTENDED LIKE 'show*'").schema ==
           StructType(StructField("database", StringType, false) ::
             StructField("tableName", StringType, false) ::
             StructField("isTemporary", BooleanType, false) ::