diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 403191af5e5be958cebbd44327e0c8315a7b1838..b0e71c7e7c7d1de378dd7fdfcee0e3a88e6ca635 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -115,8 +115,7 @@ statement
     | CLEAR CACHE                                                      #clearCache
     | LOAD DATA LOCAL? INPATH path=STRING OVERWRITE? INTO TABLE
         tableIdentifier partitionSpec?                                 #loadData
-    | TRUNCATE TABLE tableIdentifier partitionSpec?
-        (COLUMNS identifierList)?                                      #truncateTable
+    | TRUNCATE TABLE tableIdentifier partitionSpec?                    #truncateTable
     | op=(ADD | LIST) identifier .*?                                   #manageResource
     | SET ROLE .*?                                                     #failNativeCommand
     | SET .*?                                                          #setConfiguration
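
With the `COLUMNS identifierList` alternative dropped, the only optional clause left in the `#truncateTable` rule is the partition spec. A minimal sketch of the resulting parser behavior, assuming a running `SparkSession` named `spark` (the table name `t` is illustrative):

```scala
spark.sql("TRUNCATE TABLE t")                      // still parses
spark.sql("TRUNCATE TABLE t PARTITION (c = '1')")  // still parses
// COLUMNS no longer matches any grammar alternative, so it now fails in the
// parser as ordinary unrecognized input rather than in a post-parse check:
spark.sql("TRUNCATE TABLE t COLUMNS (col1)")       // throws ParseException
```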
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 57f534cd9eb304934c308210048d92e69400565e..cfebfc6a5ce7186cd4384ade81098d207f44ae6f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -368,17 +368,12 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
    * For example:
    * {{{
    *   TRUNCATE TABLE tablename [PARTITION (partcol1=val1, partcol2=val2 ...)]
-   *   [COLUMNS (col1, col2)]
    * }}}
    */
   override def visitTruncateTable(ctx: TruncateTableContext): LogicalPlan = withOrigin(ctx) {
-    if (ctx.identifierList != null) {
-      throw operationNotAllowed("TRUNCATE TABLE ... COLUMNS", ctx)
-    }
     TruncateTableCommand(
       visitTableIdentifier(ctx.tableIdentifier),
-      Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec)
-    )
+      Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec))
   }
 
   /**
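
Since the grammar can no longer produce a `COLUMNS` list, the `operationNotAllowed` guard in `visitTruncateTable` is dead code, and the visitor collapses to a single constructor call. A sketch of how the rule now resolves straight to the command, assuming a `SQLConf` is at hand (`parsePlan` is the parser's public entry point):

```scala
import org.apache.spark.sql.execution.SparkSqlParser
import org.apache.spark.sql.internal.SQLConf

val parser = new SparkSqlParser(new SQLConf)
// Parses directly into the runnable command, partition spec and all:
val plan = parser.parsePlan("TRUNCATE TABLE db.t PARTITION (c = '1')")
// plan == TruncateTableCommand(TableIdentifier("t", Some("db")), Some(Map("c" -> "1")))
```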
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 13e63a1befb20b7066f2164b45ee5b3681ee45b0..bef4c9222c29647f658f38422e44621505127768 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -278,7 +278,7 @@ case class LoadDataCommand(
  *
  * The syntax of this command is:
  * {{{
- *  TRUNCATE TABLE tablename [PARTITION (partcol1=val1, partcol2=val2 ...)]
+ *   TRUNCATE TABLE tablename [PARTITION (partcol1=val1, partcol2=val2 ...)]
  * }}}
  */
 case class TruncateTableCommand(
@@ -288,9 +288,10 @@ case class TruncateTableCommand(
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
     if (!catalog.tableExists(tableName)) {
-      logError(s"table '$tableName' in TRUNCATE TABLE does not exist.")
+      throw new AnalysisException(s"Table '$tableName' in TRUNCATE TABLE does not exist.")
     } else if (catalog.isTemporaryTable(tableName)) {
-      logError(s"table '$tableName' in TRUNCATE TABLE is a temporary table.")
+      throw new AnalysisException(
+        s"Operation not allowed: TRUNCATE TABLE on temporary tables: '$tableName'")
     } else {
       val locations = if (partitionSpec.isDefined) {
         catalog.listPartitions(tableName, partitionSpec).map(_.storage.locationUri)
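
The behavioral change here is the important one: previously, `TRUNCATE TABLE` against a missing or temporary table only wrote a `logError` and then returned as if it had succeeded. Throwing `AnalysisException` surfaces the mistake to the caller. A minimal sketch, assuming a `SparkSession` named `spark` and that no table called `missing_t` exists:

```scala
import org.apache.spark.sql.AnalysisException

try {
  spark.sql("TRUNCATE TABLE missing_t")
} catch {
  case e: AnalysisException =>
    // Prints: Table 'missing_t' in TRUNCATE TABLE does not exist.
    println(e.getMessage)
}
```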
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index df62ba08b801803d2eff28cfe5f83b0ea1e5b871..6f374d713bfc849733c3284b036a4b097c321b92 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -289,10 +289,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
 
       val testResults = sql("SELECT * FROM non_part_table").collect()
 
-      intercept[ParseException] {
-        sql("TRUNCATE TABLE non_part_table COLUMNS (employeeID)")
-      }
-
       sql("TRUNCATE TABLE non_part_table")
       checkAnswer(sql("SELECT * FROM non_part_table"), Seq.empty[Row])
 
@@ -320,10 +316,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
         sql("SELECT employeeID, employeeName FROM part_table WHERE c = '2' AND d = '2'"),
         testResults)
 
-      intercept[ParseException] {
-        sql("TRUNCATE TABLE part_table PARTITION(c='1', d='1') COLUMNS (employeeID)")
-      }
-
       sql("TRUNCATE TABLE part_table PARTITION(c='1', d='1')")
       checkAnswer(
         sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1' AND d = '1'"),
@@ -332,10 +324,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
         sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1' AND d = '2'"),
         testResults)
 
-      intercept[ParseException] {
-        sql("TRUNCATE TABLE part_table PARTITION(c='1') COLUMNS (employeeID)")
-      }
-
       sql("TRUNCATE TABLE part_table PARTITION(c='1')")
       checkAnswer(
         sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1'"),