Commit 2d81ba54 authored by gatorsmile, committed by Yin Huai

[SPARK-14362][SPARK-14406][SQL][FOLLOW-UP] DDL Native Support: Drop View and Drop Table

#### What changes were proposed in this pull request?
This PR addresses the review comment on the original PR: https://github.com/apache/spark/commit/dfce9665c4b2b29a19e6302216dae2800da68ff9#commitcomment-17057030

We now check whether the table/view exists at the beginning, so the command no longer needs to catch `NoSuchTableException` or `InvalidTableException`. We still catch `NonFatal` exceptions when calling `sqlContext.cacheManager.tryUncacheQuery`.
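For illustration only (not part of this patch), here is a minimal sketch of the resulting behavior as seen through `SQLContext.sql`; the table and view names used below are hypothetical:

```scala
// Sketch of the behavior after this change, assuming a SQLContext built from
// this branch; `missing_tbl` and `v_example` are hypothetical names.

// The command now checks catalog.tableExists(tableName) up front, so a missing
// table no longer relies on catching NoSuchTableException/InvalidTableException.
sqlContext.sql("DROP TABLE IF EXISTS missing_tbl")   // no-op, nothing thrown

// Without IF EXISTS, a missing table is reported via logError instead of an exception.
sqlContext.sql("DROP TABLE missing_tbl")

// Mismatched commands still fail fast with an AnalysisException, e.g. DROP TABLE
// on a view: "Cannot drop a view with DROP TABLE. Please use DROP VIEW instead".
// sqlContext.sql("DROP TABLE v_example")
```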

#### How was this patch tested?
The existing test cases should cover the code changes in this PR.

Author: gatorsmile <gatorsmile@gmail.com>

Closes #12321 from gatorsmile/dropViewFollowup.
parent 83fb9640
@@ -17,6 +17,8 @@
 package org.apache.spark.sql.execution.command
 
+import scala.util.control.NonFatal
+
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.{AnalysisException, Row, SQLContext}
 import org.apache.spark.sql.catalyst.TableIdentifier
@@ -192,31 +194,31 @@ case class DropTable(
   override def run(sqlContext: SQLContext): Seq[Row] = {
     val catalog = sqlContext.sessionState.catalog
-    // If the command DROP VIEW is to drop a table or DROP TABLE is to drop a view
-    // issue an exception.
-    catalog.getTableMetadataOption(tableName).map(_.tableType match {
-      case CatalogTableType.VIRTUAL_VIEW if !isView =>
-        throw new AnalysisException(
-          "Cannot drop a view with DROP TABLE. Please use DROP VIEW instead")
-      case o if o != CatalogTableType.VIRTUAL_VIEW && isView =>
-        throw new AnalysisException(
-          s"Cannot drop a table with DROP VIEW. Please use DROP TABLE instead")
-      case _ =>
-    })
-
-    try {
-      sqlContext.cacheManager.tryUncacheQuery(sqlContext.table(tableName.quotedString))
-    } catch {
-      // This table's metadata is not in Hive metastore (e.g. the table does not exist).
-      case e if e.getClass.getName == "org.apache.hadoop.hive.ql.metadata.InvalidTableException" =>
-      case _: org.apache.spark.sql.catalyst.analysis.NoSuchTableException =>
-      // Other Throwables can be caused by users providing wrong parameters in OPTIONS
-      // (e.g. invalid paths). We catch it and log a warning message.
-      // Users should be able to drop such kinds of tables regardless if there is an error.
-      case e: Throwable => log.warn(s"${e.getMessage}", e)
+    if (!catalog.tableExists(tableName)) {
+      if (!ifExists) {
+        val objectName = if (isView) "View" else "Table"
+        logError(s"$objectName '${tableName.quotedString}' does not exist")
+      }
+    } else {
+      // If the command DROP VIEW is to drop a table or DROP TABLE is to drop a view
+      // issue an exception.
+      catalog.getTableMetadataOption(tableName).map(_.tableType match {
+        case CatalogTableType.VIRTUAL_VIEW if !isView =>
+          throw new AnalysisException(
+            "Cannot drop a view with DROP TABLE. Please use DROP VIEW instead")
+        case o if o != CatalogTableType.VIRTUAL_VIEW && isView =>
+          throw new AnalysisException(
+            s"Cannot drop a table with DROP VIEW. Please use DROP TABLE instead")
+        case _ =>
+      })
+      try {
+        sqlContext.cacheManager.tryUncacheQuery(sqlContext.table(tableName.quotedString))
+      } catch {
+        case NonFatal(e) => log.warn(s"${e.getMessage}", e)
+      }
+      catalog.invalidateTable(tableName)
+      catalog.dropTable(tableName, ifExists)
     }
-    catalog.invalidateTable(tableName)
-    catalog.dropTable(tableName, ifExists)
     Seq.empty[Row]
   }
 }