Commit fb2ea54a authored by Burak Yavuz, committed by Herman van Hovell

[SPARK-18465] Add 'IF EXISTS' clause to 'UNCACHE' to not throw exceptions when table doesn't exist


## What changes were proposed in this pull request?

While this behavior is debatable, consider the following use case:
```sql
UNCACHE TABLE foo;
CACHE TABLE foo AS
SELECT * FROM bar
```
The command above fails the first time you run it, but I want to run it over and over again without having to change my code just for the first run. The issue is that subsequent `CACHE TABLE` commands do not overwrite the existing table.

Now we can do:
```sql
UNCACHE TABLE IF EXISTS foo;
CACHE TABLE foo AS
SELECT * FROM bar
```
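
As a usage sketch (hypothetical driver code, not part of this patch; it assumes a local `SparkSession`, a build that includes this change, and a stand-in temp view named `bar`), the sequence above can now be run as-is, because the `UNCACHE` no longer throws when `foo` has never been cached:

```scala
import org.apache.spark.sql.SparkSession

// Minimal, self-contained sketch of the new clause in action.
object UncacheIfExistsExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("uncache-if-exists-sketch")
      .getOrCreate()

    // Stand-in source view for the example.
    spark.range(10).toDF("id").createOrReplaceTempView("bar")

    // Without IF EXISTS, this statement threw NoSuchTableException on a fresh
    // run where `foo` does not exist; with IF EXISTS it is a no-op.
    spark.sql("UNCACHE TABLE IF EXISTS foo")
    spark.sql("CACHE TABLE foo AS SELECT * FROM bar")

    spark.stop()
  }
}
```

On builds without this patch, `IF EXISTS` is not part of the `UNCACHE` grammar rule and the first statement would be rejected by the parser.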

## How was this patch tested?

Unit tests

Author: Burak Yavuz <brkyvz@gmail.com>

Closes #15896 from brkyvz/uncache.

(cherry picked from commit bdc8153e)
Signed-off-by: Herman van Hovell <hvanhovell@databricks.com>
parent fa360134
@@ -142,7 +142,7 @@ statement
     | REFRESH TABLE tableIdentifier #refreshTable
     | REFRESH .*? #refreshResource
     | CACHE LAZY? TABLE tableIdentifier (AS? query)? #cacheTable
-    | UNCACHE TABLE tableIdentifier #uncacheTable
+    | UNCACHE TABLE (IF EXISTS)? tableIdentifier #uncacheTable
     | CLEAR CACHE #clearCache
     | LOAD DATA LOCAL? INPATH path=STRING OVERWRITE? INTO TABLE
         tableIdentifier partitionSpec? #loadData
@@ -233,7 +233,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
    * Create an [[UncacheTableCommand]] logical plan.
    */
   override def visitUncacheTable(ctx: UncacheTableContext): LogicalPlan = withOrigin(ctx) {
-    UncacheTableCommand(visitTableIdentifier(ctx.tableIdentifier))
+    UncacheTableCommand(visitTableIdentifier(ctx.tableIdentifier), ctx.EXISTS != null)
   }

   /**
@@ -19,6 +19,7 @@ package org.apache.spark.sql.execution.command
 import org.apache.spark.sql.{Dataset, Row, SparkSession}
 import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.catalyst.plans.QueryPlan
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
@@ -49,10 +50,17 @@ case class CacheTableCommand(
 }

-case class UncacheTableCommand(tableIdent: TableIdentifier) extends RunnableCommand {
+case class UncacheTableCommand(
+    tableIdent: TableIdentifier,
+    ifExists: Boolean) extends RunnableCommand {

   override def run(sparkSession: SparkSession): Seq[Row] = {
-    sparkSession.catalog.uncacheTable(tableIdent.quotedString)
+    val tableId = tableIdent.quotedString
+    try {
+      sparkSession.catalog.uncacheTable(tableId)
+    } catch {
+      case _: NoSuchTableException if ifExists => // don't throw
+    }
     Seq.empty[Row]
   }
 }
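The `ifExists` flag essentially packages the try/catch above into the command itself. For callers of the programmatic API, a rough application-side equivalent (an illustrative sketch only, not part of this patch) using the same `NoSuchTableException` the command catches might look like:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException

object UncacheHelpers {
  // Hypothetical helper mirroring the `ifExists = true` branch above:
  // uncache by name, and swallow the error when the table does not exist.
  def uncacheIfExists(spark: SparkSession, tableName: String): Unit = {
    try {
      spark.catalog.uncacheTable(tableName)
    } catch {
      case _: NoSuchTableException => // nothing to uncache
    }
  }
}
```

The test below exercises both paths: plain `UNCACHE TABLE` still throws for a missing table, while the `IF EXISTS` form returns silently.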
@@ -101,13 +101,16 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleton
     sql("DROP TABLE IF EXISTS nonexistantTable")
   }

-  test("correct error on uncache of nonexistant tables") {
+  test("uncache of nonexistant tables") {
+    // make sure table doesn't exist
+    intercept[NoSuchTableException](spark.table("nonexistantTable"))
     intercept[NoSuchTableException] {
       spark.catalog.uncacheTable("nonexistantTable")
     }
     intercept[NoSuchTableException] {
       sql("UNCACHE TABLE nonexistantTable")
     }
+    sql("UNCACHE TABLE IF EXISTS nonexistantTable")
   }

   test("no error on uncache of non-cached table") {