Skip to content
Snippets Groups Projects
Commit 7ac79da0 authored by Dongjoon Hyun's avatar Dongjoon Hyun Committed by Herman van Hovell
Browse files

[SPARK-16459][SQL] Prevent dropping current database

## What changes were proposed in this pull request?

This PR prevents dropping the current database to avoid errors like the following.

```scala
scala> sql("create database delete_db")
scala> sql("use delete_db")
scala> sql("drop database delete_db")
scala> sql("create table t as select 1")
org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException: Database `delete_db` not found;
```

## How was this patch tested?

Pass the Jenkins tests, including an updated test case.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #14115 from dongjoon-hyun/SPARK-16459.
parent 9cb1eb7a
No related branches found
No related tags found
No related merge requests found
......@@ -34,6 +34,10 @@ import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionInfo}
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
import org.apache.spark.sql.catalyst.util.StringUtils
object SessionCatalog {
val DEFAULT_DATABASE = "default"
}
/**
* An internal catalog that is used by a Spark Session. This internal catalog serves as a
* proxy to the underlying metastore (e.g. Hive Metastore) and it also manages temporary
......@@ -47,6 +51,7 @@ class SessionCatalog(
functionRegistry: FunctionRegistry,
conf: CatalystConf,
hadoopConf: Configuration) extends Logging {
import SessionCatalog._
import CatalogTypes.TablePartitionSpec
// For testing only.
......@@ -77,7 +82,7 @@ class SessionCatalog(
// the corresponding item in the current database.
@GuardedBy("this")
protected var currentDb = {
val defaultName = "default"
val defaultName = DEFAULT_DATABASE
val defaultDbDefinition =
CatalogDatabase(defaultName, "default database", conf.warehousePath, Map())
// Initialize default database if it doesn't already exist
......@@ -146,8 +151,10 @@ class SessionCatalog(
def dropDatabase(db: String, ignoreIfNotExists: Boolean, cascade: Boolean): Unit = {
val dbName = formatDatabaseName(db)
if (dbName == "default") {
if (dbName == DEFAULT_DATABASE) {
throw new AnalysisException(s"Can not drop default database")
} else if (dbName == getCurrentDatabase) {
throw new AnalysisException(s"Can not drop current database `${dbName}`")
}
externalCatalog.dropDatabase(dbName, ignoreIfNotExists, cascade)
}
......@@ -878,14 +885,14 @@ class SessionCatalog(
* This is mainly used for tests.
*/
private[sql] def reset(): Unit = synchronized {
val default = "default"
listDatabases().filter(_ != default).foreach { db =>
setCurrentDatabase(DEFAULT_DATABASE)
listDatabases().filter(_ != DEFAULT_DATABASE).foreach { db =>
dropDatabase(db, ignoreIfNotExists = false, cascade = true)
}
listTables(default).foreach { table =>
listTables(DEFAULT_DATABASE).foreach { table =>
dropTable(table, ignoreIfNotExists = false)
}
listFunctions(default).map(_._1).foreach { func =>
listFunctions(DEFAULT_DATABASE).map(_._1).foreach { func =>
if (func.database.isDefined) {
dropFunction(func, ignoreIfNotExists = false)
} else {
......@@ -902,7 +909,6 @@ class SessionCatalog(
require(functionBuilder.isDefined, s"built-in function '$f' is missing function builder")
functionRegistry.registerFunction(f, expressionInfo.get, functionBuilder.get)
}
setCurrentDatabase(default)
}
}
......@@ -1270,6 +1270,15 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
"WITH SERDEPROPERTIES ('spark.sql.sources.me'='anything')")
}
test("drop current database") {
sql("CREATE DATABASE temp")
sql("USE temp")
val m = intercept[AnalysisException] {
sql("DROP DATABASE temp")
}.getMessage
assert(m.contains("Can not drop current database `temp`"))
}
test("drop default database") {
Seq("true", "false").foreach { caseSensitive =>
withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive) {
......
......@@ -93,6 +93,7 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac
hc.sql("DROP TABLE mee_table")
val tables2 = hc.sql("SHOW TABLES IN mee_db").collect().map(_.getString(0))
assert(tables2.isEmpty)
hc.sql("USE default")
hc.sql("DROP DATABASE mee_db CASCADE")
val databases3 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
assert(databases3.toSeq == Seq("default"))
......
......@@ -472,6 +472,7 @@ class HiveDDLSuite
sql(s"DROP TABLE $tabName")
assert(tmpDir.listFiles.isEmpty)
sql("USE default")
sql(s"DROP DATABASE $dbName")
assert(!fs.exists(new Path(tmpDir.toString)))
}
......@@ -526,6 +527,7 @@ class HiveDDLSuite
assert(!tableDirectoryExists(TableIdentifier(tabName), Option(expectedDBLocation)))
}
sql(s"USE default")
val sqlDropDatabase = s"DROP DATABASE $dbName ${if (cascade) "CASCADE" else "RESTRICT"}"
if (tableExists && !cascade) {
val message = intercept[AnalysisException] {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment