diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala index f67ddc9be1a5a93f8443cc6d9d6c14f18c56a3bb..f321c45e5c518996f659413bd568d6af6afdf0cf 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala @@ -167,7 +167,7 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat } override def databaseExists(db: String): Boolean = withClient { - client.getDatabaseOption(db).isDefined + client.databaseExists(db) } override def listDatabases(): Seq[String] = withClient { diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala index 8e7c871183dfd01be3d153e9fd188c7e638742a8..0be5b0bedfe776b6e91abeabc912351aea7e8386 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala @@ -58,12 +58,10 @@ private[hive] trait HiveClient { def setCurrentDatabase(databaseName: String): Unit /** Returns the metadata for specified database, throwing an exception if it doesn't exist */ - final def getDatabase(name: String): CatalogDatabase = { - getDatabaseOption(name).getOrElse(throw new NoSuchDatabaseException(name)) - } + def getDatabase(name: String): CatalogDatabase - /** Returns the metadata for a given database, or None if it doesn't exist. */ - def getDatabaseOption(name: String): Option[CatalogDatabase] + /** Return whether a database with the specified name exists. */ + def databaseExists(dbName: String): Boolean /** List the names of all the databases that match the specified pattern. 
*/ def listDatabases(pattern: String): Seq[String] diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala index db73596e5f520711857dc7b8caca95e6b8880b48..e0f71560f33028a3a6f6749222075c95eed4faa8 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala @@ -300,7 +300,7 @@ private[hive] class HiveClientImpl( } override def setCurrentDatabase(databaseName: String): Unit = withHiveState { - if (getDatabaseOption(databaseName).isDefined) { + if (databaseExists(databaseName)) { state.setCurrentDatabase(databaseName) } else { throw new NoSuchDatabaseException(databaseName) @@ -336,14 +336,18 @@ private[hive] class HiveClientImpl( Option(database.properties).map(_.asJava).orNull)) } - override def getDatabaseOption(name: String): Option[CatalogDatabase] = withHiveState { - Option(client.getDatabase(name)).map { d => + override def getDatabase(dbName: String): CatalogDatabase = withHiveState { + Option(client.getDatabase(dbName)).map { d => CatalogDatabase( name = d.getName, description = d.getDescription, locationUri = d.getLocationUri, properties = Option(d.getParameters).map(_.asScala.toMap).orNull) - } + }.getOrElse(throw new NoSuchDatabaseException(dbName)) + } + + override def databaseExists(dbName: String): Boolean = withHiveState { + client.databaseExists(dbName) } override def listDatabases(pattern: String): Seq[String] = withHiveState { diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala index bfec43070a797bc8cea661c70f04da2545930755..e706e2eb1f438e6eb94243f1f51ef7fcb567e058 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala +++ 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala @@ -28,7 +28,7 @@ import org.apache.spark.SparkFunSuite import org.apache.spark.internal.Logging import org.apache.spark.sql.{AnalysisException, Row} import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier} -import org.apache.spark.sql.catalyst.analysis.NoSuchPermanentFunctionException +import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPermanentFunctionException} import org.apache.spark.sql.catalyst.catalog._ import org.apache.spark.sql.catalyst.expressions.{AttributeReference, EqualTo, Literal} import org.apache.spark.sql.catalyst.util.quietly @@ -137,11 +137,12 @@ class VersionsSuite extends SparkFunSuite with SQLTestUtils with TestHiveSinglet test(s"$version: getDatabase") { // No exception should be thrown client.getDatabase("default") + intercept[NoSuchDatabaseException](client.getDatabase("nonexist")) } - test(s"$version: getDatabaseOption") { - assert(client.getDatabaseOption("default").isDefined) - assert(client.getDatabaseOption("nonexist") == None) + test(s"$version: databaseExists") { + assert(client.databaseExists("default") == true) + assert(client.databaseExists("nonexist") == false) } test(s"$version: listDatabases") { @@ -155,9 +156,9 @@ class VersionsSuite extends SparkFunSuite with SQLTestUtils with TestHiveSinglet } test(s"$version: dropDatabase") { - assert(client.getDatabaseOption("temporary").isDefined) + assert(client.databaseExists("temporary") == true) client.dropDatabase("temporary", ignoreIfNotExists = false, cascade = true) - assert(client.getDatabaseOption("temporary").isEmpty) + assert(client.databaseExists("temporary") == false) } ///////////////////////////////////////////////////////////////////////////