diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index dd3f17d525abd926d1d4498bcc8672aa9f62328b..ffea6285528a346c28f3d68a75ea66f20e19bce8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -185,7 +185,7 @@ case class DropTableCommand(
     if (!catalog.tableExists(tableName)) {
       if (!ifExists) {
         val objectName = if (isView) "View" else "Table"
-        logError(s"$objectName '${tableName.quotedString}' does not exist")
+        throw new AnalysisException(s"$objectName to drop '$tableName' does not exist")
       }
     } else {
       // If the command DROP VIEW is to drop a table or DROP TABLE is to drop a view
@@ -202,7 +202,7 @@ case class DropTableCommand(
       try {
         sparkSession.cacheManager.tryUncacheQuery(sparkSession.table(tableName.quotedString))
       } catch {
-        case NonFatal(e) => log.warn(s"${e.getMessage}", e)
+        case NonFatal(e) => log.warn(e.toString, e)
       }
       catalog.invalidateTable(tableName)
       catalog.dropTable(tableName, ifExists)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 64f5a4ac479a750abf89ee9088095a77eee33256..bddd3f2119ae24a05ccaddc72c33ab38d0030162 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -741,14 +741,12 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     sql("DROP TABLE dbx.tab1")
     assert(catalog.listTables("dbx") == Nil)
     sql("DROP TABLE IF EXISTS dbx.tab1")
-    // no exception will be thrown
-    sql("DROP TABLE dbx.tab1")
+    intercept[AnalysisException] {
+      sql("DROP TABLE dbx.tab1")
+    }
   }
 
-  test("drop view in SQLContext") {
-    // SQLContext does not support create view. Log an error message, if tab1 does not exists
-    sql("DROP VIEW tab1")
-
+  test("drop view") {
     val catalog = spark.sessionState.catalog
     val tableIdent = TableIdentifier("tab1", Some("dbx"))
     createDatabase(catalog, "dbx")
diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
index a8645f7cd31d700a886a9e0dce12c835645350f9..2d5a970c12006fac5984fbdaf3670138a9e73236 100644
--- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
+++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
@@ -515,7 +515,33 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "plan_json",
 
     // This test uses CREATE EXTERNAL TABLE without specifying LOCATION
-    "alter2"
+    "alter2",
+
+    // These tests drop tables that do not exist (without specifying IF EXISTS)
+    "alter_rename_partition1",
+    "date_1",
+    "date_4",
+    "date_join1",
+    "date_serde",
+    "insert_compressed",
+    "lateral_view_cp",
+    "leftsemijoin",
+    "mapjoin_subquery2",
+    "nomore_ambiguous_table_col",
+    "partition_date",
+    "partition_varchar1",
+    "ppd_repeated_alias",
+    "push_or",
+    "reducesink_dedup",
+    "subquery_in",
+    "subquery_notin_having",
+    "timestamp_3",
+    "timestamp_lazy",
+    "udaf_covar_pop",
+    "union31",
+    "union_date",
+    "varchar_2",
+    "varchar_join1"
   )
 
   /**
@@ -529,7 +555,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "add_partition_with_whitelist",
     "alias_casted_column",
     "alter_partition_with_whitelist",
-    "alter_rename_partition",
     "ambiguous_col",
     "annotate_stats_join",
     "annotate_stats_limit",
@@ -606,12 +631,8 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "database_drop",
     "database_location",
     "database_properties",
-    "date_1",
     "date_2",
-    "date_4",
     "date_comparison",
-    "date_join1",
-    "date_serde",
     "decimal_1",
     "decimal_4",
     "decimal_join",
@@ -737,7 +758,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "insert1",
     "insert1_overwrite_partitions",
     "insert2_overwrite_partitions",
-    "insert_compressed",
     "join1",
     "join10",
     "join11",
@@ -793,10 +813,8 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "join_reorder4",
     "join_star",
     "lateral_view",
-    "lateral_view_cp",
     "lateral_view_noalias",
     "lateral_view_ppd",
-    "leftsemijoin",
     "leftsemijoin_mr",
     "limit_pushdown_negative",
     "lineage1",
@@ -824,7 +842,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "mapjoin_filter_on_outerjoin",
     "mapjoin_mapjoin",
     "mapjoin_subquery",
-    "mapjoin_subquery2",
     "mapjoin_test_outer",
     "mapreduce1",
     "mapreduce2",
@@ -846,7 +863,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "multi_join_union",
     "multigroupby_singlemr",
     "noalias_subq1",
-    "nomore_ambiguous_table_col",
     "nonblock_op_deduplicate",
     "notable_alias1",
     "notable_alias2",
@@ -870,10 +886,8 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "part_inherit_tbl_props",
     "part_inherit_tbl_props_with_star",
     "partcols1",
-    "partition_date",
     "partition_serde_format",
     "partition_type_check",
-    "partition_varchar1",
     "partition_wise_fileformat9",
     "ppd1",
     "ppd2",
@@ -893,7 +907,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "ppd_outer_join4",
     "ppd_outer_join5",
     "ppd_random",
-    "ppd_repeated_alias",
     "ppd_udf_col",
     "ppd_union",
     "ppr_allchildsarenull",
@@ -901,7 +914,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "ppr_pushdown2",
     "ppr_pushdown3",
     "progress_1",
-    "push_or",
     "query_with_semi",
     "quote1",
     "quote2",
@@ -913,7 +925,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "reduce_deduplicate_exclude_gby",
     "reduce_deduplicate_exclude_join",
     "reduce_deduplicate_extended",
-    "reducesink_dedup",
     "router_join_ppr",
     "select_as_omitted",
     "select_unquote_and",
@@ -936,20 +947,15 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "subquery_exists_having",
     "subquery_notexists",
     "subquery_notexists_having",
-    "subquery_in",
     "subquery_in_having",
-    "subquery_notin_having",
     "tablename_with_select",
-    "timestamp_3",
     "timestamp_comparison",
-    "timestamp_lazy",
     "timestamp_null",
     "transform_ppr1",
     "transform_ppr2",
     "type_cast_1",
     "type_widening",
     "udaf_collect_set",
-    "udaf_covar_pop",
     "udaf_histogram_numeric",
     "udf2",
     "udf5",
@@ -1113,7 +1119,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "union29",
     "union3",
     "union30",
-    "union31",
     "union33",
     "union34",
     "union4",
@@ -1122,13 +1127,10 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
     "union7",
     "union8",
     "union9",
-    "union_date",
     "union_lateralview",
     "union_ppr",
     "union_remove_6",
     "union_script",
-    "varchar_2",
-    "varchar_join1",
     "varchar_union1",
     "view",
     "view_cast",
diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala
index de592f8d937dd59de1f19b96162d467303fcb86f..6c3978154d4b6bff117fa210ff321d099990e9b7 100644
--- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala
+++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala
@@ -826,15 +826,17 @@ class HiveWindowFunctionQueryFileSuite
     "windowing_ntile",
     "windowing_udaf",
     "windowing_windowspec",
-    "windowing_rank"
-  )
+    "windowing_rank",
 
-  override def whiteList: Seq[String] = Seq(
-    "windowing_udaf2",
+    // These tests drop tables that do not exist (without specifying IF EXISTS)
     "windowing_columnPruning",
     "windowing_adjust_rowcontainer_sz"
   )
 
+  override def whiteList: Seq[String] = Seq(
+    "windowing_udaf2"
+  )
+
   // Only run those query tests in the realWhileList (do not try other ignored query files).
   override def testCases: Seq[(String, File)] = super.testCases.filter {
     case (name, _) => realWhiteList.contains(name)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 686c63065dfcca6ff8e75ba914961fe45ad19e2f..153b0c3c72a785f7d04c7c2c7edda70231140682 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -548,7 +548,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
       }.getMessage.contains("Unable to infer schema"),
       "We should complain that path is not specified.")
 
-    sql("DROP TABLE createdJsonTable")
+    sql("DROP TABLE IF EXISTS createdJsonTable")
   }
 
   test("scan a parquet table created through a CTAS statement") {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
index cc05e56d6616401fc93b47a59ad66385d3094755..266fdd6c1f19ff633b4c723168e2fff13267eb8c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
@@ -61,8 +61,8 @@ class QueryPartitionSuite extends QueryTest with SQLTestUtils with TestHiveSingl
       checkAnswer(sql("select key,value from table_with_partition"),
         testData.toDF.collect ++ testData.toDF.collect ++ testData.toDF.collect)
 
-      sql("DROP TABLE table_with_partition")
-      sql("DROP TABLE createAndInsertTest")
+      sql("DROP TABLE IF EXISTS table_with_partition")
+      sql("DROP TABLE IF EXISTS createAndInsertTest")
     }
   }
 }