diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/createDataSourceTables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/createDataSourceTables.scala
index 16d61157379b4a79d2ec1bda57014571ac5f83c6..1494341d5846b5504e7c9879df5ba0c24ecfd4dc 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/createDataSourceTables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/createDataSourceTables.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
 import org.apache.spark.sql.catalyst.catalog.{CatalogColumn, CatalogStorageFormat, CatalogTable, CatalogTableType}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.datasources.{BucketSpec, DataSource, HadoopFsRelation, LogicalRelation}
+import org.apache.spark.sql.execution.datasources._
 import org.apache.spark.sql.internal.HiveSerDe
 import org.apache.spark.sql.sources.InsertableRelation
 import org.apache.spark.sql.types._
@@ -84,7 +84,7 @@ case class CreateDataSourceTableCommand(
 
     var isExternal = true
     val optionsWithPath =
-      if (!options.contains("path") && managedIfNoPath) {
+      if (!new CaseInsensitiveMap(options).contains("path") && managedIfNoPath) {
         isExternal = false
         options + ("path" -> sessionState.catalog.defaultTablePath(tableIdent))
       } else {
@@ -157,7 +157,7 @@ case class CreateDataSourceTableAsSelectCommand(
     var createMetastoreTable = false
     var isExternal = true
     val optionsWithPath =
-      if (!options.contains("path")) {
+      if (!new CaseInsensitiveMap(options).contains("path")) {
         isExternal = false
         options + ("path" -> sessionState.catalog.defaultTablePath(tableIdent))
       } else {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 4bdcb96feb0ad8925353d48a0b1294b8cd9ffb6a..78c8f0043d8a7c85887b3db9b545d82f606b8b03 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -944,7 +944,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
   }
 
   test("CTAS: persisted partitioned data source table") {
-    withTempDir { dir =>
+    withTempPath { dir =>
       withTable("t") {
         val path = dir.getCanonicalPath
 
@@ -968,7 +968,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
   }
 
   test("CTAS: persisted bucketed data source table") {
-    withTempDir { dir =>
+    withTempPath { dir =>
       withTable("t") {
         val path = dir.getCanonicalPath
 
@@ -988,7 +988,9 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
 
         checkAnswer(table("t"), Row(1, 2))
       }
+    }
 
+    withTempPath { dir =>
       withTable("t") {
         val path = dir.getCanonicalPath
 
@@ -1012,7 +1014,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
   }
 
   test("CTAS: persisted partitioned bucketed data source table") {
-    withTempDir { dir =>
+    withTempPath { dir =>
       withTable("t") {
         val path = dir.getCanonicalPath
 
@@ -1035,4 +1037,25 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
       }
     }
   }
+
+  test("SPARK-15025: create datasource table with path with select") {
+    withTempPath { dir =>
+      withTable("t") {
+        val path = dir.getCanonicalPath
+
+        sql(
+          s"""CREATE TABLE t USING PARQUET
+             |OPTIONS (PATH '$path')
+             |AS SELECT 1 AS a, 2 AS b, 3 AS c
+           """.stripMargin
+        )
+        sql("insert into t values (2, 3, 4)")
+        checkAnswer(table("t"), Seq(Row(1, 2, 3), Row(2, 3, 4)))
+        val catalogTable = sharedState.externalCatalog.getTable("default", "t")
+        // there should not be a lowercase key 'path' now
+        assert(catalogTable.storage.serdeProperties.get("path").isEmpty)
+        assert(catalogTable.storage.serdeProperties.get("PATH").isDefined)
+      }
+    }
+  }
 }
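
Note on the core change above: wrapping the user-supplied options in CaseInsensitiveMap before the "path" check means an option spelled PATH in SQL is recognized, so the command no longer adds a second, lowercase "path" entry pointing at the default table location (which is what the new test's serdeProperties assertions verify). The sketch below is a standalone illustration under those assumptions, not Spark code; the object name and containsIgnoreCase helper are made up for the example.

// Standalone sketch (hypothetical names, not Spark's CaseInsensitiveMap) showing
// why a case-sensitive lookup misses an uppercase PATH option while a
// case-insensitive lookup finds it.
object PathOptionLookupSketch {

  // Hypothetical helper mirroring the case-insensitive contains() used in the patch.
  def containsIgnoreCase(options: Map[String, String], key: String): Boolean =
    options.keys.exists(_.equalsIgnoreCase(key))

  def main(args: Array[String]): Unit = {
    // Options as the user wrote them: CREATE TABLE t USING PARQUET OPTIONS (PATH '...')
    val options = Map("PATH" -> "/tmp/some/table/dir")

    // Old check: case-sensitive, so the user-supplied location is not seen and a
    // default "path" entry would be added alongside the original "PATH" one.
    println(options.contains("path"))             // false

    // New behavior: case-insensitive, so the existing PATH option is honored.
    println(containsIgnoreCase(options, "path"))  // true
  }
}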