diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index e1d49912c311fb1cfd3f5d9a132f9e50392f610d..ffaefeb09aedb16fbc47cc5045f9344bcea34231 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -403,7 +403,7 @@ class SessionCatalog(
       val relation =
         if (name.database.isDefined || !tempTables.contains(table)) {
           val metadata = externalCatalog.getTable(db, table)
-          SimpleCatalogRelation(db, metadata, alias)
+          SimpleCatalogRelation(db, metadata)
         } else {
           tempTables(table)
         }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
index 6197acab3378620225fc70675034235a5e391a46..b12606e17d3803535ede20ecd3b421c4a573ec9e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
@@ -244,8 +244,7 @@ trait CatalogRelation {
  */
 case class SimpleCatalogRelation(
     databaseName: String,
-    metadata: CatalogTable,
-    alias: Option[String] = None)
+    metadata: CatalogTable)
   extends LeafNode with CatalogRelation {
 
   override def catalogTable: CatalogTable = metadata
@@ -261,7 +260,7 @@ case class SimpleCatalogRelation(
         CatalystSqlParser.parseDataType(f.dataType),
         // Since data can be dumped in randomly with no validation, everything is nullable.
         nullable = true
-      )(qualifier = Some(alias.getOrElse(metadata.identifier.table)))
+      )(qualifier = Some(metadata.identifier.table))
     }
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index c8e7c5103b6eb2343c006692ef4361c4eb0efd9f..05eb302c3c03ab972a52fa80082db8eda22711af 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -407,7 +407,7 @@ class SessionCatalogSuite extends SparkFunSuite {
     val relationWithAlias =
       SubqueryAlias(alias,
         SubqueryAlias("tbl1",
-          SimpleCatalogRelation("db2", tableMetadata, Some(alias))))
+          SimpleCatalogRelation("db2", tableMetadata)))
     assert(catalog.lookupRelation(
       TableIdentifier("tbl1", Some("db2")), alias = None) == relation)
     assert(catalog.lookupRelation(
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 20e64a4e09daca47083ce0158306b4875b95b851..2be51ed0e87e74096011ad1fc9019cf345dfd2e2 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -180,8 +180,10 @@ private[hive] class HiveMetastoreCatalog(sparkSession: SparkSession) extends Log
         SubqueryAlias(aliasText, sessionState.sqlParser.parsePlan(viewText))
       }
     } else {
-      MetastoreRelation(
-        qualifiedTableName.database, qualifiedTableName.name, alias)(table, client, sparkSession)
+      val qualifiedTable =
+        MetastoreRelation(
+          qualifiedTableName.database, qualifiedTableName.name)(table, client, sparkSession)
+      alias.map(a => SubqueryAlias(a, qualifiedTable)).getOrElse(qualifiedTable)
     }
   }
 
@@ -385,7 +387,7 @@ private[hive] class HiveMetastoreCatalog(sparkSession: SparkSession) extends Log
       // Read path
       case relation: MetastoreRelation if shouldConvertMetastoreParquet(relation) =>
         val parquetRelation = convertToParquetRelation(relation)
-        SubqueryAlias(relation.alias.getOrElse(relation.tableName), parquetRelation)
+        SubqueryAlias(relation.tableName, parquetRelation)
     }
   }
 }
@@ -423,7 +425,7 @@ private[hive] class HiveMetastoreCatalog(sparkSession: SparkSession) extends Log
       // Read path
       case relation: MetastoreRelation if shouldConvertMetastoreOrc(relation) =>
         val orcRelation = convertToOrcRelation(relation)
-        SubqueryAlias(relation.alias.getOrElse(relation.tableName), orcRelation)
+        SubqueryAlias(relation.tableName, orcRelation)
     }
   }
 }
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
index 58bca2059cacc085252f5c099b00d7693c3fa5a7..3ab1bdabb99b3f60bacc391099391f434045131b 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
@@ -41,8 +41,7 @@ import org.apache.spark.sql.hive.client.HiveClient
 
 private[hive] case class MetastoreRelation(
     databaseName: String,
-    tableName: String,
-    alias: Option[String])
+    tableName: String)
   (val catalogTable: CatalogTable,
    @transient private val client: HiveClient,
    @transient private val sparkSession: SparkSession)
@@ -52,13 +51,12 @@ private[hive] case class MetastoreRelation(
     case relation: MetastoreRelation =>
       databaseName == relation.databaseName &&
         tableName == relation.tableName &&
-        alias == relation.alias &&
         output == relation.output
     case _ => false
   }
 
   override def hashCode(): Int = {
-    Objects.hashCode(databaseName, tableName, alias, output)
+    Objects.hashCode(databaseName, tableName, output)
   }
 
   override protected def otherCopyArgs: Seq[AnyRef] = catalogTable :: sparkSession :: Nil
@@ -208,7 +206,7 @@ private[hive] case class MetastoreRelation(
       CatalystSqlParser.parseDataType(f.dataType),
       // Since data can be dumped in randomly with no validation, everything is nullable.
       nullable = true
-    )(qualifier = Some(alias.getOrElse(tableName)))
+    )(qualifier = Some(tableName))
   }
 
   /** PartitionKey attributes */
@@ -243,6 +241,6 @@ private[hive] case class MetastoreRelation(
   }
 
   override def newInstance(): MetastoreRelation = {
-    MetastoreRelation(databaseName, tableName, alias)(catalogTable, client, sparkSession)
+    MetastoreRelation(databaseName, tableName)(catalogTable, client, sparkSession)
   }
 }
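
The pattern this diff converges on can be sketched in isolation: instead of threading an alias: Option[String] field through every relation (where it leaks into equals/hashCode and the output qualifiers), the lookup site builds the unaliased relation and wraps it in a dedicated alias node only when an alias was given. The sketch below is illustrative only, not Spark code; TinyRelation and TinyAlias are hypothetical stand-ins for MetastoreRelation and SubqueryAlias.

// Minimal, self-contained Scala sketch of the aliasing pattern above.
// TinyRelation and TinyAlias are hypothetical stand-ins, not Spark classes.
object AliasSketch {
  sealed trait Plan

  // The relation no longer carries an alias, so equality depends only on
  // the table identity; two lookups of the same table compare equal
  // regardless of how each was aliased.
  case class TinyRelation(databaseName: String, tableName: String) extends Plan

  // Aliasing becomes a separate wrapper node rather than a relation field.
  case class TinyAlias(alias: String, child: Plan) extends Plan

  // Mirrors the idiom introduced in HiveMetastoreCatalog above:
  // build the unaliased relation, then wrap it only if an alias was given.
  def lookupRelation(db: String, table: String, alias: Option[String]): Plan = {
    val qualifiedTable = TinyRelation(db, table)
    alias.map(a => TinyAlias(a, qualifiedTable)).getOrElse(qualifiedTable)
  }

  def main(args: Array[String]): Unit = {
    val aliased = lookupRelation("db2", "tbl1", alias = Some("x"))
    val plain = lookupRelation("db2", "tbl1", alias = None)
    // The aliased lookup is just a wrapper around the same canonical relation.
    assert(aliased == TinyAlias("x", TinyRelation("db2", "tbl1")))
    assert(plain == TinyRelation("db2", "tbl1"))
  }
}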