diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 0baf4c9f8c7abb9906c495983603e45df33b454e..9ae019842217d2903749a66d56fc6472e8f91bfd 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -390,6 +390,7 @@ object HiveMetastoreTypes extends RegexParsers {
     case d: DecimalType => HiveShim.decimalMetastoreString(d)
     case TimestampType => "timestamp"
     case NullType => "void"
+    case udt: UserDefinedType[_] => toMetastoreType(udt.sqlType)
   }
 }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreCatalogSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreCatalogSuite.scala
index 4a64b5f5eb1b4e571c37aadb5543477898fb43b4..86535f8dd4f58bd283ea9470224cebb33d6ecfdc 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreCatalogSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreCatalogSuite.scala
@@ -19,7 +19,8 @@ package org.apache.spark.sql.hive
 
 import org.scalatest.FunSuite
 
-import org.apache.spark.sql.catalyst.types.{DataType, StructType}
+import org.apache.spark.sql.catalyst.types.StructType
+import org.apache.spark.sql.test.ExamplePointUDT
 
 class HiveMetastoreCatalogSuite extends FunSuite {
 
@@ -29,4 +30,10 @@ class HiveMetastoreCatalogSuite extends FunSuite {
     val datatype = HiveMetastoreTypes.toDataType(metastr)
     assert(datatype.isInstanceOf[StructType])
   }
+
+  test("udt to metastore type conversion") {
+    val udt = new ExamplePointUDT
+    assert(HiveMetastoreTypes.toMetastoreType(udt) ===
+      HiveMetastoreTypes.toMetastoreType(udt.sqlType))
+  }
 }
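
Below is a minimal sketch of what the new UserDefinedType branch buys, written against the same Spark 1.2-era packages the patch touches. The object name UdtMetastoreExample is invented for illustration, and the expected "array<double>" string assumes ExamplePointUDT.sqlType is an array of doubles; neither is part of the patch itself.

package org.apache.spark.sql.hive

import org.apache.spark.sql.test.ExamplePointUDT

// Illustrative only: a UDT column is described to the Hive metastore by the
// metastore string of its underlying sqlType, which is exactly what the added
// "case udt: UserDefinedType[_]" branch delegates to.
object UdtMetastoreExample extends App {
  val udt = new ExamplePointUDT
  val metastoreString = HiveMetastoreTypes.toMetastoreType(udt)
  // Assuming ExamplePointUDT.sqlType is ArrayType(DoubleType, containsNull = false),
  // both lines print the same string, e.g. "array<double>".
  println(metastoreString)
  println(HiveMetastoreTypes.toMetastoreType(udt.sqlType))
}

Placing the sketch in package org.apache.spark.sql.hive mirrors the test suite and keeps it compiling even if HiveMetastoreTypes is package-private.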