diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala
index 04f0cfce883f2779f3fecaa931396f2ee7c3118b..61c153c10e47c24834c6ea230496cc541b204c8b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.catalyst
 
 import java.beans.{Introspector, PropertyDescriptor}
 import java.lang.{Iterable => JIterable}
+import java.lang.reflect.Type
 import java.util.{Iterator => JIterator, List => JList, Map => JMap}
 
 import scala.language.existentials
@@ -54,12 +55,21 @@ object JavaTypeInference {
     inferDataType(TypeToken.of(beanClass))
   }
 
+  /**
+   * Infers the corresponding SQL data type of a Java type.
+   * @param beanType Java type
+   * @return (SQL data type, nullable)
+   */
+  private[sql] def inferDataType(beanType: Type): (DataType, Boolean) = {
+    inferDataType(TypeToken.of(beanType))
+  }
+
   /**
    * Infers the corresponding SQL data type of a Java type.
    * @param typeToken Java type
    * @return (SQL data type, nullable)
    */
-  private[sql] def inferDataType(typeToken: TypeToken[_]): (DataType, Boolean) = {
+  private def inferDataType(typeToken: TypeToken[_]): (DataType, Boolean) = {
     typeToken.getRawType match {
       case c: Class[_] if c.isAnnotationPresent(classOf[SQLUserDefinedType]) =>
         (c.getAnnotation(classOf[SQLUserDefinedType]).udt().newInstance(), true)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/UDFRegistration.scala b/sql/core/src/main/scala/org/apache/spark/sql/UDFRegistration.scala
index c8be89c64695725bcef646633e3b8cf326e283b4..d94185b39044819efd1ab328d52c782e58c3f0e1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/UDFRegistration.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/UDFRegistration.scala
@@ -23,8 +23,6 @@ import java.lang.reflect.{ParameterizedType, Type}
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.Try
 
-import com.google.common.reflect.TypeToken
-
 import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.api.java._
@@ -446,7 +444,7 @@ class UDFRegistration private[sql] (functionRegistry: FunctionRegistry) extends
           val udfReturnType = udfInterfaces(0).getActualTypeArguments.last
           var returnType = returnDataType
           if (returnType == null) {
-            returnType = JavaTypeInference.inferDataType(TypeToken.of(udfReturnType))._1
+            returnType = JavaTypeInference.inferDataType(udfReturnType)._1
           }
 
           udfInterfaces(0).getActualTypeArguments.length match {
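
For context, below is a minimal sketch (not part of the patch) of the reflection flow that the UDFRegistration change relies on: a class implementing one of the UDF interfaces exposes its return type as a plain java.lang.reflect.Type, which the new overload can consume directly, so the Guava TypeToken wrapping stays an internal detail of JavaTypeInference. The sketch assumes it is compiled inside the org.apache.spark.sql package (the new overload is private[sql]); the names StrLen and InferReturnTypeExample are hypothetical.

// Minimal sketch, assuming it lives in the org.apache.spark.sql package so
// that the private[sql] inferDataType overload is visible from here.
package org.apache.spark.sql

import java.lang.reflect.{ParameterizedType, Type}

import org.apache.spark.sql.api.java.UDF1
import org.apache.spark.sql.catalyst.JavaTypeInference

object InferReturnTypeExample {
  // A toy Java-style UDF (hypothetical) whose return type we want to infer.
  class StrLen extends UDF1[String, Integer] {
    override def call(s: String): Integer = s.length
  }

  def main(args: Array[String]): Unit = {
    // UDF1[String, Integer] appears as a ParameterizedType among the class's
    // generic interfaces; its last type argument is the UDF's return type.
    val udfInterface = classOf[StrLen].getGenericInterfaces
      .collectFirst { case p: ParameterizedType => p }
      .get
    val returnType: Type = udfInterface.getActualTypeArguments.last

    // With the new overload, the raw java.lang.reflect.Type is passed
    // directly; no TypeToken.of(...) wrapping is needed at the call site.
    val (dataType, nullable) = JavaTypeInference.inferDataType(returnType)
    println(s"inferred: $dataType, nullable = $nullable")
  }
}

Note the division of visibility the patch establishes: the TypeToken-based method is narrowed from private[sql] to private, so callers outside JavaTypeInference see only Class- and Type-based entry points and no Guava types leak into their signatures.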