Commit f280ccf4 authored by Marcelo Vanzin
[SPARK-18835][SQL] Don't expose Guava types in the JavaTypeInference API.

This avoids issues during maven tests because of shading.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #16260 from vanzin/SPARK-18835.
parent fb3081d3
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala

@@ -19,6 +19,7 @@ package org.apache.spark.sql.catalyst
 
 import java.beans.{Introspector, PropertyDescriptor}
 import java.lang.{Iterable => JIterable}
+import java.lang.reflect.Type
 import java.util.{Iterator => JIterator, List => JList, Map => JMap}
 
 import scala.language.existentials
@@ -54,12 +55,21 @@ object JavaTypeInference {
     inferDataType(TypeToken.of(beanClass))
   }
 
+  /**
+   * Infers the corresponding SQL data type of a Java type.
+   * @param beanType Java type
+   * @return (SQL data type, nullable)
+   */
+  private[sql] def inferDataType(beanType: Type): (DataType, Boolean) = {
+    inferDataType(TypeToken.of(beanType))
+  }
+
   /**
    * Infers the corresponding SQL data type of a Java type.
    * @param typeToken Java type
    * @return (SQL data type, nullable)
    */
-  private[sql] def inferDataType(typeToken: TypeToken[_]): (DataType, Boolean) = {
+  private def inferDataType(typeToken: TypeToken[_]): (DataType, Boolean) = {
     typeToken.getRawType match {
       case c: Class[_] if c.isAnnotationPresent(classOf[SQLUserDefinedType]) =>
         (c.getAnnotation(classOf[SQLUserDefinedType]).udt().newInstance(), true)
sql/core/src/main/scala/org/apache/spark/sql/UDFRegistration.scala

@@ -23,8 +23,6 @@ import java.lang.reflect.{ParameterizedType, Type}
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.Try
 
-import com.google.common.reflect.TypeToken
-
 import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.api.java._
@@ -446,7 +444,7 @@ class UDFRegistration private[sql] (functionRegistry: FunctionRegistry) extends
         val udfReturnType = udfInterfaces(0).getActualTypeArguments.last
         var returnType = returnDataType
         if (returnType == null) {
-          returnType = JavaTypeInference.inferDataType(TypeToken.of(udfReturnType))._1
+          returnType = JavaTypeInference.inferDataType(udfReturnType)._1
        }
 
        udfInterfaces(0).getActualTypeArguments.length match {
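
For context, a minimal sketch (not part of the commit) of how a caller inside Spark's sql module can now infer a Catalyst DataType from a plain java.lang.reflect.Type, mirroring the UDFRegistration change above. StrLen and InferReturnTypeExample are hypothetical names introduced for illustration; because the new overload is private[sql], this only compiles when placed inside the org.apache.spark.sql package of Spark itself.

package org.apache.spark.sql

import java.lang.reflect.{ParameterizedType, Type}

import org.apache.spark.sql.api.java.UDF1
import org.apache.spark.sql.catalyst.JavaTypeInference

// Hypothetical Java-style UDF whose declared return type we want to inspect.
class StrLen extends UDF1[String, java.lang.Integer] {
  override def call(s: String): java.lang.Integer = s.length
}

object InferReturnTypeExample {
  def main(args: Array[String]): Unit = {
    // Locate the UDF1 interface on the class and take its last type
    // argument, i.e. the UDF's declared return type -- the same pattern
    // UDFRegistration.registerJava uses in the hunk above.
    val udfInterface = classOf[StrLen].getGenericInterfaces
      .collectFirst { case p: ParameterizedType => p }
      .get
    val returnType: Type = udfInterface.getActualTypeArguments.last

    // With this commit the reflect.Type is passed directly; callers no
    // longer construct a Guava TypeToken themselves.
    val (dataType, nullable) = JavaTypeInference.inferDataType(returnType)
    println(s"inferred: $dataType, nullable = $nullable")
  }
}

Because the cross-module signature now takes java.lang.reflect.Type, the Guava TypeToken class (which gets relocated when Guava is shaded) no longer appears in the JavaTypeInference API surface, which is the point of the change.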