From dfdcab00c7b6200c22883baa3ebc5818be09556f Mon Sep 17 00:00:00 2001
From: Zheng RuiFeng <ruifengz@foxmail.com>
Date: Mon, 9 May 2016 11:20:48 -0700
Subject: [PATCH] [SPARK-15210][SQL] Add missing @DeveloperApi annotation in
 sql.types

Add the missing `@DeveloperApi` annotation to `NumericType`, `MapType` and `PythonUserDefinedType` in `sql.types` (`AbstractDataType.scala`, `MapType.scala`, `UserDefinedType.scala`), and fix the spacing of the `:: DeveloperApi ::` Scaladoc tag in `UserDefinedType.scala`.
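
For context (not part of the diff), a minimal sketch of exercising the annotated types through the public `sql.types` API; `TypesExample` is just an illustrative object name, assuming a standard Spark build on the classpath:

```scala
import org.apache.spark.sql.types._

object TypesExample {
  def main(args: Array[String]): Unit = {
    // MapType(keyType, valueType, valueContainsNull), per its Scaladoc
    val scores: MapType = MapType(StringType, IntegerType, valueContainsNull = true)

    // IntegerType is one of the atomic types sitting under the now-annotated NumericType
    val numeric: NumericType = IntegerType

    println(scores.simpleString)  // e.g. map<string,int>
    println(numeric.simpleString) // e.g. int
  }
}
```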

Tested with a local build.

Author: Zheng RuiFeng <ruifengz@foxmail.com>

Closes #12982 from zhengruifeng/types_devapi.
---
 .../scala/org/apache/spark/sql/types/AbstractDataType.scala    | 2 ++
 .../src/main/scala/org/apache/spark/sql/types/MapType.scala    | 2 ++
 .../scala/org/apache/spark/sql/types/UserDefinedType.scala     | 3 ++-
 3 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index 90af10f7a6..03ea349221 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.types
 import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.{runtimeMirror, TypeTag}
 
+import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 import org.apache.spark.sql.catalyst.expressions.Expression
 import org.apache.spark.util.Utils
@@ -141,6 +142,7 @@ protected[sql] abstract class AtomicType extends DataType {
  * :: DeveloperApi ::
  * Numeric data types.
  */
+@DeveloperApi
 abstract class NumericType extends AtomicType {
   // Unfortunately we can't get this implicitly as that breaks Spark Serialization. In order for
   // implicitly[Numeric[JvmType]] to be valid, we have to change JvmType from a type variable to a
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
index 5474954af7..454ea403ba 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.types
 import org.json4s.JsonAST.JValue
 import org.json4s.JsonDSL._
 
+import org.apache.spark.annotation.DeveloperApi
 
 /**
  * :: DeveloperApi ::
@@ -31,6 +32,7 @@ import org.json4s.JsonDSL._
  * @param valueType The data type of map values.
  * @param valueContainsNull Indicates if map values have `null` values.
  */
+@DeveloperApi
 case class MapType(
   keyType: DataType,
   valueType: DataType,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
index aa36121bde..894631382f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
@@ -96,11 +96,12 @@ abstract class UserDefinedType[UserType >: Null] extends DataType with Serializa
 }
 
 /**
- * ::DeveloperApi::
+ * :: DeveloperApi ::
  * The user defined type in Python.
  *
  * Note: This can only be accessed via Python UDF, or accessed as serialized object.
  */
+@DeveloperApi
 private[sql] class PythonUserDefinedType(
     val sqlType: DataType,
     override val pyUDT: String,
-- 
GitLab