From 04d417a7ca8ef694658b26fb697a035717414731 Mon Sep 17 00:00:00 2001
From: Reynold Xin <rxin@databricks.com>
Date: Thu, 13 Oct 2016 11:12:30 -0700
Subject: [PATCH] [SPARK-17830][SQL] Annotate remaining SQL APIs with
 InterfaceStability

## What changes were proposed in this pull request?
This patch annotates all the remaining APIs in SQL (excluding streaming) with InterfaceStability annotations.
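
For reference, a minimal sketch of using one of the newly annotated APIs from Java (the application name, the registered UDF name, and the `Udf1Example` class are illustrative assumptions, not part of this patch):

```java
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.types.DataTypes;

public class Udf1Example {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder().appName("udf1-example").getOrCreate();

    // UDF1 is now annotated @InterfaceStability.Stable, so code written
    // against it is covered by Spark's compatibility guarantees.
    spark.udf().register("plusOne", (UDF1<Integer, Integer>) x -> x + 1, DataTypes.IntegerType);

    spark.sql("SELECT plusOne(41)").show();
    spark.stop();
  }
}
```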

## How was this patch tested?
N/A - this is an annotation-only change; no behavior is affected.

Author: Reynold Xin <rxin@databricks.com>

Closes #15457 from rxin/SPARK-17830-2.
---
 .../java/org/apache/spark/sql/SaveMode.java   |  3 +++
 .../org/apache/spark/sql/api/java/UDF1.java   |  8 +++---
 .../org/apache/spark/sql/api/java/UDF10.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF11.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF12.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF13.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF14.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF15.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF16.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF17.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF18.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF19.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF2.java   |  8 +++---
 .../org/apache/spark/sql/api/java/UDF20.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF21.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF22.java  |  8 +++---
 .../org/apache/spark/sql/api/java/UDF3.java   |  8 +++---
 .../org/apache/spark/sql/api/java/UDF4.java   |  8 +++---
 .../org/apache/spark/sql/api/java/UDF5.java   |  8 +++---
 .../org/apache/spark/sql/api/java/UDF6.java   |  8 +++---
 .../org/apache/spark/sql/api/java/UDF7.java   |  8 +++---
 .../org/apache/spark/sql/api/java/UDF8.java   |  8 +++---
 .../org/apache/spark/sql/api/java/UDF9.java   |  8 +++---
 .../spark/sql/expressions/javalang/typed.java |  2 ++
 .../apache/spark/sql/catalog/Catalog.scala    |  9 ++++++-
 .../apache/spark/sql/catalog/interface.scala  |  5 ++++
 .../spark/sql/expressions/Aggregator.scala    |  3 ++-
 .../sql/expressions/UserDefinedFunction.scala |  3 ++-
 .../apache/spark/sql/expressions/Window.scala |  4 ++-
 .../spark/sql/expressions/WindowSpec.scala    |  7 ++---
 .../sql/expressions/scalalang/typed.scala     |  3 ++-
 .../apache/spark/sql/expressions/udaf.scala   |  8 +++++-
 .../apache/spark/sql/jdbc/JdbcDialects.scala  |  5 +++-
 .../apache/spark/sql/sources/filters.scala    | 18 +++++++++++++
 .../apache/spark/sql/sources/interfaces.scala | 26 +++++++++++++++++--
 35 files changed, 150 insertions(+), 122 deletions(-)

diff --git a/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java b/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java
index 9665c3c46f..1c3c9794fb 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java
@@ -16,11 +16,14 @@
  */
 package org.apache.spark.sql;
 
+import org.apache.spark.annotation.InterfaceStability;
+
 /**
  * SaveMode is used to specify the expected behavior of saving a DataFrame to a data source.
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 public enum SaveMode {
   /**
    * Append mode means that when saving a DataFrame to a data source, if data/table already exists,
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java
index ef959e35e1..1460daf27d 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 1 argument.
  */
+@InterfaceStability.Stable
 public interface UDF1<T1, R> extends Serializable {
-  public R call(T1 t1) throws Exception;
+  R call(T1 t1) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF10.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF10.java
index 96ab3a96c3..7c4f1e4897 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF10.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF10.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 10 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF11.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF11.java
index 58ae8edd6d..26a05106ae 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF11.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF11.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 11 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF12.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF12.java
index d9da0f6edd..8ef7a99042 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF12.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF12.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 12 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF13.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF13.java
index 095fc1a807..5c3b2ec122 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF13.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF13.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 13 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF14.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF14.java
index eb27eaa180..97e744d843 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF14.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF14.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 14 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF15.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF15.java
index 1fbcff5633..7ddbf914fc 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF15.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF15.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 15 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF16.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF16.java
index 1133561787..0ae5dc7195 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF16.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF16.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 16 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF17.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF17.java
index dfae7922c9..03543a556c 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF17.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF17.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 17 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF18.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF18.java
index e9d1c6d52d..46740d3443 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF18.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF18.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 18 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF19.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF19.java
index 46b9d2d3c9..33fefd8eca 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF19.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF19.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 19 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF2.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF2.java
index cd3fde8da4..9822f19217 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF2.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF2.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 2 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF2<T1, T2, R> extends Serializable {
-  public R call(T1 t1, T2 t2) throws Exception;
+  R call(T1 t1, T2 t2) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF20.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF20.java
index 113d3d26be..8c5e90182d 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF20.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF20.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 20 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF21.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF21.java
index 74118f2cf8..e3b09f5167 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF21.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF21.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 21 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF22.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF22.java
index 0e7cc40be4..dc6cfa9097 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF22.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF22.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 22 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21, T22 t22) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21, T22 t22) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF3.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF3.java
index 6a880f16be..7c264b69ba 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF3.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF3.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 3 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF3<T1, T2, T3, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF4.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF4.java
index fcad2febb1..58df38fc3c 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF4.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF4.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 4 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF4<T1, T2, T3, T4, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF5.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF5.java
index ce0cef43a2..4146f96e2e 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF5.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF5.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 5 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF5<T1, T2, T3, T4, T5, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF6.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF6.java
index f56b806684..25d39654c1 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF6.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF6.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 6 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF6<T1, T2, T3, T4, T5, T6, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF7.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF7.java
index 25bd6d3241..ce63b6a91a 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF7.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF7.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 7 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF7<T1, T2, T3, T4, T5, T6, T7, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF8.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF8.java
index a3b7ac5f94..0e00209ef6 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF8.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF8.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 8 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF8<T1, T2, T3, T4, T5, T6, T7, T8, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF9.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF9.java
index 205e72a152..077981bb3e 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF9.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF9.java
@@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
 
-// **************************************************
-// THIS FILE IS AUTOGENERATED BY CODE IN
-// org.apache.spark.sql.api.java.FunctionRegistration
-// **************************************************
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * A Spark SQL UDF that has 9 arguments.
  */
+@InterfaceStability.Stable
 public interface UDF9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R> extends Serializable {
-  public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9) throws Exception;
+  R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9) throws Exception;
 }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/expressions/javalang/typed.java b/sql/core/src/main/java/org/apache/spark/sql/expressions/javalang/typed.java
index 247e94b86c..ec9c107b1c 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/expressions/javalang/typed.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/expressions/javalang/typed.java
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.expressions.javalang;
 
 import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.InterfaceStability;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.TypedColumn;
 import org.apache.spark.sql.execution.aggregate.TypedAverage;
@@ -34,6 +35,7 @@ import org.apache.spark.sql.execution.aggregate.TypedSumLong;
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 public class typed {
   // Note: make sure to keep in sync with typed.scala
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
index 18cba8ce28..889b8a0278 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalog
 
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset}
 import org.apache.spark.sql.types.StructType
 
@@ -27,6 +27,7 @@ import org.apache.spark.sql.types.StructType
  *
  * @since 2.0.0
  */
+@InterfaceStability.Stable
 abstract class Catalog {
 
   /**
@@ -193,6 +194,7 @@ abstract class Catalog {
    * @since 2.0.0
    */
   @Experimental
+  @InterfaceStability.Evolving
   def createExternalTable(tableName: String, path: String): DataFrame
 
   /**
@@ -203,6 +205,7 @@ abstract class Catalog {
    * @since 2.0.0
    */
   @Experimental
+  @InterfaceStability.Evolving
   def createExternalTable(tableName: String, path: String, source: String): DataFrame
 
   /**
@@ -213,6 +216,7 @@ abstract class Catalog {
    * @since 2.0.0
    */
   @Experimental
+  @InterfaceStability.Evolving
   def createExternalTable(
       tableName: String,
       source: String,
@@ -227,6 +231,7 @@ abstract class Catalog {
    * @since 2.0.0
    */
   @Experimental
+  @InterfaceStability.Evolving
   def createExternalTable(
       tableName: String,
       source: String,
@@ -240,6 +245,7 @@ abstract class Catalog {
    * @since 2.0.0
    */
   @Experimental
+  @InterfaceStability.Evolving
   def createExternalTable(
       tableName: String,
       source: String,
@@ -255,6 +261,7 @@ abstract class Catalog {
    * @since 2.0.0
    */
   @Experimental
+  @InterfaceStability.Evolving
   def createExternalTable(
       tableName: String,
       source: String,
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala
index 33032f07f7..c0c5ebc2ba 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.catalog
 
 import javax.annotation.Nullable
 
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.DefinedByConstructorParams
 
 
@@ -33,6 +34,7 @@ import org.apache.spark.sql.catalyst.DefinedByConstructorParams
  * @param locationUri path (in the form of a uri) to data files.
  * @since 2.0.0
  */
+@InterfaceStability.Stable
 class Database(
     val name: String,
     @Nullable val description: String,
@@ -59,6 +61,7 @@ class Database(
  * @param isTemporary whether the table is a temporary table.
  * @since 2.0.0
  */
+@InterfaceStability.Stable
 class Table(
     val name: String,
     @Nullable val database: String,
@@ -90,6 +93,7 @@ class Table(
  * @param isBucket whether the column is a bucket column.
  * @since 2.0.0
  */
+@InterfaceStability.Stable
 class Column(
     val name: String,
     @Nullable val description: String,
@@ -122,6 +126,7 @@ class Column(
  * @param isTemporary whether the function is a temporary function or not.
  * @since 2.0.0
  */
+@InterfaceStability.Stable
 class Function(
     val name: String,
     @Nullable val database: String,
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala
index 51179a528c..eea9841400 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.expressions
 
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.sql.{Dataset, Encoder, TypedColumn}
 import org.apache.spark.sql.catalyst.encoders.encoderFor
 import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, Complete}
@@ -51,6 +51,7 @@ import org.apache.spark.sql.execution.aggregate.TypedAggregateExpression
  * @since 1.6.0
  */
 @Experimental
+@InterfaceStability.Evolving
 abstract class Aggregator[-IN, BUF, OUT] extends Serializable {
 
   /**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala
index 49fdec5755..2e0e937e4a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.expressions
 
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.sql.catalyst.expressions.ScalaUDF
 import org.apache.spark.sql.Column
 import org.apache.spark.sql.functions
@@ -40,6 +40,7 @@ import org.apache.spark.sql.types.DataType
  * @since 1.3.0
  */
 @Experimental
+@InterfaceStability.Evolving
 case class UserDefinedFunction protected[sql] (
     f: AnyRef,
     dataType: DataType,
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala
index 3c1f6e897e..07ef60183f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.expressions
 
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.sql.Column
 import org.apache.spark.sql.catalyst.expressions._
 
@@ -37,6 +37,7 @@ import org.apache.spark.sql.catalyst.expressions._
  * @since 1.4.0
  */
 @Experimental
+@InterfaceStability.Evolving
 object Window {
 
   /**
@@ -177,4 +178,5 @@ object Window {
  * @since 1.4.0
  */
 @Experimental
+@InterfaceStability.Evolving
 class Window private()  // So we can see Window in JavaDoc.
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala
index 8ebed399bf..18778c8d1c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.sql.expressions
 
-import org.apache.spark.annotation.Experimental
-import org.apache.spark.sql.{catalyst, Column}
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
+import org.apache.spark.sql.Column
 import org.apache.spark.sql.catalyst.expressions._
 
 /**
@@ -30,10 +30,11 @@ import org.apache.spark.sql.catalyst.expressions._
  * @since 1.4.0
  */
 @Experimental
+@InterfaceStability.Evolving
 class WindowSpec private[sql](
     partitionSpec: Seq[Expression],
     orderSpec: Seq[SortOrder],
-    frame: catalyst.expressions.WindowFrame) {
+    frame: WindowFrame) {
 
   /**
    * Defines the partitioning columns in a [[WindowSpec]].
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/scalalang/typed.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/scalalang/typed.scala
index 60d7b7d089..aa71cb9e3b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/scalalang/typed.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/scalalang/typed.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.expressions.scalalang
 
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.sql._
 import org.apache.spark.sql.execution.aggregate._
 
@@ -30,6 +30,7 @@ import org.apache.spark.sql.execution.aggregate._
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 // scalastyle:off
 object typed {
   // scalastyle:on
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/udaf.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/udaf.scala
index 5417a0e481..ef7c09c72b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/udaf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/udaf.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.expressions
 
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.sql.{Column, Row}
 import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, Complete}
 import org.apache.spark.sql.execution.aggregate.ScalaUDAF
@@ -26,8 +26,11 @@ import org.apache.spark.sql.types._
 /**
  * :: Experimental ::
  * The base class for implementing user-defined aggregate functions (UDAF).
+ *
+ * @since 1.5.0
  */
 @Experimental
+@InterfaceStability.Evolving
 abstract class UserDefinedAggregateFunction extends Serializable {
 
   /**
@@ -136,8 +139,11 @@ abstract class UserDefinedAggregateFunction extends Serializable {
  * A [[Row]] representing a mutable aggregation buffer.
  *
  * This is not meant to be extended outside of Spark.
+ *
+ * @since 1.5.0
  */
 @Experimental
+@InterfaceStability.Evolving
 abstract class MutableAggregationBuffer extends Row {
 
   /** Update the ith value of this buffer. */
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
index 8dd4b8f662..dec316be7a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.jdbc
 
 import java.sql.Connection
 
-import org.apache.spark.annotation.{DeveloperApi, Since}
+import org.apache.spark.annotation.{DeveloperApi, InterfaceStability, Since}
 import org.apache.spark.sql.types._
 
 /**
@@ -31,6 +31,7 @@ import org.apache.spark.sql.types._
  *                     send a null value to the database.
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 case class JdbcType(databaseTypeDefinition : String, jdbcNullType : Int)
 
 /**
@@ -53,6 +54,7 @@ case class JdbcType(databaseTypeDefinition : String, jdbcNullType : Int)
  * for the given Catalyst type.
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 abstract class JdbcDialect extends Serializable {
   /**
    * Check if this dialect instance can handle a certain jdbc url.
@@ -142,6 +144,7 @@ abstract class JdbcDialect extends Serializable {
  * sure to register your dialects first.
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 object JdbcDialects {
 
   /**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/filters.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/filters.scala
index 13c0766219..e0494dfd93 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/filters.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/filters.scala
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql.sources
 
+import org.apache.spark.annotation.InterfaceStability
+
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // This file defines all the filters that we can push down to the data sources.
 ////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -26,6 +28,7 @@ package org.apache.spark.sql.sources
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 abstract class Filter {
   /**
    * List of columns that are referenced by this filter.
@@ -45,6 +48,7 @@ abstract class Filter {
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class EqualTo(attribute: String, value: Any) extends Filter {
   override def references: Array[String] = Array(attribute) ++ findReferences(value)
 }
@@ -56,6 +60,7 @@ case class EqualTo(attribute: String, value: Any) extends Filter {
  *
  * @since 1.5.0
  */
+@InterfaceStability.Stable
 case class EqualNullSafe(attribute: String, value: Any) extends Filter {
   override def references: Array[String] = Array(attribute) ++ findReferences(value)
 }
@@ -66,6 +71,7 @@ case class EqualNullSafe(attribute: String, value: Any) extends Filter {
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class GreaterThan(attribute: String, value: Any) extends Filter {
   override def references: Array[String] = Array(attribute) ++ findReferences(value)
 }
@@ -76,6 +82,7 @@ case class GreaterThan(attribute: String, value: Any) extends Filter {
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class GreaterThanOrEqual(attribute: String, value: Any) extends Filter {
   override def references: Array[String] = Array(attribute) ++ findReferences(value)
 }
@@ -86,6 +93,7 @@ case class GreaterThanOrEqual(attribute: String, value: Any) extends Filter {
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class LessThan(attribute: String, value: Any) extends Filter {
   override def references: Array[String] = Array(attribute) ++ findReferences(value)
 }
@@ -96,6 +104,7 @@ case class LessThan(attribute: String, value: Any) extends Filter {
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class LessThanOrEqual(attribute: String, value: Any) extends Filter {
   override def references: Array[String] = Array(attribute) ++ findReferences(value)
 }
@@ -105,6 +114,7 @@ case class LessThanOrEqual(attribute: String, value: Any) extends Filter {
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class In(attribute: String, values: Array[Any]) extends Filter {
   override def hashCode(): Int = {
     var h = attribute.hashCode
@@ -131,6 +141,7 @@ case class In(attribute: String, values: Array[Any]) extends Filter {
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class IsNull(attribute: String) extends Filter {
   override def references: Array[String] = Array(attribute)
 }
@@ -140,6 +151,7 @@ case class IsNull(attribute: String) extends Filter {
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class IsNotNull(attribute: String) extends Filter {
   override def references: Array[String] = Array(attribute)
 }
@@ -149,6 +161,7 @@ case class IsNotNull(attribute: String) extends Filter {
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class And(left: Filter, right: Filter) extends Filter {
   override def references: Array[String] = left.references ++ right.references
 }
@@ -158,6 +171,7 @@ case class And(left: Filter, right: Filter) extends Filter {
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class Or(left: Filter, right: Filter) extends Filter {
   override def references: Array[String] = left.references ++ right.references
 }
@@ -167,6 +181,7 @@ case class Or(left: Filter, right: Filter) extends Filter {
  *
  * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class Not(child: Filter) extends Filter {
   override def references: Array[String] = child.references
 }
@@ -177,6 +192,7 @@ case class Not(child: Filter) extends Filter {
  *
  * @since 1.3.1
  */
+@InterfaceStability.Stable
 case class StringStartsWith(attribute: String, value: String) extends Filter {
   override def references: Array[String] = Array(attribute)
 }
@@ -187,6 +203,7 @@ case class StringStartsWith(attribute: String, value: String) extends Filter {
  *
  * @since 1.3.1
  */
+@InterfaceStability.Stable
 case class StringEndsWith(attribute: String, value: String) extends Filter {
   override def references: Array[String] = Array(attribute)
 }
@@ -197,6 +214,7 @@ case class StringEndsWith(attribute: String, value: String) extends Filter {
  *
  * @since 1.3.1
  */
+@InterfaceStability.Stable
 case class StringContains(attribute: String, value: String) extends Filter {
   override def references: Array[String] = Array(attribute)
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
index 6484c782b5..3172d5ded9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.sources
 
-import org.apache.spark.annotation.{DeveloperApi, Experimental}
+import org.apache.spark.annotation.{DeveloperApi, Experimental, InterfaceStability}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.InternalRow
@@ -37,6 +37,7 @@ import org.apache.spark.sql.types.StructType
  * @since 1.5.0
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 trait DataSourceRegister {
 
   /**
@@ -68,6 +69,7 @@ trait DataSourceRegister {
  * @since 1.3.0
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 trait RelationProvider {
   /**
    * Returns a new base relation with the given parameters.
@@ -99,6 +101,7 @@ trait RelationProvider {
  * @since 1.3.0
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 trait SchemaRelationProvider {
   /**
    * Returns a new base relation with the given parameters and user defined schema.
@@ -114,17 +117,26 @@ trait SchemaRelationProvider {
 /**
  * ::Experimental::
  * Implemented by objects that can produce a streaming [[Source]] for a specific format or system.
+ *
+ * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Unstable
 trait StreamSourceProvider {
 
-  /** Returns the name and schema of the source that can be used to continually read data. */
+  /**
+   * Returns the name and schema of the source that can be used to continually read data.
+   * @since 2.0.0
+   */
   def sourceSchema(
       sqlContext: SQLContext,
       schema: Option[StructType],
       providerName: String,
       parameters: Map[String, String]): (String, StructType)
 
+  /**
+   * @since 2.0.0
+   */
   def createSource(
       sqlContext: SQLContext,
       metadataPath: String,
@@ -136,8 +148,11 @@ trait StreamSourceProvider {
 /**
  * ::Experimental::
  * Implemented by objects that can produce a streaming [[Sink]] for a specific format or system.
+ *
+ * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Unstable
 trait StreamSinkProvider {
   def createSink(
       sqlContext: SQLContext,
@@ -150,6 +165,7 @@ trait StreamSinkProvider {
  * @since 1.3.0
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 trait CreatableRelationProvider {
   /**
    * Save the DataFrame to the destination and return a relation with the given parameters based on
@@ -186,6 +202,7 @@ trait CreatableRelationProvider {
  * @since 1.3.0
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 abstract class BaseRelation {
   def sqlContext: SQLContext
   def schema: StructType
@@ -237,6 +254,7 @@ abstract class BaseRelation {
  * @since 1.3.0
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 trait TableScan {
   def buildScan(): RDD[Row]
 }
@@ -249,6 +267,7 @@ trait TableScan {
  * @since 1.3.0
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 trait PrunedScan {
   def buildScan(requiredColumns: Array[String]): RDD[Row]
 }
@@ -268,6 +287,7 @@ trait PrunedScan {
  * @since 1.3.0
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 trait PrunedFilteredScan {
   def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row]
 }
@@ -291,6 +311,7 @@ trait PrunedFilteredScan {
  * @since 1.3.0
  */
 @DeveloperApi
+@InterfaceStability.Evolving
 trait InsertableRelation {
   def insert(data: DataFrame, overwrite: Boolean): Unit
 }
@@ -306,6 +327,7 @@ trait InsertableRelation {
  * @since 1.3.0
  */
 @Experimental
+@InterfaceStability.Unstable
 trait CatalystScan {
   def buildScan(requiredColumns: Seq[Attribute], filters: Seq[Expression]): RDD[Row]
 }
-- 
GitLab