diff --git a/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java b/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java index 9665c3c46f9019fcfbd85cbe199d65d5a1932b2f..1c3c9794fb6bb5b1a0f0dbd5a667967972c925c3 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java +++ b/sql/core/src/main/java/org/apache/spark/sql/SaveMode.java @@ -16,11 +16,14 @@ */ package org.apache.spark.sql; +import org.apache.spark.annotation.InterfaceStability; + /** * SaveMode is used to specify the expected behavior of saving a DataFrame to a data source. * * @since 1.3.0 */ +@InterfaceStability.Stable public enum SaveMode { /** * Append mode means that when saving a DataFrame to a data source, if data/table already exists, diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java index ef959e35e102754f2cae2caea0ff4ecfae23978c..1460daf27dc2079eca0d22c6fc45c075ea43c2f7 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF1.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 1 arguments. */ +@InterfaceStability.Stable public interface UDF1<T1, R> extends Serializable { - public R call(T1 t1) throws Exception; + R call(T1 t1) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF10.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF10.java index 96ab3a96c3d5e3af65309332c908240fc8a6d956..7c4f1e4897084fd919487dee0605124217fe2b7f 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF10.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF10.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 10 arguments. */ +@InterfaceStability.Stable public interface UDF10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF11.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF11.java index 58ae8edd6d817abe715014009284ccaa4c5b84d9..26a05106aebd6e624d23ce2bcc00cce2aa4a25aa 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF11.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF11.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 11 arguments. 
*/ +@InterfaceStability.Stable public interface UDF11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF12.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF12.java index d9da0f6eddd942965ad7fe31caf4ee6f8e22469d..8ef7a9904202555b0539f24509bc0943a5a0af14 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF12.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF12.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 12 arguments. */ +@InterfaceStability.Stable public interface UDF12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF13.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF13.java index 095fc1a8076b5dbb5d5e643342a811cffc37bef8..5c3b2ec1222e25bc5729d76e7968e7d9394f259c 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF13.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF13.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 13 arguments. */ +@InterfaceStability.Stable public interface UDF13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF14.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF14.java index eb27eaa1800864f1709d7df1e86ae9640c219188..97e744d8434660414dce5400d4a8e0a618f73912 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF14.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF14.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 14 arguments. 
*/ +@InterfaceStability.Stable public interface UDF14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF15.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF15.java index 1fbcff56332b63ba367210b2404af90c0946e015..7ddbf914fc11ac5df5dd41bdf32794a0a94ee4eb 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF15.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF15.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 15 arguments. */ +@InterfaceStability.Stable public interface UDF15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF16.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF16.java index 1133561787a69bed1ffaf91954bced5f31d99da1..0ae5dc7195ad63bd8ea6adcbb225be9e8e0bba02 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF16.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF16.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 16 arguments. 
*/ +@InterfaceStability.Stable public interface UDF16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF17.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF17.java index dfae7922c9b63fefec0e147c4dfd12ffd45bd413..03543a556c6141618fad744c03ceba9b228ec979 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF17.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF17.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 17 arguments. */ +@InterfaceStability.Stable public interface UDF17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF18.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF18.java index e9d1c6d52d4ea2d58732b5cd09ac7fa789b04aa7..46740d3443916157268365d9ba3b9865a24e2e8a 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF18.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF18.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 18 arguments. 
*/ +@InterfaceStability.Stable public interface UDF18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF19.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF19.java index 46b9d2d3c945796f4032700b964b243b36cace67..33fefd8ecaf1d8becefb5c187ce40503aeefa17e 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF19.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF19.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 19 arguments. */ +@InterfaceStability.Stable public interface UDF19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF2.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF2.java index cd3fde8da419e6c8ab4bc92f042099a77c4027d5..9822f19217d7605255283ddccff4791b81722599 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF2.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF2.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 2 arguments. */ +@InterfaceStability.Stable public interface UDF2<T1, T2, R> extends Serializable { - public R call(T1 t1, T2 t2) throws Exception; + R call(T1 t1, T2 t2) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF20.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF20.java index 113d3d26be4a71b0d2148c706de4ed8dd7fb5991..8c5e90182da1c3a557d4ec93c476dc19fe659594 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF20.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF20.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 20 arguments. 
*/ +@InterfaceStability.Stable public interface UDF20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF21.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF21.java index 74118f2cf8da761b20e0bc703d66a13322f009bc..e3b09f5167cffe91813445633136480d633a96fd 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF21.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF21.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 21 arguments. */ +@InterfaceStability.Stable public interface UDF21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF22.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF22.java index 0e7cc40be45ec85c3a87e94395d9518ab673874c..dc6cfa9097bab0911001cf8e1ce07c64c414183c 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF22.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF22.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 22 arguments. 
*/ +@InterfaceStability.Stable public interface UDF22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21, T22 t22) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21, T22 t22) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF3.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF3.java index 6a880f16be47a3a78098070f5ed65de55f5ad4d5..7c264b69ba19541104f89b3116064da2782e2e1a 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF3.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF3.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 3 arguments. */ +@InterfaceStability.Stable public interface UDF3<T1, T2, T3, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3) throws Exception; + R call(T1 t1, T2 t2, T3 t3) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF4.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF4.java index fcad2febb18e6e021810e69d3617e3bbaa00d7e2..58df38fc3c911b7abd09e4168d46ad767f4e0296 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF4.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF4.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 4 arguments. */ +@InterfaceStability.Stable public interface UDF4<T1, T2, T3, T4, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF5.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF5.java index ce0cef43a214425aa800e0db170ed3b020c866f7..4146f96e2eed5dda284390db87d2d577aa101757 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF5.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF5.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 5 arguments. 
*/ +@InterfaceStability.Stable public interface UDF5<T1, T2, T3, T4, T5, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF6.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF6.java index f56b806684e61ea110c65a927eac3985c6d444f9..25d39654c10957b63d56ca2ad1b0fdaa50b73d3c 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF6.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF6.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 6 arguments. */ +@InterfaceStability.Stable public interface UDF6<T1, T2, T3, T4, T5, T6, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF7.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF7.java index 25bd6d3241bd4ff15cb545f1220879daf5d312ac..ce63b6a91adbbd02246a4a678316ce5248b45196 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF7.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF7.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 7 arguments. */ +@InterfaceStability.Stable public interface UDF7<T1, T2, T3, T4, T5, T6, T7, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF8.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF8.java index a3b7ac5f94ce7b65dfc7eeeccca50318263299bd..0e00209ef6b9f376e052da23c4098a4f120ea644 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF8.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF8.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 8 arguments. 
*/ +@InterfaceStability.Stable public interface UDF8<T1, T2, T3, T4, T5, T6, T7, T8, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF9.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF9.java index 205e72a1522fccc937814bbcc9e19acf14126a64..077981bb3e3ee39f228a7199efc3c0bb105006f0 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF9.java +++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/UDF9.java @@ -19,14 +19,12 @@ package org.apache.spark.sql.api.java; import java.io.Serializable; -// ************************************************** -// THIS FILE IS AUTOGENERATED BY CODE IN -// org.apache.spark.sql.api.java.FunctionRegistration -// ************************************************** +import org.apache.spark.annotation.InterfaceStability; /** * A Spark SQL UDF that has 9 arguments. */ +@InterfaceStability.Stable public interface UDF9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R> extends Serializable { - public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9) throws Exception; + R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9) throws Exception; } diff --git a/sql/core/src/main/java/org/apache/spark/sql/expressions/javalang/typed.java b/sql/core/src/main/java/org/apache/spark/sql/expressions/javalang/typed.java index 247e94b86c3490dfb792c00c563de21795ca93bf..ec9c107b1c119a81291a75458053775256d30306 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/expressions/javalang/typed.java +++ b/sql/core/src/main/java/org/apache/spark/sql/expressions/javalang/typed.java @@ -18,6 +18,7 @@ package org.apache.spark.sql.expressions.javalang; import org.apache.spark.annotation.Experimental; +import org.apache.spark.annotation.InterfaceStability; import org.apache.spark.api.java.function.MapFunction; import org.apache.spark.sql.TypedColumn; import org.apache.spark.sql.execution.aggregate.TypedAverage; @@ -34,6 +35,7 @@ import org.apache.spark.sql.execution.aggregate.TypedSumLong; * @since 2.0.0 */ @Experimental +@InterfaceStability.Evolving public class typed { // Note: make sure to keep in sync with typed.scala diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala index 18cba8ce28b4d45ab3e273cc001688d380168666..889b8a02784d6122425141d8f272a819269906c0 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.catalog -import org.apache.spark.annotation.Experimental +import org.apache.spark.annotation.{Experimental, InterfaceStability} import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset} import org.apache.spark.sql.types.StructType @@ -27,6 +27,7 @@ import org.apache.spark.sql.types.StructType * * @since 2.0.0 */ +@InterfaceStability.Stable abstract class Catalog { /** @@ -193,6 +194,7 @@ abstract class Catalog { * @since 2.0.0 */ @Experimental + @InterfaceStability.Evolving def createExternalTable(tableName: String, path: String): DataFrame /** @@ -203,6 +205,7 @@ abstract class Catalog { * @since 2.0.0 */ @Experimental + @InterfaceStability.Evolving def createExternalTable(tableName: String, path: String, source: String): DataFrame /** @@ -213,6 +216,7 @@ 
abstract class Catalog { * @since 2.0.0 */ @Experimental + @InterfaceStability.Evolving def createExternalTable( tableName: String, source: String, @@ -227,6 +231,7 @@ abstract class Catalog { * @since 2.0.0 */ @Experimental + @InterfaceStability.Evolving def createExternalTable( tableName: String, source: String, @@ -240,6 +245,7 @@ abstract class Catalog { * @since 2.0.0 */ @Experimental + @InterfaceStability.Evolving def createExternalTable( tableName: String, source: String, @@ -255,6 +261,7 @@ abstract class Catalog { * @since 2.0.0 */ @Experimental + @InterfaceStability.Evolving def createExternalTable( tableName: String, source: String, diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala index 33032f07f7bea6fa7681dafa1b08ea1dbd0b2906..c0c5ebc2ba2d6fcf794c1a7fcb4f140a1de83574 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/catalog/interface.scala @@ -19,6 +19,7 @@ package org.apache.spark.sql.catalog import javax.annotation.Nullable +import org.apache.spark.annotation.InterfaceStability import org.apache.spark.sql.catalyst.DefinedByConstructorParams @@ -33,6 +34,7 @@ import org.apache.spark.sql.catalyst.DefinedByConstructorParams * @param locationUri path (in the form of a uri) to data files. * @since 2.0.0 */ +@InterfaceStability.Stable class Database( val name: String, @Nullable val description: String, @@ -59,6 +61,7 @@ class Database( * @param isTemporary whether the table is a temporary table. * @since 2.0.0 */ +@InterfaceStability.Stable class Table( val name: String, @Nullable val database: String, @@ -90,6 +93,7 @@ class Table( * @param isBucket whether the column is a bucket column. * @since 2.0.0 */ +@InterfaceStability.Stable class Column( val name: String, @Nullable val description: String, @@ -122,6 +126,7 @@ class Column( * @param isTemporary whether the function is a temporary function or not. 
* @since 2.0.0 */ +@InterfaceStability.Stable class Function( val name: String, @Nullable val database: String, diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala index 51179a528c50337a5841b5f7542e8b656eebacd5..eea98414003ba70ef1a1fcbfc2f79abad6503ff4 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.expressions -import org.apache.spark.annotation.Experimental +import org.apache.spark.annotation.{Experimental, InterfaceStability} import org.apache.spark.sql.{Dataset, Encoder, TypedColumn} import org.apache.spark.sql.catalyst.encoders.encoderFor import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, Complete} @@ -51,6 +51,7 @@ import org.apache.spark.sql.execution.aggregate.TypedAggregateExpression * @since 1.6.0 */ @Experimental +@InterfaceStability.Evolving abstract class Aggregator[-IN, BUF, OUT] extends Serializable { /** diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala index 49fdec57558e8a78b6779a3b21896b53254c7a12..2e0e937e4aff7ef93993ce103eba0a21f1868257 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.expressions -import org.apache.spark.annotation.Experimental +import org.apache.spark.annotation.{Experimental, InterfaceStability} import org.apache.spark.sql.catalyst.expressions.ScalaUDF import org.apache.spark.sql.Column import org.apache.spark.sql.functions @@ -40,6 +40,7 @@ import org.apache.spark.sql.types.DataType * @since 1.3.0 */ @Experimental +@InterfaceStability.Evolving case class UserDefinedFunction protected[sql] ( f: AnyRef, dataType: DataType, diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala index 3c1f6e897ea62a3337dde4cd6d970b5f3676297a..07ef60183f6fb69a29b3e7a4dcc090b4446fd5c4 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.expressions -import org.apache.spark.annotation.Experimental +import org.apache.spark.annotation.{Experimental, InterfaceStability} import org.apache.spark.sql.Column import org.apache.spark.sql.catalyst.expressions._ @@ -37,6 +37,7 @@ import org.apache.spark.sql.catalyst.expressions._ * @since 1.4.0 */ @Experimental +@InterfaceStability.Evolving object Window { /** @@ -177,4 +178,5 @@ object Window { * @since 1.4.0 */ @Experimental +@InterfaceStability.Evolving class Window private() // So we can see Window in JavaDoc. 
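
Both `Window` above and the `WindowSpec` it produces (next file) are marked `Evolving` by this patch. For orientation, here is a minimal sketch of the API being annotated; the session setup and the `dept`/`salary` column names are illustrative assumptions, not part of this diff:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._

val spark = SparkSession.builder().master("local[*]").appName("window-demo").getOrCreate()
import spark.implicits._

// Hypothetical data; the column names are assumptions for illustration only.
val df = Seq(("eng", 100), ("eng", 120), ("sales", 90)).toDF("dept", "salary")

// Window.partitionBy(...).orderBy(...) returns the WindowSpec annotated in the next file.
val byDept = Window.partitionBy("dept").orderBy(desc("salary"))

// rank() evaluated over the spec: salary rank within each department.
df.withColumn("rank", rank().over(byDept)).show()
```
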
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala index 8ebed399bf2d00943a74d33123f980af2aa9b97b..18778c8d1c294403e53be418359d567fb9e7314c 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala @@ -17,8 +17,8 @@ package org.apache.spark.sql.expressions -import org.apache.spark.annotation.Experimental -import org.apache.spark.sql.{catalyst, Column} +import org.apache.spark.annotation.{Experimental, InterfaceStability} +import org.apache.spark.sql.Column import org.apache.spark.sql.catalyst.expressions._ /** @@ -30,10 +30,11 @@ import org.apache.spark.sql.catalyst.expressions._ * @since 1.4.0 */ @Experimental +@InterfaceStability.Evolving class WindowSpec private[sql]( partitionSpec: Seq[Expression], orderSpec: Seq[SortOrder], - frame: catalyst.expressions.WindowFrame) { + frame: WindowFrame) { /** * Defines the partitioning columns in a [[WindowSpec]]. diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/scalalang/typed.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/scalalang/typed.scala index 60d7b7d0894d07958019a688f5d2fadb9dc9b998..aa71cb9e3bc857dca5cab326382cf2334fea91c0 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/scalalang/typed.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/scalalang/typed.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.expressions.scalalang -import org.apache.spark.annotation.Experimental +import org.apache.spark.annotation.{Experimental, InterfaceStability} import org.apache.spark.sql._ import org.apache.spark.sql.execution.aggregate._ @@ -30,6 +30,7 @@ import org.apache.spark.sql.execution.aggregate._ * @since 2.0.0 */ @Experimental +@InterfaceStability.Evolving // scalastyle:off object typed { // scalastyle:on diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/udaf.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/udaf.scala index 5417a0e481158471ee5626a2acaf3b2cde6427a9..ef7c09c72b82dea7b2e29c0bf7b238195dadb8dd 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/udaf.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/udaf.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.expressions -import org.apache.spark.annotation.Experimental +import org.apache.spark.annotation.{Experimental, InterfaceStability} import org.apache.spark.sql.{Column, Row} import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, Complete} import org.apache.spark.sql.execution.aggregate.ScalaUDAF @@ -26,8 +26,11 @@ import org.apache.spark.sql.types._ /** * :: Experimental :: * The base class for implementing user-defined aggregate functions (UDAF). + * + * @since 1.5.0 */ @Experimental +@InterfaceStability.Evolving abstract class UserDefinedAggregateFunction extends Serializable { /** @@ -136,8 +139,11 @@ abstract class UserDefinedAggregateFunction extends Serializable { * A [[Row]] representing a mutable aggregation buffer. * * This is not meant to be extended outside of Spark. + * + * @since 1.5.0 */ @Experimental +@InterfaceStability.Evolving abstract class MutableAggregationBuffer extends Row { /** Update the ith value of this buffer. 
*/ diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala index 8dd4b8f662713a2e7c7816f9479b62dc866994f4..dec316be7aea1e9efaa79c72f65e33ce7abbd655 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.jdbc import java.sql.Connection -import org.apache.spark.annotation.{DeveloperApi, Since} +import org.apache.spark.annotation.{DeveloperApi, InterfaceStability, Since} import org.apache.spark.sql.types._ /** @@ -31,6 +31,7 @@ import org.apache.spark.sql.types._ * send a null value to the database. */ @DeveloperApi +@InterfaceStability.Evolving case class JdbcType(databaseTypeDefinition : String, jdbcNullType : Int) /** @@ -53,6 +54,7 @@ case class JdbcType(databaseTypeDefinition : String, jdbcNullType : Int) * for the given Catalyst type. */ @DeveloperApi +@InterfaceStability.Evolving abstract class JdbcDialect extends Serializable { /** * Check if this dialect instance can handle a certain jdbc url. @@ -142,6 +144,7 @@ abstract class JdbcDialect extends Serializable { * sure to register your dialects first. */ @DeveloperApi +@InterfaceStability.Evolving object JdbcDialects { /** diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/filters.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/filters.scala index 13c0766219a8eb4bb5ef116e5dabf09ed2d4cba8..e0494dfd9343b4d71e22fa8d47547efa94aca0fa 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/sources/filters.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/filters.scala @@ -17,6 +17,8 @@ package org.apache.spark.sql.sources +import org.apache.spark.annotation.InterfaceStability + //////////////////////////////////////////////////////////////////////////////////////////////////// // This file defines all the filters that we can push down to the data sources. //////////////////////////////////////////////////////////////////////////////////////////////////// @@ -26,6 +28,7 @@ package org.apache.spark.sql.sources * * @since 1.3.0 */ +@InterfaceStability.Stable abstract class Filter { /** * List of columns that are referenced by this filter. 
@@ -45,6 +48,7 @@ abstract class Filter { * * @since 1.3.0 */ +@InterfaceStability.Stable case class EqualTo(attribute: String, value: Any) extends Filter { override def references: Array[String] = Array(attribute) ++ findReferences(value) } @@ -56,6 +60,7 @@ case class EqualTo(attribute: String, value: Any) extends Filter { * * @since 1.5.0 */ +@InterfaceStability.Stable case class EqualNullSafe(attribute: String, value: Any) extends Filter { override def references: Array[String] = Array(attribute) ++ findReferences(value) } @@ -66,6 +71,7 @@ case class EqualNullSafe(attribute: String, value: Any) extends Filter { * * @since 1.3.0 */ +@InterfaceStability.Stable case class GreaterThan(attribute: String, value: Any) extends Filter { override def references: Array[String] = Array(attribute) ++ findReferences(value) } @@ -76,6 +82,7 @@ case class GreaterThan(attribute: String, value: Any) extends Filter { * * @since 1.3.0 */ +@InterfaceStability.Stable case class GreaterThanOrEqual(attribute: String, value: Any) extends Filter { override def references: Array[String] = Array(attribute) ++ findReferences(value) } @@ -86,6 +93,7 @@ case class GreaterThanOrEqual(attribute: String, value: Any) extends Filter { * * @since 1.3.0 */ +@InterfaceStability.Stable case class LessThan(attribute: String, value: Any) extends Filter { override def references: Array[String] = Array(attribute) ++ findReferences(value) } @@ -96,6 +104,7 @@ case class LessThan(attribute: String, value: Any) extends Filter { * * @since 1.3.0 */ +@InterfaceStability.Stable case class LessThanOrEqual(attribute: String, value: Any) extends Filter { override def references: Array[String] = Array(attribute) ++ findReferences(value) } @@ -105,6 +114,7 @@ case class LessThanOrEqual(attribute: String, value: Any) extends Filter { * * @since 1.3.0 */ +@InterfaceStability.Stable case class In(attribute: String, values: Array[Any]) extends Filter { override def hashCode(): Int = { var h = attribute.hashCode @@ -131,6 +141,7 @@ case class In(attribute: String, values: Array[Any]) extends Filter { * * @since 1.3.0 */ +@InterfaceStability.Stable case class IsNull(attribute: String) extends Filter { override def references: Array[String] = Array(attribute) } @@ -140,6 +151,7 @@ case class IsNull(attribute: String) extends Filter { * * @since 1.3.0 */ +@InterfaceStability.Stable case class IsNotNull(attribute: String) extends Filter { override def references: Array[String] = Array(attribute) } @@ -149,6 +161,7 @@ case class IsNotNull(attribute: String) extends Filter { * * @since 1.3.0 */ +@InterfaceStability.Stable case class And(left: Filter, right: Filter) extends Filter { override def references: Array[String] = left.references ++ right.references } @@ -158,6 +171,7 @@ case class And(left: Filter, right: Filter) extends Filter { * * @since 1.3.0 */ +@InterfaceStability.Stable case class Or(left: Filter, right: Filter) extends Filter { override def references: Array[String] = left.references ++ right.references } @@ -167,6 +181,7 @@ case class Or(left: Filter, right: Filter) extends Filter { * * @since 1.3.0 */ +@InterfaceStability.Stable case class Not(child: Filter) extends Filter { override def references: Array[String] = child.references } @@ -177,6 +192,7 @@ case class Not(child: Filter) extends Filter { * * @since 1.3.1 */ +@InterfaceStability.Stable case class StringStartsWith(attribute: String, value: String) extends Filter { override def references: Array[String] = Array(attribute) } @@ -187,6 +203,7 @@ case class 
StringStartsWith(attribute: String, value: String) extends Filter { * * @since 1.3.1 */ +@InterfaceStability.Stable case class StringEndsWith(attribute: String, value: String) extends Filter { override def references: Array[String] = Array(attribute) } @@ -197,6 +214,7 @@ case class StringEndsWith(attribute: String, value: String) extends Filter { * * @since 1.3.1 */ +@InterfaceStability.Stable case class StringContains(attribute: String, value: String) extends Filter { override def references: Array[String] = Array(attribute) } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala index 6484c782b5d1541fab5ffc16763a6f9057bfaaad..3172d5ded9504098fbeb8f8d84781632ed8403d8 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.sources -import org.apache.spark.annotation.{DeveloperApi, Experimental} +import org.apache.spark.annotation.{DeveloperApi, Experimental, InterfaceStability} import org.apache.spark.rdd.RDD import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.InternalRow @@ -37,6 +37,7 @@ import org.apache.spark.sql.types.StructType * @since 1.5.0 */ @DeveloperApi +@InterfaceStability.Evolving trait DataSourceRegister { /** @@ -68,6 +69,7 @@ trait DataSourceRegister { * @since 1.3.0 */ @DeveloperApi +@InterfaceStability.Evolving trait RelationProvider { /** * Returns a new base relation with the given parameters. @@ -99,6 +101,7 @@ trait RelationProvider { * @since 1.3.0 */ @DeveloperApi +@InterfaceStability.Evolving trait SchemaRelationProvider { /** * Returns a new base relation with the given parameters and user defined schema. @@ -114,17 +117,26 @@ trait SchemaRelationProvider { /** * ::Experimental:: * Implemented by objects that can produce a streaming [[Source]] for a specific format or system. + * + * @since 2.0.0 */ @Experimental +@InterfaceStability.Unstable trait StreamSourceProvider { - /** Returns the name and schema of the source that can be used to continually read data. */ + /** + * Returns the name and schema of the source that can be used to continually read data. + * @since 2.0.0 + */ def sourceSchema( sqlContext: SQLContext, schema: Option[StructType], providerName: String, parameters: Map[String, String]): (String, StructType) + /** + * @since 2.0.0 + */ def createSource( sqlContext: SQLContext, metadataPath: String, @@ -136,8 +148,11 @@ trait StreamSourceProvider { /** * ::Experimental:: * Implemented by objects that can produce a streaming [[Sink]] for a specific format or system. 
+ * + * @since 2.0.0 */ @Experimental +@InterfaceStability.Unstable trait StreamSinkProvider { def createSink( sqlContext: SQLContext, @@ -150,6 +165,7 @@ trait StreamSinkProvider { * @since 1.3.0 */ @DeveloperApi +@InterfaceStability.Evolving trait CreatableRelationProvider { /** * Save the DataFrame to the destination and return a relation with the given parameters based on @@ -186,6 +202,7 @@ trait CreatableRelationProvider { * @since 1.3.0 */ @DeveloperApi +@InterfaceStability.Evolving abstract class BaseRelation { def sqlContext: SQLContext def schema: StructType @@ -237,6 +254,7 @@ abstract class BaseRelation { * @since 1.3.0 */ @DeveloperApi +@InterfaceStability.Evolving trait TableScan { def buildScan(): RDD[Row] } @@ -249,6 +267,7 @@ trait TableScan { * @since 1.3.0 */ @DeveloperApi +@InterfaceStability.Evolving trait PrunedScan { def buildScan(requiredColumns: Array[String]): RDD[Row] } @@ -268,6 +287,7 @@ trait PrunedScan { * @since 1.3.0 */ @DeveloperApi +@InterfaceStability.Evolving trait PrunedFilteredScan { def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] } @@ -291,6 +311,7 @@ trait PrunedFilteredScan { * @since 1.3.0 */ @DeveloperApi +@InterfaceStability.Evolving trait InsertableRelation { def insert(data: DataFrame, overwrite: Boolean): Unit } @@ -306,6 +327,7 @@ trait InsertableRelation { * @since 1.3.0 */ @Experimental +@InterfaceStability.Unstable trait CatalystScan { def buildScan(requiredColumns: Seq[Attribute], filters: Seq[Expression]): RDD[Row] }
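
For context on how the `Stable` filters and the `Evolving` scan traits above fit together: Spark pushes `Filter` instances into `PrunedFilteredScan.buildScan`, and the relation may evaluate whichever ones it understands (by default Spark re-applies all filters on top, so skipping some is safe). A minimal in-memory sketch, where the relation name, schema, and data are invented for illustration:

```scala
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, EqualTo, Filter, GreaterThan, PrunedFilteredScan}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

// A toy relation over hard-coded rows; a real source would load external data.
class ToyRelation(override val sqlContext: SQLContext) extends BaseRelation with PrunedFilteredScan {
  private val data = Seq(Row("a", 1), Row("b", 2), Row("c", 3))

  override def schema: StructType =
    StructType(StructField("name", StringType) :: StructField("id", IntegerType) :: Nil)

  override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
    // Evaluate the pushed-down filters we understand; Spark re-evaluates the rest anyway.
    def keep(row: Row): Boolean = filters.forall {
      case EqualTo("name", v)        => row.getString(0) == v
      case GreaterThan("id", v: Int) => row.getInt(1) > v
      case _                         => true // unhandled filter: pass rows through
    }
    // Prune to the requested columns, in the requested order.
    val fieldIndex = schema.fieldNames.zipWithIndex.toMap
    val pruned = data.filter(keep).map { row =>
      Row.fromSeq(requiredColumns.map(c => row.get(fieldIndex(c))))
    }
    sqlContext.sparkContext.parallelize(pruned)
  }
}
```

A real data source would expose such a relation through a `RelationProvider` keyed by a `DataSourceRegister` short name, both of which are annotated `Evolving` in the interfaces.scala hunks above.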