Commit fa01bec4 authored by Josh Rosen, committed by Reynold Xin

[Build] Enable MiMa checks for SQL

Now that 1.3 has been released, we should enable MiMa checks for the `sql` subproject.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #5727 from JoshRosen/enable-more-mima-checks and squashes the following commits:

3ad302b [Josh Rosen] Merge remote-tracking branch 'origin/master' into enable-more-mima-checks
0c48e4d [Josh Rosen] Merge remote-tracking branch 'origin/master' into enable-more-mima-checks
e276cee [Josh Rosen] Fix SQL MiMa checks via excludes and private[sql]
44d0d01 [Josh Rosen] Add back 'launcher' exclude
1aae027 [Josh Rosen] Enable MiMa checks for launcher and sql projects.
parent 77cc25fb
project/MimaExcludes.scala
@@ -88,6 +88,22 @@ object MimaExcludes {
             "org.apache.spark.mllib.linalg.Vector.toSparse"),
           ProblemFilters.exclude[MissingMethodProblem](
             "org.apache.spark.mllib.linalg.Vector.numActives")
+        ) ++ Seq(
+          // This `protected[sql]` method was removed in 1.3.1
+          ProblemFilters.exclude[MissingMethodProblem](
+            "org.apache.spark.sql.SQLContext.checkAnalysis"),
+          // This `private[sql]` class was removed in 1.4.0:
+          ProblemFilters.exclude[MissingClassProblem](
+            "org.apache.spark.sql.execution.AddExchange"),
+          ProblemFilters.exclude[MissingClassProblem](
+            "org.apache.spark.sql.execution.AddExchange$"),
+          // These test support classes were moved out of src/main and into src/test:
+          ProblemFilters.exclude[MissingClassProblem](
+            "org.apache.spark.sql.parquet.ParquetTestData"),
+          ProblemFilters.exclude[MissingClassProblem](
+            "org.apache.spark.sql.parquet.ParquetTestData$"),
+          ProblemFilters.exclude[MissingClassProblem](
+            "org.apache.spark.sql.parquet.TestGroupWriteSupport")
         )
       case v if v.startsWith("1.3") =>
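For orientation, each entry added above is a ProblemFilter from sbt-mima-plugin: a predicate that suppresses one reported incompatibility, matched by fully qualified name, and keyed to the release under development so that old waivers do not mask regressions in later versions. A minimal sketch of the same structure (everything under org.example is hypothetical):

import com.typesafe.tools.mima.core._

object ExampleExcludes {
  // Exclude lists are selected by the version under development, mirroring
  // the version match in MimaExcludes above.
  def excludes(version: String): Seq[ProblemFilter] = version match {
    case v if v.startsWith("1.4") => Seq(
      // A class that was deliberately removed:
      ProblemFilters.exclude[MissingClassProblem]("org.example.RemovedHelper"),
      // A method that was deliberately removed:
      ProblemFilters.exclude[MissingMethodProblem]("org.example.Api.oldMethod")
    )
    case _ => Seq.empty
  }
}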
project/SparkBuild.scala
@@ -156,9 +156,8 @@ object SparkBuild extends PomBuild {
   /* Enable tests settings for all projects except examples, assembly and tools */
   (allProjects ++ optionallyEnabledProjects).foreach(enable(TestSettings.settings))
 
-  // TODO: Add Sql to mima checks
-  // TODO: remove launcher from this list after 1.3.
-  allProjects.filterNot(x => Seq(spark, sql, hive, hiveThriftServer, catalyst, repl,
+  // TODO: remove launcher from this list after 1.4.0
+  allProjects.filterNot(x => Seq(spark, hive, hiveThriftServer, catalyst, repl,
     networkCommon, networkShuffle, networkYarn, launcher, unsafe).contains(x)).foreach {
     x => enable(MimaBuild.mimaSettings(sparkHome, x))(x)
   }
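The filterNot list above is what decides which sbt projects get MiMa coverage at all; dropping sql from it is the actual "enable" in this commit. What enable(MimaBuild.mimaSettings(sparkHome, x))(x) attaches is, roughly, the plugin's default settings plus a reference artifact and the exclude filters. A simplified sketch follows, not Spark's actual MimaBuild: the key names are those of the 0.1.x sbt-mima-plugin generation in use at the time, and the artifact coordinates are illustrative.

import sbt._
import com.typesafe.tools.mima.core._
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
import com.typesafe.tools.mima.plugin.MimaKeys.{binaryIssueFilters, previousArtifact}

object ExampleMimaSettings {
  // Point the plugin at the last released artifact and feed it the exclude
  // filters; MiMa then compares the module's current classfiles against
  // that artifact on every check.
  def sketch: Seq[Setting[_]] = mimaDefaultSettings ++ Seq(
    previousArtifact := Some("org.apache.spark" % "spark-sql_2.10" % "1.3.0"),
    binaryIssueFilters ++= Seq(
      // Hypothetical exclude, shaped like the MimaExcludes entries above.
      ProblemFilters.exclude[MissingClassProblem]("org.example.RemovedHelper")
    )
  )
}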
sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala
@@ -84,7 +84,7 @@ object RDDConversions {
 }
 
 /** Logical plan node for scanning data from an RDD. */
-case class LogicalRDD(output: Seq[Attribute], rdd: RDD[Row])(sqlContext: SQLContext)
+private[sql] case class LogicalRDD(output: Seq[Attribute], rdd: RDD[Row])(sqlContext: SQLContext)
   extends LogicalPlan with MultiInstanceRelation {
 
   override def children: Seq[LogicalPlan] = Nil
@@ -105,11 +105,12 @@ case class LogicalRDD(output: Seq[Attribute], rdd: RDD[Row])(sqlContext: SQLContext)
 }
 
 /** Physical plan node for scanning data from an RDD. */
-case class PhysicalRDD(output: Seq[Attribute], rdd: RDD[Row]) extends LeafNode {
+private[sql] case class PhysicalRDD(output: Seq[Attribute], rdd: RDD[Row]) extends LeafNode {
   override def execute(): RDD[Row] = rdd
 }
 
 /** Logical plan node for scanning data from a local collection. */
+private[sql]
 case class LogicalLocalTable(output: Seq[Attribute], rows: Seq[Row])(sqlContext: SQLContext)
   extends LogicalPlan with MultiInstanceRelation {
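The recurring edit in this file is the access qualifier. private[sql] keeps each operator visible throughout org.apache.spark.sql and its subpackages while hiding it from user code, and Spark's MiMa tooling (the GenerateMIMAIgnore helper) skips package-private classes, so the qualifier removes these operators from the audited API surface. A standalone illustration (ExampleNode is hypothetical):

package org.apache.spark.sql.execution

// Visible anywhere under org.apache.spark.sql, but not to outside callers:
// `new ExampleNode("x")` from user code outside that package fails to
// compile with an access error.
private[sql] case class ExampleNode(name: String)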
sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScan.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.expressions.Attribute
 /**
  * Physical plan node for scanning data from a local collection.
  */
-case class LocalTableScan(output: Seq[Attribute], rows: Seq[Row]) extends LeafNode {
+private[sql] case class LocalTableScan(output: Seq[Attribute], rows: Seq[Row]) extends LeafNode {
 
   private lazy val rdd = sqlContext.sparkContext.parallelize(rows)
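The one surviving body line shows where LocalTableScan's rows leave the driver: SparkContext.parallelize distributes an in-memory collection as an RDD. A minimal standalone use of that API (the local-mode master is chosen just for the demo):

import org.apache.spark.{SparkConf, SparkContext}

object ParallelizeDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("demo").setMaster("local[2]"))
    // Distribute a local collection, as LocalTableScan does with its rows.
    val rdd = sc.parallelize(Seq("a", "b", "c"))
    println(rdd.count()) // prints 3
    sc.stop()
  }
}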
sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
@@ -42,7 +42,7 @@ trait RunnableCommand extends logical.Command {
  * A physical operator that executes the run method of a `RunnableCommand` and
  * saves the result to prevent multiple executions.
  */
-case class ExecutedCommand(cmd: RunnableCommand) extends SparkPlan {
+private[sql] case class ExecutedCommand(cmd: RunnableCommand) extends SparkPlan {
   /**
    * A concrete command should override this lazy field to wrap up any side effects caused by the
    * command or any other computation that should be evaluated exactly once. The value of this field
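Beyond the visibility change, the surrounding doc comments spell out ExecutedCommand's contract: run the wrapped RunnableCommand once and cache the result so that repeated executions of the plan do not repeat the command's side effects. The same memoization pattern in isolation (all names hypothetical):

// A side-effecting command plus a wrapper that evaluates it at most once.
trait SimpleCommand {
  def run(): Seq[String]
}

final class ExecutedOnce(cmd: SimpleCommand) {
  // lazy val gives at-most-once, thread-safe initialization, so a plan node
  // that is executed from several places still runs the command one time.
  private lazy val sideEffectResult: Seq[String] = cmd.run()

  def execute(): Seq[String] = sideEffectResult
}

object ExecutedOnceDemo {
  def main(args: Array[String]): Unit = {
    var calls = 0
    val once = new ExecutedOnce(new SimpleCommand {
      def run(): Seq[String] = { calls += 1; Seq("done") }
    })
    once.execute()
    once.execute()
    assert(calls == 1) // the command body ran exactly once
  }
}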