Skip to content
Snippets Groups Projects
Commit 6091e91f authored by Reynold Xin's avatar Reynold Xin
Browse files

Revert "[SPARK-11469][SQL] Allow users to define nondeterministic udfs."

This reverts commit 9cf56c96.
parent 07414afa
No related branches found
No related tags found
No related merge requests found
......@@ -114,53 +114,6 @@ object MimaExcludes {
"org.apache.spark.rdd.MapPartitionsWithPreparationRDD"),
ProblemFilters.exclude[MissingClassProblem](
"org.apache.spark.rdd.MapPartitionsWithPreparationRDD$")
) ++ Seq(
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$2"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$3"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$4"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$5"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$6"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$7"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$8"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$9"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$10"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$11"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$12"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$13"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$14"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$15"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$16"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$17"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$18"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$19"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$20"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$21"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$22"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$23"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.sql.UDFRegistration.org$apache$spark$sql$UDFRegistration$$builder$24")
) ++ Seq(
// SPARK-11485
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.DataFrameHolder.df")
......
......@@ -30,18 +30,13 @@ case class ScalaUDF(
function: AnyRef,
dataType: DataType,
children: Seq[Expression],
inputTypes: Seq[DataType] = Nil,
isDeterministic: Boolean = true)
inputTypes: Seq[DataType] = Nil)
extends Expression with ImplicitCastInputTypes with CodegenFallback {
override def nullable: Boolean = true
override def toString: String = s"UDF(${children.mkString(",")})"
override def foldable: Boolean = deterministic && children.forall(_.foldable)
override def deterministic: Boolean = isDeterministic && children.forall(_.deterministic)
// scalastyle:off
/** This method has been generated by this script
......
......@@ -44,20 +44,11 @@ import org.apache.spark.sql.types.DataType
case class UserDefinedFunction protected[sql] (
f: AnyRef,
dataType: DataType,
inputTypes: Seq[DataType] = Nil,
deterministic: Boolean = true) {
inputTypes: Seq[DataType] = Nil) {
def apply(exprs: Column*): Column = {
Column(ScalaUDF(f, dataType, exprs.map(_.expr), inputTypes, deterministic))
Column(ScalaUDF(f, dataType, exprs.map(_.expr), inputTypes))
}
protected[sql] def builder: Seq[Expression] => ScalaUDF = {
(exprs: Seq[Expression]) =>
ScalaUDF(f, dataType, exprs, inputTypes, deterministic)
}
def nondeterministic: UserDefinedFunction =
UserDefinedFunction(f, dataType, inputTypes, deterministic = false)
}
/**
......
......@@ -17,8 +17,6 @@
package org.apache.spark.sql
import org.apache.spark.sql.catalyst.expressions.ScalaUDF
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.test.SQLTestData._
......@@ -193,107 +191,4 @@ class UDFSuite extends QueryTest with SharedSQLContext {
// pass a decimal to intExpected.
assert(sql("SELECT intExpected(1.0)").head().getInt(0) === 1)
}
/**
 * Asserts that the optimized plan of `df` contains exactly `expectedNumUDFs`
 * [[ScalaUDF]] expressions among the project lists of its Project nodes.
 */
private def checkNumUDFs(df: DataFrame, expectedNumUDFs: Int): Unit = {
  val foundUdfs: Seq[ScalaUDF] =
    df.queryExecution.optimizedPlan.collect {
      case project: logical.Project =>
        // Scan every projected expression tree for embedded ScalaUDF nodes.
        project.projectList.flatMap(_.collect { case scalaUdf: ScalaUDF => scalaUdf })
    }.flatten
  assert(foundUdfs.length === expectedNumUDFs)
}
// A deterministic UDF applied to a constant input should be constant-folded by the
// optimizer: the optimized plan is expected to contain no ScalaUDF expressions.
test("foldable udf") {
import org.apache.spark.sql.functions._
// Plain (deterministic) UDF — eligible for compile-time evaluation on literal input.
val myUDF = udf((x: Int) => x + 1)
{
val df = sql("SELECT 1 as a")
.select(col("a"), myUDF(col("a")).as("b"))
.select(col("a"), col("b"), myUDF(col("b")).as("c"))
// Both UDF calls operate on literals, so zero UDFs should survive optimization.
checkNumUDFs(df, 0)
checkAnswer(df, Row(1, 2, 3))
}
}
// Verifies that UDFs registered through UDFRegistration keep (or drop) their
// deterministic flag, by counting the ScalaUDF nodes left in the optimized plan.
// NOTE(review): the expected counts (3 vs 2) presumably reflect project-collapsing
// duplicating deterministic UDF calls while nondeterministic ones block collapsing
// — confirm against the optimizer rules.
test("nondeterministic udf: using UDFRegistration") {
import org.apache.spark.sql.functions._
// register(...) returns the UserDefinedFunction; "plusOne1" is the deterministic form.
val myUDF = sqlContext.udf.register("plusOne1", (x: Int) => x + 1)
// "plusOne2" is the same function registered as nondeterministic.
sqlContext.udf.register("plusOne2", myUDF.nondeterministic)
{
// Deterministic UDF invoked directly through the Scala API.
val df = sqlContext.range(1, 2).select(col("id").as("a"))
.select(col("a"), myUDF(col("a")).as("b"))
.select(col("a"), col("b"), myUDF(col("b")).as("c"))
checkNumUDFs(df, 3)
checkAnswer(df, Row(1, 2, 3))
}
{
// Deterministic UDF invoked by registered name via callUDF — same expected count.
val df = sqlContext.range(1, 2).select(col("id").as("a"))
.select(col("a"), callUDF("plusOne1", col("a")).as("b"))
.select(col("a"), col("b"), callUDF("plusOne1", col("b")).as("c"))
checkNumUDFs(df, 3)
checkAnswer(df, Row(1, 2, 3))
}
{
// Nondeterministic variant via the Scala API — fewer UDF nodes remain.
val df = sqlContext.range(1, 2).select(col("id").as("a"))
.select(col("a"), myUDF.nondeterministic(col("a")).as("b"))
.select(col("a"), col("b"), myUDF.nondeterministic(col("b")).as("c"))
checkNumUDFs(df, 2)
checkAnswer(df, Row(1, 2, 3))
}
{
// Nondeterministic variant invoked by registered name — must match the Scala API case.
val df = sqlContext.range(1, 2).select(col("id").as("a"))
.select(col("a"), callUDF("plusOne2", col("a")).as("b"))
.select(col("a"), col("b"), callUDF("plusOne2", col("b")).as("c"))
checkNumUDFs(df, 2)
checkAnswer(df, Row(1, 2, 3))
}
}
// Same checks as the UDFRegistration test above, but for UDFs created with the
// functions.udf(...) factory rather than registered by name.
test("nondeterministic udf: using udf function") {
import org.apache.spark.sql.functions._
val myUDF = udf((x: Int) => x + 1)
{
// Deterministic form over non-constant input (range) — UDFs are not folded.
// NOTE(review): count of 3 presumably comes from project-collapsing duplicating
// the inner call — confirm against the optimizer rules.
val df = sqlContext.range(1, 2).select(col("id").as("a"))
.select(col("a"), myUDF(col("a")).as("b"))
.select(col("a"), col("b"), myUDF(col("b")).as("c"))
checkNumUDFs(df, 3)
checkAnswer(df, Row(1, 2, 3))
}
{
// Nondeterministic form — two UDF nodes remain.
val df = sqlContext.range(1, 2).select(col("id").as("a"))
.select(col("a"), myUDF.nondeterministic(col("a")).as("b"))
.select(col("a"), col("b"), myUDF.nondeterministic(col("b")).as("c"))
checkNumUDFs(df, 2)
checkAnswer(df, Row(1, 2, 3))
}
{
// nondeterministicUDF will not be foldable.
// Even with a literal input, nondeterministic UDFs must NOT be constant-folded.
val df = sql("SELECT 1 as a")
.select(col("a"), myUDF.nondeterministic(col("a")).as("b"))
.select(col("a"), col("b"), myUDF.nondeterministic(col("b")).as("c"))
checkNumUDFs(df, 2)
checkAnswer(df, Row(1, 2, 3))
}
}
// Re-registering a UDF under an existing name must replace the earlier implementation.
test("override a registered udf") {
  // Helper evaluating the registered UDF fresh on each call, so re-registration is observed.
  def evaluateIntExpected(): Int = sql("SELECT intExpected(1.0)").head().getInt(0)

  sqlContext.udf.register("intExpected", (x: Int) => x)
  assert(evaluateIntExpected() === 1)

  sqlContext.udf.register("intExpected", (x: Int) => x + 1)
  assert(evaluateIntExpected() === 2)
}
}
......@@ -381,7 +381,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest with SharedSQLContext {
sqlContext.udf.register("div0", (x: Int) => x / 0)
withTempPath { dir =>
intercept[org.apache.spark.SparkException] {
sqlContext.range(1, 2).selectExpr("div0(id) as a").write.parquet(dir.getCanonicalPath)
sqlContext.sql("select div0(1)").write.parquet(dir.getCanonicalPath)
}
val path = new Path(dir.getCanonicalPath, "_temporary")
val fs = path.getFileSystem(hadoopConfiguration)
......@@ -405,7 +405,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest with SharedSQLContext {
sqlContext.udf.register("div0", (x: Int) => x / 0)
withTempPath { dir =>
intercept[org.apache.spark.SparkException] {
sqlContext.range(1, 2).selectExpr("div0(id) as a").write.parquet(dir.getCanonicalPath)
sqlContext.sql("select div0(1)").write.parquet(dir.getCanonicalPath)
}
val path = new Path(dir.getCanonicalPath, "_temporary")
val fs = path.getFileSystem(hadoopConfiguration)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment