Commit c377e49e authored by Reynold Xin
[SPARK-16489][SQL] Guard against variable reuse mistakes in expression code generation

## What changes were proposed in this pull request?
In code generation, it is incorrect for an expression to reuse variable names across different instances of itself. As an example, SPARK-16488 reports a bug in which the pmod expression reuses the variable name "r".

This patch updates the ExpressionEvalHelper test harness to always project two instances of the same expression, which will help us catch variable-reuse problems in expression unit tests. This patch also fixes the bug in the crc32 expression.
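To illustrate the safe pattern, here is a minimal sketch (not part of this patch; `GoodCodegenExpression` and its variable names are hypothetical) of a leaf expression that asks the `CodegenContext` for a fresh variable name per instance instead of hard-coding one, mirroring the crc32 fix below:

```scala
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.LeafExpression
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.types.{DataType, IntegerType}

// Hypothetical illustration only: not an expression that exists in Spark.
case class GoodCodegenExpression() extends LeafExpression {
  override def nullable: Boolean = false
  override def dataType: DataType = IntegerType
  override def eval(input: InternalRow): Any = 10

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // freshName returns an identifier that is unique within the generated class
    // (e.g. "someVariable1", "someVariable2"), so projecting two instances of
    // this expression does not declare the same Java variable twice.
    val someVariable = ctx.freshName("someVariable")
    ev.copy(code =
      s"""
        |int $someVariable = 11;
        |int ${ev.value} = $someVariable - 1;
      """.stripMargin)
  }
}
```

Hard-coding the name instead (as the `BadCodegenExpression` added in the new test suite does with `some_variable`) makes the generated Java declare the same variable twice once two instances are projected together, which is exactly the failure the updated `ExpressionEvalHelper` harness now surfaces.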

## How was this patch tested?
This is a test harness change, but I also created a new test suite for testing the test harness.

Author: Reynold Xin <rxin@databricks.com>

Closes #14146 from rxin/SPARK-16489.
parent 5ad68ba5
@@ -175,11 +175,12 @@ case class Crc32(child: Expression) extends UnaryExpression with ImplicitCastInp
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val CRC32 = "java.util.zip.CRC32"
+    val checksum = ctx.freshName("checksum")
     nullSafeCodeGen(ctx, ev, value => {
       s"""
-        $CRC32 checksum = new $CRC32();
-        checksum.update($value, 0, $value.length);
-        ${ev.value} = checksum.getValue();
+        $CRC32 $checksum = new $CRC32();
+        $checksum.update($value, 0, $value.length);
+        ${ev.value} = $checksum.getValue();
       """
     })
   }
@@ -132,9 +132,13 @@ trait ExpressionEvalHelper extends GeneratorDrivenPropertyChecks {
       expression: Expression,
       expected: Any,
       inputRow: InternalRow = EmptyRow): Unit = {
+    // SPARK-16489 Explicitly doing code generation twice so code gen will fail if
+    // some expression is reusing variable names across different instances.
+    // This behavior is tested in ExpressionEvalHelperSuite.
     val plan = generateProject(
-      GenerateUnsafeProjection.generate(Alias(expression, s"Optimized($expression)")() :: Nil),
+      GenerateUnsafeProjection.generate(
+        Alias(expression, s"Optimized($expression)1")() ::
+        Alias(expression, s"Optimized($expression)2")() :: Nil),
       expression)

     val unsafeRow = plan(inputRow)
@@ -142,13 +146,14 @@ trait ExpressionEvalHelper extends GeneratorDrivenPropertyChecks {
     if (expected == null) {
       if (!unsafeRow.isNullAt(0)) {
-        val expectedRow = InternalRow(expected)
+        val expectedRow = InternalRow(expected, expected)
         fail("Incorrect evaluation in unsafe mode: " +
           s"$expression, actual: $unsafeRow, expected: $expectedRow$input")
       }
     } else {
-      val lit = InternalRow(expected)
-      val expectedRow = UnsafeProjection.create(Array(expression.dataType)).apply(lit)
+      val lit = InternalRow(expected, expected)
+      val expectedRow =
+        UnsafeProjection.create(Array(expression.dataType, expression.dataType)).apply(lit)
       if (unsafeRow != expectedRow) {
         fail("Incorrect evaluation in unsafe mode: " +
           s"$expression, actual: $unsafeRow, expected: $expectedRow$input")
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.types.{DataType, IntegerType}

/**
 * A test suite for testing [[ExpressionEvalHelper]].
 *
 * Yes, we should write test cases for test harnesses, in case
 * they have behaviors that are easy to break.
 */
class ExpressionEvalHelperSuite extends SparkFunSuite with ExpressionEvalHelper {
test("SPARK-16489 checkEvaluation should fail if expression reuses variable names") {
val e = intercept[RuntimeException] { checkEvaluation(BadCodegenExpression(), 10) }
assert(e.getMessage.contains("some_variable"))
}
}

/**
 * An expression that generates bad code (the variable name "some_variable" is not unique across
 * instances of the expression).
 */
case class BadCodegenExpression() extends LeafExpression {
  override def nullable: Boolean = false
  override def eval(input: InternalRow): Any = 10
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    ev.copy(code =
      s"""
        |int some_variable = 11;
        |int ${ev.value} = 10;
      """.stripMargin)
  }
  override def dataType: DataType = IntegerType
}
@@ -449,20 +449,6 @@ class DataFrameReaderWriterSuite extends QueryTest with SharedSQLContext with Be
     }
   }

-  test("pmod with partitionBy") {
-    val spark = this.spark
-    import spark.implicits._
-    case class Test(a: Int, b: String)
-    val data = Seq((0, "a"), (1, "b"), (1, "a"))
-    spark.createDataset(data).createOrReplaceTempView("test")
-    sql("select * from test distribute by pmod(_1, 2)")
-      .write
-      .partitionBy("_2")
-      .mode("overwrite")
-      .parquet(dir)
-  }
-
   private def testRead(
       df: => DataFrame,
       expectedResult: Seq[String],