Skip to content
Snippets Groups Projects
Commit e78ec1a8 authored by Josh Rosen's avatar Josh Rosen
Browse files

[SPARK-9421] Fix null-handling bugs in UnsafeRow.getDouble, getFloat(), and get(ordinal, dataType)

UnsafeRow.getDouble() and UnsafeRow.getFloat() return NaN when called on columns that are null, which is inconsistent with the behavior of other row classes (which is to return 0.0).

In addition, the generic get(ordinal, dataType) method should always return null for a null literal, but currently it handles nulls by calling the type-specific accessors.

This patch addresses both of these issues and adds a regression test.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #7736 from JoshRosen/unsafe-row-null-fixes and squashes the following commits:

c8eb2ee [Josh Rosen] Fix test in UnsafeRowConverterSuite
6214682 [Josh Rosen] Fixes to null handling in UnsafeRow
parent 6662ee21
No related branches found
No related tags found
No related merge requests found
......@@ -239,7 +239,7 @@ public final class UnsafeRow extends MutableRow {
@Override
public Object get(int ordinal, DataType dataType) {
if (dataType instanceof NullType) {
if (isNullAt(ordinal) || dataType instanceof NullType) {
return null;
} else if (dataType instanceof BooleanType) {
return getBoolean(ordinal);
......@@ -313,21 +313,13 @@ public final class UnsafeRow extends MutableRow {
@Override
public float getFloat(int ordinal) {
  assertIndexIsValid(ordinal);
  // No explicit null check here by design: the generic get(ordinal, dataType)
  // accessor is responsible for checking isNullAt(ordinal) first. Because setting
  // a field to null also zeroes its storage word, reading a null column here
  // yields 0.0f, which matches the behavior of the other InternalRow classes
  // (the previous NaN-on-null behavior was inconsistent and has been removed).
  return PlatformDependent.UNSAFE.getFloat(baseObject, getFieldOffset(ordinal));
}
@Override
public double getDouble(int ordinal) {
  assertIndexIsValid(ordinal);
  // No explicit null check here by design: the generic get(ordinal, dataType)
  // accessor checks isNullAt(ordinal) first, and a nulled-out field's storage
  // word is zeroed, so a null column reads as 0.0d — consistent with other
  // InternalRow classes. (The removed code also returned Float.NaN from this
  // double-returning method, a type-mismatch bug on top of the wrong semantics.)
  return PlatformDependent.UNSAFE.getDouble(baseObject, getFieldOffset(ordinal));
}
@Override
......
......@@ -146,8 +146,8 @@ class UnsafeRowConverterSuite extends SparkFunSuite with Matchers {
assert(createdFromNull.getShort(3) === 0)
assert(createdFromNull.getInt(4) === 0)
assert(createdFromNull.getLong(5) === 0)
assert(java.lang.Float.isNaN(createdFromNull.getFloat(6)))
assert(java.lang.Double.isNaN(createdFromNull.getDouble(7)))
assert(createdFromNull.getFloat(6) === 0.0f)
assert(createdFromNull.getDouble(7) === 0.0d)
assert(createdFromNull.getUTF8String(8) === null)
assert(createdFromNull.getBinary(9) === null)
// assert(createdFromNull.get(10) === null)
......
......@@ -22,7 +22,7 @@ import java.io.ByteArrayOutputStream
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{UnsafeRow, UnsafeProjection}
import org.apache.spark.sql.types.{DataType, IntegerType, StringType}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.PlatformDependent
import org.apache.spark.unsafe.memory.MemoryAllocator
import org.apache.spark.unsafe.types.UTF8String
......@@ -67,4 +67,19 @@ class UnsafeRowSuite extends SparkFunSuite {
assert(bytesFromArrayBackedRow === bytesFromOffheapRow)
}
test("calling getDouble() and getFloat() on null columns") {
  // A null float/double column in an UnsafeRow must read back the same value
  // (0.0) as the generic InternalRow it was converted from — not NaN.
  val sourceRow = InternalRow.apply(null, null)
  val projection = UnsafeProjection.create(Array[DataType](FloatType, DoubleType))
  val converted = projection.apply(sourceRow)
  assert(converted.getFloat(0) === sourceRow.getFloat(0))
  assert(converted.getDouble(1) === sourceRow.getDouble(1))
}
test("calling get(ordinal, datatype) on null columns") {
  // The generic accessor must return null for a null column regardless of the
  // requested data type, instead of delegating to a type-specific getter.
  val sourceRow = InternalRow.apply(null)
  val converted = UnsafeProjection.create(Array[DataType](NullType)).apply(sourceRow)
  DataTypeTestUtils.atomicTypes.foreach { dataType =>
    assert(converted.get(0, dataType) === null)
  }
}
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment