From e78ec1a8fabfe409c92c4904208f53dbdcfcf139 Mon Sep 17 00:00:00 2001
From: Josh Rosen <joshrosen@databricks.com>
Date: Tue, 28 Jul 2015 17:51:58 -0700
Subject: [PATCH] [SPARK-9421] Fix null-handling bugs in UnsafeRow.getDouble(),
 getFloat(), and get(ordinal, dataType)

UnsafeRow.getDouble() and getFloat() return NaN when called on null columns, which is inconsistent with the other row classes, which return 0.0 in that case.
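
A minimal sketch of the inconsistency, distilled from the regression test added
by this patch (the imports are the ones used in UnsafeRowSuite; nothing here is
new API):

    import org.apache.spark.sql.catalyst.InternalRow
    import org.apache.spark.sql.catalyst.expressions.UnsafeProjection
    import org.apache.spark.sql.types._

    val row = InternalRow.apply(null, null)
    val unsafeRow =
      UnsafeProjection.create(Array[DataType](FloatType, DoubleType)).apply(row)
    row.getFloat(0)         // 0.0f -- generic rows return zero for null columns
    row.getDouble(1)        // 0.0
    unsafeRow.getFloat(0)   // NaN before this patch; 0.0f afterwards
    unsafeRow.getDouble(1)  // NaN before this patch; 0.0 afterwards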

In addition, the generic get(ordinal, dataType) method should always return null for a null column, but currently it only short-circuits for NullType and otherwise handles nulls by delegating to the type-specific accessors.
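
For illustration, again mirroring the new test case (same imports as the sketch
above; the requested data types are arbitrary examples):

    val nullRow = InternalRow.apply(null)
    val nullUnsafeRow =
      UnsafeProjection.create(Array[DataType](NullType)).apply(nullRow)
    // With this patch, get() short-circuits on isNullAt(ordinal):
    nullUnsafeRow.get(0, IntegerType)  // null (previously fell through to getInt)
    nullUnsafeRow.get(0, DoubleType)   // null (previously fell through to getDouble)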

This patch addresses both of these issues and adds a regression test.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #7736 from JoshRosen/unsafe-row-null-fixes and squashes the following commits:

c8eb2ee [Josh Rosen] Fix test in UnsafeRowConverterSuite
6214682 [Josh Rosen] Fixes to null handling in UnsafeRow
---
 .../sql/catalyst/expressions/UnsafeRow.java     | 14 +++-----------
 .../expressions/UnsafeRowConverterSuite.scala   |  4 ++--
 .../org/apache/spark/sql/UnsafeRowSuite.scala   | 17 ++++++++++++++++-
 3 files changed, 21 insertions(+), 14 deletions(-)

diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
index 955fb4226f..64a8edc34d 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
@@ -239,7 +239,7 @@ public final class UnsafeRow extends MutableRow {
 
   @Override
   public Object get(int ordinal, DataType dataType) {
-    if (dataType instanceof NullType) {
+    if (isNullAt(ordinal) || dataType instanceof NullType) {
       return null;
     } else if (dataType instanceof BooleanType) {
       return getBoolean(ordinal);
@@ -313,21 +313,13 @@ public final class UnsafeRow extends MutableRow {
   @Override
   public float getFloat(int ordinal) {
     assertIndexIsValid(ordinal);
-    if (isNullAt(ordinal)) {
-      return Float.NaN;
-    } else {
-      return PlatformDependent.UNSAFE.getFloat(baseObject, getFieldOffset(ordinal));
-    }
+    return PlatformDependent.UNSAFE.getFloat(baseObject, getFieldOffset(ordinal));
   }
 
   @Override
   public double getDouble(int ordinal) {
     assertIndexIsValid(ordinal);
-    if (isNullAt(ordinal)) {
-      return Float.NaN;
-    } else {
-      return PlatformDependent.UNSAFE.getDouble(baseObject, getFieldOffset(ordinal));
-    }
+    return PlatformDependent.UNSAFE.getDouble(baseObject, getFieldOffset(ordinal));
   }
 
   @Override
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala
index 2834b54e8f..b7bc17f89e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala
@@ -146,8 +146,8 @@ class UnsafeRowConverterSuite extends SparkFunSuite with Matchers {
     assert(createdFromNull.getShort(3) === 0)
     assert(createdFromNull.getInt(4) === 0)
     assert(createdFromNull.getLong(5) === 0)
-    assert(java.lang.Float.isNaN(createdFromNull.getFloat(6)))
-    assert(java.lang.Double.isNaN(createdFromNull.getDouble(7)))
+    assert(createdFromNull.getFloat(6) === 0.0f)
+    assert(createdFromNull.getDouble(7) === 0.0d)
     assert(createdFromNull.getUTF8String(8) === null)
     assert(createdFromNull.getBinary(9) === null)
     // assert(createdFromNull.get(10) === null)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala
index ad3bb1744c..e72a1bc6c4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala
@@ -22,7 +22,7 @@ import java.io.ByteArrayOutputStream
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.{UnsafeRow, UnsafeProjection}
-import org.apache.spark.sql.types.{DataType, IntegerType, StringType}
+import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.PlatformDependent
 import org.apache.spark.unsafe.memory.MemoryAllocator
 import org.apache.spark.unsafe.types.UTF8String
@@ -67,4 +67,19 @@ class UnsafeRowSuite extends SparkFunSuite {
 
     assert(bytesFromArrayBackedRow === bytesFromOffheapRow)
   }
+
+  test("calling getDouble() and getFloat() on null columns") {
+    val row = InternalRow.apply(null, null)
+    val unsafeRow = UnsafeProjection.create(Array[DataType](FloatType, DoubleType)).apply(row)
+    assert(unsafeRow.getFloat(0) === row.getFloat(0))
+    assert(unsafeRow.getDouble(1) === row.getDouble(1))
+  }
+
+  test("calling get(ordinal, datatype) on null columns") {
+    val row = InternalRow.apply(null)
+    val unsafeRow = UnsafeProjection.create(Array[DataType](NullType)).apply(row)
+    for (dataType <- DataTypeTestUtils.atomicTypes) {
+      assert(unsafeRow.get(0, dataType) === null)
+    }
+  }
 }
-- 
GitLab