From f84b228c4002073ee4ff53be50462a63f48bd508 Mon Sep 17 00:00:00 2001
From: Venkata Ramana Gollamudi <ramana.gollamudi@huawei.com>
Date: Wed, 1 Oct 2014 15:57:06 -0700
Subject: [PATCH] [SPARK-3593][SQL] Add support for sorting BinaryType

BinaryType now derives from NativeType, which adds Ordering support for sorting.

Author: Venkata Ramana G <ramana.gollamudi@huawei.com>

Author: Venkata Ramana Gollamudi <ramana.gollamudi@huawei.com>

Closes #2617 from gvramana/binarytype_sort and squashes the following commits:

1cf26f3 [Venkata Ramana Gollamudi] Supported Sorting of BinaryType
---
 .../apache/spark/sql/catalyst/types/dataTypes.scala  | 12 +++++++++++-
 .../scala/org/apache/spark/sql/SQLQuerySuite.scala   |  8 ++++++++
 .../test/scala/org/apache/spark/sql/TestData.scala   | 10 ++++++++++
 3 files changed, 29 insertions(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
index c7d73d3990..ac043d4dd8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
@@ -157,8 +157,18 @@ case object StringType extends NativeType with PrimitiveType {
   def simpleString: String = "string"
 }
 
-case object BinaryType extends DataType with PrimitiveType {
+case object BinaryType extends NativeType with PrimitiveType {
   private[sql] type JvmType = Array[Byte]
+  @transient private[sql] lazy val tag = ScalaReflectionLock.synchronized { typeTag[JvmType] }
+  private[sql] val ordering = new Ordering[JvmType] {
+    def compare(x: Array[Byte], y: Array[Byte]): Int = {
+      val limit = math.min(x.length, y.length)
+      var i = 0
+      while (i < limit && x(i) == y(i)) i += 1
+      // First differing (signed) byte decides; a prefix sorts before longer arrays.
+      if (i < limit) x(i).compareTo(y(i)) else x.length - y.length
+    }
+  }
   def simpleString: String = "binary"
 }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 08376eb5e5..fdf3a229a7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -190,6 +190,14 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
       sql("SELECT * FROM testData2 ORDER BY a DESC, b ASC"),
       Seq((3,1), (3,2), (2,1), (2,2), (1,1), (1,2)))
 
+    checkAnswer(
+      sql("SELECT b FROM binaryData ORDER BY a ASC"),
+      Seq(Row(1), Row(2), Row(3), Row(4), Row(5)))
+
+    checkAnswer(
+      sql("SELECT b FROM binaryData ORDER BY a DESC"),
+      Seq(Row(5), Row(4), Row(3), Row(2), Row(1)))
+
     checkAnswer(
       sql("SELECT * FROM arrayData ORDER BY data[0] ASC"),
       arrayData.collect().sortBy(_.data(0)).toSeq)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
index eb33a61c6e..10b7979df7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
@@ -54,6 +54,16 @@ object TestData {
       TestData2(3, 2) :: Nil)
   testData2.registerTempTable("testData2")
 
+  case class BinaryData(a: Array[Byte], b: Int)
+  val binaryData: SchemaRDD =
+    TestSQLContext.sparkContext.parallelize(
+      BinaryData("12".getBytes("UTF-8"), 1) ::
+      BinaryData("22".getBytes("UTF-8"), 5) ::
+      BinaryData("122".getBytes("UTF-8"), 3) ::
+      BinaryData("121".getBytes("UTF-8"), 2) ::
+      BinaryData("123".getBytes("UTF-8"), 4) :: Nil)
+  binaryData.registerTempTable("binaryData")
+
   // TODO: There is no way to express null primitives as case classes currently...
   val testData3 =
     logical.LocalRelation('a.int, 'b.int).loadData(
-- 
GitLab