Commit 860a49ef authored by Wenchen Fan, committed by Michael Armbrust

[SPARK-7153] [SQL] support all integral type ordinal in GetArrayItem

First convert `ordinal` to `Number`, then convert it to an int.

Author: Wenchen Fan <cloud0fan@outlook.com>

Closes #5706 from cloud-fan/7153 and squashes the following commits:

915db79 [Wenchen Fan] fix 7153
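
For background, here is a minimal standalone Scala sketch (not part of the commit) of why the change is needed: when the ordinal expression evaluates to a boxed Byte, Short, or Long, a direct `asInstanceOf[Int]` unboxing fails with a ClassCastException, whereas going through `java.lang.Number` accepts every integral type.

    val ordinals: Seq[Any] = Seq(1.toByte, 1.toShort, 1, 1L)

    // Old approach: unboxing a boxed Byte/Short/Long as Int throws at runtime.
    // ordinals.foreach(o => o.asInstanceOf[Int])  // ClassCastException for Byte/Short/Long

    // Fixed approach: every boxed integral value is a java.lang.Number,
    // so intValue() produces a usable index for all of them.
    val indices = ordinals.map(_.asInstanceOf[Number].intValue())
    assert(indices == Seq(1, 1, 1, 1))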
parent 1dfb0f7b
@@ -186,7 +186,7 @@ case class GetArrayItem(child: Expression, ordinal: Expression)
     // TODO: consider using Array[_] for ArrayType child to avoid
     // boxing of primitives
     val baseValue = value.asInstanceOf[Seq[_]]
-    val index = ordinal.asInstanceOf[Int]
+    val index = ordinal.asInstanceOf[Number].intValue()
     if (index >= baseValue.size || index < 0) {
       null
     } else {
......
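Read as a self-contained sketch (assuming the truncated else branch simply indexes the sequence, and setting aside Catalyst's surrounding null handling), the fixed lookup behaves like this hypothetical helper:

    // Illustrative only; mirrors the bounds check and Number-based cast above.
    def getArrayItemSketch(value: Any, ordinal: Any): Any = {
      val baseValue = value.asInstanceOf[Seq[_]]
      val index = ordinal.asInstanceOf[Number].intValue()
      if (index >= baseValue.size || index < 0) null else baseValue(index)
    }

    getArrayItemSketch(Seq("a", "b"), 1.toShort)  // "b": Short ordinals now work
    getArrayItemSketch(Seq("a", "b"), 5L)         // null: out-of-range index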
@@ -17,7 +17,6 @@
 package org.apache.spark.sql.catalyst.expressions
 
-import org.apache.spark.sql.catalyst
 import org.apache.spark.sql.types._
......
@@ -26,6 +26,26 @@ import org.apache.spark.unsafe.types.UTF8String
 
 class ComplexTypeSuite extends SparkFunSuite with ExpressionEvalHelper {
 
+  /**
+   * Runs through the testFunc for all integral data types.
+   *
+   * @param testFunc a test function that accepts a conversion function to convert an integer
+   *                 into another data type.
+   */
+  private def testIntegralDataTypes(testFunc: (Int => Any) => Unit): Unit = {
+    testFunc(_.toByte)
+    testFunc(_.toShort)
+    testFunc(identity)
+    testFunc(_.toLong)
+  }
+
+  test("GetArrayItem") {
+    testIntegralDataTypes { convert =>
+      val array = Literal.create(Seq("a", "b"), ArrayType(StringType))
+      checkEvaluation(GetArrayItem(array, Literal(convert(1))), "b")
+    }
+  }
+
   test("CreateStruct") {
     val row = InternalRow(1, 2, 3)
     val c1 = 'a.int.at(0).as("a")
......
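For illustration (not part of the commit), the GetArrayItem test above is equivalent to running the same assertion once per integral ordinal type produced by testIntegralDataTypes:

    val array = Literal.create(Seq("a", "b"), ArrayType(StringType))
    checkEvaluation(GetArrayItem(array, Literal(1.toByte)), "b")
    checkEvaluation(GetArrayItem(array, Literal(1.toShort)), "b")
    checkEvaluation(GetArrayItem(array, Literal(1)), "b")
    checkEvaluation(GetArrayItem(array, Literal(1L)), "b")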