diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
index abb5594bfa7f89f1477cdde5a3fc9174378bd0ad..0c256c3d890f196d39d7382636399cfdd7cd6997 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
@@ -260,7 +260,7 @@ case class GetArrayItem(child: Expression, ordinal: Expression)
  * We need to do type checking here as `key` expression maybe unresolved.
  */
 case class GetMapValue(child: Expression, key: Expression)
-  extends BinaryExpression with ExpectsInputTypes with ExtractValue {
+  extends BinaryExpression with ImplicitCastInputTypes with ExtractValue {
 
   private def keyType = child.dataType.asInstanceOf[MapType].keyType
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 5e08ef31121fda37a7f36e6793eae24a9e82e452..c21db3595fa19b268e1519da458bf6717f5753fb 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -1939,6 +1939,18 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     }
   }
 
+  test("SPARK-17108: Fix BIGINT and INT comparison failure in spark sql") {
+    sql("create table t1(a map<bigint, array<string>>)")
+    sql("select * from t1 where a[1] is not null")
+
+    sql("create table t2(a map<int, array<string>>)")
+    sql("select * from t2 where a[1] is not null")
+
+    sql("create table t3(a map<bigint, array<string>>)")
+    sql("select * from t3 where a[1L] is not null")
+  }
+
   test("SPARK-17796 Support wildcard character in filename for LOAD DATA LOCAL INPATH") {
     withTempDir { dir =>
       for (i <- 1 to 3) {
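
Not part of the patch: a minimal, self-contained Scala sketch of the behaviour the new test exercises. The local-mode SparkSession, the table name, and the USING parquet source are illustrative assumptions and do not come from the change; the patch itself only makes GetMapValue mix in ImplicitCastInputTypes so that an INT literal key can be implicitly cast to a BIGINT map key during analysis.

import org.apache.spark.sql.SparkSession

object MapKeyImplicitCastSketch {
  def main(args: Array[String]): Unit = {
    // Local session purely for illustration; any SparkSession would do.
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("SPARK-17108 sketch")
      .getOrCreate()

    // A map column keyed by BIGINT (LongType in Catalyst).
    spark.sql("CREATE TABLE t1 (a MAP<BIGINT, ARRAY<STRING>>) USING parquet")

    // The literal 1 is an INT. With GetMapValue extending ImplicitCastInputTypes,
    // the analyzer can insert a cast to BIGINT instead of failing the type check.
    spark.sql("SELECT * FROM t1 WHERE a[1] IS NOT NULL").show()

    spark.stop()
  }
}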