Commit 3e831a26 authored by Takeshi YAMAMURO, committed by Michael Armbrust

[SPARK-6912] [SQL] Throw an AnalysisException when unsupported Java Map<K,V> types used in Hive UDF

To help UDF developers understand the limitation, throw an AnalysisException when an unsupported Map<K,V> type is used in a Hive UDF. This fix takes the same approach as #7248.

Author: Takeshi YAMAMURO <linguin.m.s@gmail.com>

Closes #7257 from maropu/ThrowExceptionWhenMapUsed and squashes the following commits:

916099a [Takeshi YAMAMURO] Fix style errors
7886dcc [Takeshi YAMAMURO] Throw an exception when Map<> used in Hive UDF
parent 6722aca8
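For context, a minimal sketch (not part of this change) of why the key and value types are unrecoverable: the pattern match in HiveInspectors below receives only a raw Class[_], and reflecting on a Java UDF's evaluate method yields just that erased Class. The snippet uses the UDFToIntIntMap fixture added by this commit and assumes it is on the classpath (for example in a test or spark-shell session):

import org.apache.spark.sql.hive.execution.UDFToIntIntMap

// Reflect on the fixture's evaluate() method, roughly the way Spark resolves a
// simple Hive UDF's return type from its raw Class.
val evaluate = classOf[UDFToIntIntMap].getMethods.find(_.getName == "evaluate").get

// Prints "interface java.util.Map": the Integer key/value parameters declared in
// Map<Integer, Integer> are not visible on the raw Class, so no Catalyst DataType
// can be derived for them.
println(evaluate.getReturnType)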
HiveInspectors.scala
@@ -226,6 +226,12 @@ private[hive] trait HiveInspectors {
        "List type in java is unsupported because " +
        "JVM type erasure makes spark fail to catch a component type in List<>")

    // java map type unsupported
    case c: Class[_] if c == classOf[java.util.Map[_, _]] =>
      throw new AnalysisException(
        "Map type in java is unsupported because " +
        "JVM type erasure makes spark fail to catch key and value types in Map<>")

    case c => throw new AnalysisException(s"Unsupported java type $c")
  }
New file: UDFToIntIntMap.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution;

import org.apache.hadoop.hive.ql.exec.UDF;

import java.util.HashMap;
import java.util.Map;

/**
 * Test UDF returning a java.util.Map; used to verify that Hive UDFs with
 * Map<K, V> return types are rejected with an AnalysisException.
 */
public class UDFToIntIntMap extends UDF {
  public Map<Integer, Integer> evaluate(Object o) {
    return new HashMap<Integer, Integer>() {
      {
        put(1, 1);
        put(2, 1);
        put(3, 1);
      }
    };
  }
}
New file: UDFToStringIntMap.java

/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution;

import org.apache.hadoop.hive.ql.exec.UDF;

import java.util.HashMap;
import java.util.Map;

/**
 * Test UDF returning a java.util.Map; used to verify that Hive UDFs with
 * Map<K, V> return types are rejected with an AnalysisException.
 */
public class UDFToStringIntMap extends UDF {
  public Map<String, Integer> evaluate(Object o) {
    return new HashMap<String, Integer>() {
      {
        put("key1", 1);
        put("key2", 2);
        put("key3", 3);
      }
    };
  }
}
HiveUDFSuite.scala
@@ -163,6 +163,38 @@ class HiveUDFSuite extends QueryTest {
    TestHive.reset()
  }

  test("UDFToStringIntMap") {
    val testData = TestHive.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
    testData.registerTempTable("inputTable")

    sql(s"CREATE TEMPORARY FUNCTION testUDFToStringIntMap " +
      s"AS '${classOf[UDFToStringIntMap].getName}'")
    val errMsg = intercept[AnalysisException] {
      sql("SELECT testUDFToStringIntMap(s) FROM inputTable")
    }
    assert(errMsg.getMessage === "Map type in java is unsupported because " +
      "JVM type erasure makes spark fail to catch key and value types in Map<>;")

    sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToStringIntMap")
    TestHive.reset()
  }

  test("UDFToIntIntMap") {
    val testData = TestHive.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
    testData.registerTempTable("inputTable")

    sql(s"CREATE TEMPORARY FUNCTION testUDFToIntIntMap " +
      s"AS '${classOf[UDFToIntIntMap].getName}'")
    val errMsg = intercept[AnalysisException] {
      sql("SELECT testUDFToIntIntMap(s) FROM inputTable")
    }
    assert(errMsg.getMessage === "Map type in java is unsupported because " +
      "JVM type erasure makes spark fail to catch key and value types in Map<>;")

    sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToIntIntMap")
    TestHive.reset()
  }

  test("UDFListListInt") {
    val testData = TestHive.sparkContext.parallelize(
      ListListIntCaseClass(Nil) ::
......