Skip to content
Snippets Groups Projects
Commit 1ee494d0 authored by hyukjinkwon's avatar hyukjinkwon Committed by Herman van Hovell
Browse files

[SPARK-20492][SQL] Do not print empty parentheses for invalid primitive types in parser

## What changes were proposed in this pull request?

Currently, when the type string is invalid, the error message prints empty parentheses after the type name. This PR proposes a small improvement to the error message by removing them in the parser, as below:

```scala
spark.range(1).select($"col".cast("aa"))
```

**Before**

```
org.apache.spark.sql.catalyst.parser.ParseException:
DataType aa() is not supported.(line 1, pos 0)

== SQL ==
aa
^^^
```

**After**

```
org.apache.spark.sql.catalyst.parser.ParseException:
DataType aa is not supported.(line 1, pos 0)

== SQL ==
aa
^^^
```

## How was this patch tested?

Unit tests in `DataTypeParserSuite`.

Author: hyukjinkwon <gurwls223@gmail.com>

Closes #17784 from HyukjinKwon/SPARK-20492.
parent 4d99b95a
No related branches found
No related tags found
No related merge requests found
......@@ -1491,8 +1491,8 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
case ("decimal", precision :: scale :: Nil) =>
DecimalType(precision.getText.toInt, scale.getText.toInt)
case (dt, params) =>
throw new ParseException(
s"DataType $dt${params.mkString("(", ",", ")")} is not supported.", ctx)
val dtStr = if (params.nonEmpty) s"$dt(${params.mkString(",")})" else dt
throw new ParseException(s"DataType $dtStr is not supported.", ctx)
}
}
......
......@@ -30,7 +30,7 @@ class DataTypeParserSuite extends SparkFunSuite {
}
}
def intercept(sql: String): Unit =
def intercept(sql: String): ParseException =
intercept[ParseException](CatalystSqlParser.parseDataType(sql))
def unsupported(dataTypeString: String): Unit = {
......@@ -118,6 +118,11 @@ class DataTypeParserSuite extends SparkFunSuite {
unsupported("struct<x: int")
unsupported("struct<x int, y string>")
// Regression test for SPARK-20492: an unsupported type with no parameters must be
// reported as `unkwon is not supported` (no trailing `()`), while a parameterized
// unsupported type keeps its parameter list in the message.
// NOTE(review): relies on the file-local `intercept(sql: String): ParseException`
// helper defined earlier in DataTypeParserSuite.
test("Do not print empty parentheses for no params") {
assert(intercept("unkwon").getMessage.contains("unkwon is not supported"))
assert(intercept("unkwon(1,2,3)").getMessage.contains("unkwon(1,2,3) is not supported"))
}
// DataType parser accepts certain reserved keywords.
checkDataType(
"Struct<TABLE: string, DATE:boolean>",
......
......@@ -141,7 +141,7 @@ struct<>
-- !query 13 output
org.apache.spark.sql.AnalysisException
DataType invalidtype() is not supported.(line 1, pos 2)
DataType invalidtype is not supported.(line 1, pos 2)
== SQL ==
a InvalidType
......
......@@ -274,7 +274,7 @@ class JsonFunctionsSuite extends QueryTest with SharedSQLContext {
val errMsg2 = intercept[AnalysisException] {
df3.selectExpr("""from_json(value, 'time InvalidType')""")
}
assert(errMsg2.getMessage.contains("DataType invalidtype() is not supported"))
assert(errMsg2.getMessage.contains("DataType invalidtype is not supported"))
val errMsg3 = intercept[AnalysisException] {
df3.selectExpr("from_json(value, 'time Timestamp', named_struct('a', 1))")
}
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment