Skip to content
Snippets Groups Projects
Commit 6a05eb24 authored by Dongjoon Hyun's avatar Dongjoon Hyun Committed by Herman van Hovell
Browse files

[SPARK-17328][SQL] Fix NPE with EXPLAIN DESCRIBE TABLE

## What changes were proposed in this pull request?

This PR fixes the following NPE scenario in two ways.

**Reported Error Scenario**
```scala
scala> sql("EXPLAIN DESCRIBE TABLE x").show(truncate = false)
INFO SparkSqlParser: Parsing command: EXPLAIN DESCRIBE TABLE x
java.lang.NullPointerException
```

- **DESCRIBE**: Extend `DESCRIBE` syntax to accept `TABLE`.
- **EXPLAIN**: Prevent NPE in case of the parsing failure of target statement, e.g., `EXPLAIN DESCRIBE TABLES x`.

## How was this patch tested?

Pass the Jenkins test with a new test case.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #15357 from dongjoon-hyun/SPARK-17328.
parent 89516c1c
No related branches found
No related tags found
No related merge requests found
......@@ -136,7 +136,7 @@ statement
| SHOW CREATE TABLE tableIdentifier #showCreateTable
| (DESC | DESCRIBE) FUNCTION EXTENDED? describeFuncName #describeFunction
| (DESC | DESCRIBE) DATABASE EXTENDED? identifier #describeDatabase
| (DESC | DESCRIBE) option=(EXTENDED | FORMATTED)?
| (DESC | DESCRIBE) TABLE? option=(EXTENDED | FORMATTED)?
tableIdentifier partitionSpec? describeColName? #describeTable
| REFRESH TABLE tableIdentifier #refreshTable
| REFRESH .*? #refreshResource
......
......@@ -265,7 +265,9 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
}
val statement = plan(ctx.statement)
if (isExplainableStatement(statement)) {
if (statement == null) {
null // This is enough since ParseException will raise later.
} else if (isExplainableStatement(statement)) {
ExplainCommand(statement, extended = ctx.EXTENDED != null, codegen = ctx.CODEGEN != null)
} else {
ExplainCommand(OneRowRelation)
......
......@@ -2,8 +2,12 @@ CREATE TABLE t (a STRING, b INT) PARTITIONED BY (c STRING, d STRING);
ALTER TABLE t ADD PARTITION (c='Us', d=1);
DESCRIBE t;
DESC t;
DESC TABLE t;
-- Ignore these because there exist timestamp results, e.g., `Create Table`.
-- DESC EXTENDED t;
-- DESC FORMATTED t;
......
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 8
-- Number of queries: 10
-- !query 0
......@@ -19,7 +19,7 @@ struct<>
-- !query 2
DESC t
DESCRIBE t
-- !query 2 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 2 output
......@@ -34,7 +34,7 @@ d string
-- !query 3
DESC t PARTITION (c='Us', d=1)
DESC t
-- !query 3 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 3 output
......@@ -49,30 +49,60 @@ d string
-- !query 4
DESC t PARTITION (c='Us', d=2)
DESC TABLE t
-- !query 4 schema
struct<>
struct<col_name:string,data_type:string,comment:string>
-- !query 4 output
# Partition Information
# col_name data_type comment
a string
b int
c string
c string
d string
d string
-- !query 5
DESC t PARTITION (c='Us', d=1)
-- !query 5 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 5 output
# Partition Information
# col_name data_type comment
a string
b int
c string
c string
d string
d string
-- !query 6
DESC t PARTITION (c='Us', d=2)
-- !query 6 schema
struct<>
-- !query 6 output
org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException
Partition not found in table 't' database 'default':
c -> Us
d -> 2;
-- !query 5
-- !query 7
DESC t PARTITION (c='Us')
-- !query 5 schema
-- !query 7 schema
struct<>
-- !query 5 output
-- !query 7 output
org.apache.spark.sql.AnalysisException
Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`default`.`t`';
-- !query 6
-- !query 8
DESC t PARTITION (c='Us', d)
-- !query 6 schema
-- !query 8 schema
struct<>
-- !query 6 output
-- !query 8 output
org.apache.spark.sql.catalyst.parser.ParseException
PARTITION specification is incomplete: `d`(line 1, pos 0)
......@@ -82,9 +112,9 @@ DESC t PARTITION (c='Us', d)
^^^
-- !query 7
-- !query 9
DROP TABLE t
-- !query 7 schema
-- !query 9 schema
struct<>
-- !query 7 output
-- !query 9 output
......@@ -17,11 +17,12 @@
package org.apache.spark.sql.execution
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.command.{DescribeFunctionCommand, ShowFunctionsCommand}
import org.apache.spark.sql.execution.command.{DescribeFunctionCommand, DescribeTableCommand,
ShowFunctionsCommand}
import org.apache.spark.sql.internal.SQLConf
/**
......@@ -72,4 +73,17 @@ class SparkSqlParserSuite extends PlanTest {
DescribeFunctionCommand(FunctionIdentifier("bar", database = Option("f")), isExtended = true))
}
// Regression test for SPARK-17328: `EXPLAIN DESCRIBE TABLE x` threw a
// NullPointerException. Verifies the two fixes in this commit: DESCRIBE now
// accepts an optional TABLE keyword, and EXPLAIN surfaces a parse error
// (instead of an NPE) when its inner statement fails to parse.
test("SPARK-17328 Fix NPE with EXPLAIN DESCRIBE TABLE") {
// `DESCRIBE TABLE t` must parse to the same DescribeTableCommand as plain `DESCRIBE t`.
assertEqual("describe table t",
DescribeTableCommand(
TableIdentifier("t"), Map.empty, isExtended = false, isFormatted = false))
// EXTENDED / FORMATTED options still work with the optional TABLE keyword.
assertEqual("describe table extended t",
DescribeTableCommand(
TableIdentifier("t"), Map.empty, isExtended = true, isFormatted = false))
assertEqual("describe table formatted t",
DescribeTableCommand(
TableIdentifier("t"), Map.empty, isExtended = false, isFormatted = true))
// An unparsable inner statement (`describe tables` is not valid SQL) must raise
// a parse error rather than the NPE reported in the JIRA.
intercept("explain describe tables x", "Unsupported SQL statement")
}
}
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.