Skip to content
Snippets Groups Projects
Commit 92b7e572 authored by Dongjoon Hyun's avatar Dongjoon Hyun Committed by gatorsmile
Browse files

[SPARK-17750][SQL] Fix CREATE VIEW with INTERVAL arithmetic.

## What changes were proposed in this pull request?

Currently, Spark raises `RuntimeException` when creating a view with timestamp with INTERVAL arithmetic like the following. The root cause is the arithmetic expression, `TimeAdd`, was transformed into `timeadd` function as a VIEW definition. This PR fixes the SQL definition of `TimeAdd` and `TimeSub` expressions.

```scala
scala> sql("CREATE TABLE dates (ts TIMESTAMP)")

scala> sql("CREATE VIEW view1 AS SELECT ts + INTERVAL 1 DAY FROM dates")
java.lang.RuntimeException: Failed to analyze the canonicalized SQL: ...
```

## How was this patch tested?

Pass Jenkins with a new test case.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #15318 from dongjoon-hyun/SPARK-17750.
parent 5e9f32dd
No related branches found
No related tags found
No related merge requests found
...@@ -682,6 +682,7 @@ case class TimeAdd(start: Expression, interval: Expression) ...@@ -682,6 +682,7 @@ case class TimeAdd(start: Expression, interval: Expression)
override def right: Expression = interval override def right: Expression = interval
override def toString: String = s"$left + $right" override def toString: String = s"$left + $right"
override def sql: String = s"${left.sql} + ${right.sql}"
override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, CalendarIntervalType) override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, CalendarIntervalType)
override def dataType: DataType = TimestampType override def dataType: DataType = TimestampType
...@@ -762,6 +763,7 @@ case class TimeSub(start: Expression, interval: Expression) ...@@ -762,6 +763,7 @@ case class TimeSub(start: Expression, interval: Expression)
override def right: Expression = interval override def right: Expression = interval
override def toString: String = s"$left - $right" override def toString: String = s"$left - $right"
override def sql: String = s"${left.sql} - ${right.sql}"
override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, CalendarIntervalType) override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, CalendarIntervalType)
override def dataType: DataType = TimestampType override def dataType: DataType = TimestampType
......
-- This file is automatically generated by LogicalPlanToSQLSuite.
select ts + interval 1 day, ts + interval 2 days,
ts - interval 1 day, ts - interval 2 days,
ts + interval '1' day, ts + interval '2' days,
ts - interval '1' day, ts - interval '2' days
from dates
--------------------------------------------------------------------------------
SELECT `gen_attr_0` AS `CAST(ts + interval 1 days AS TIMESTAMP)`, `gen_attr_2` AS `CAST(ts + interval 2 days AS TIMESTAMP)`, `gen_attr_3` AS `CAST(ts - interval 1 days AS TIMESTAMP)`, `gen_attr_4` AS `CAST(ts - interval 2 days AS TIMESTAMP)`, `gen_attr_5` AS `CAST(ts + interval 1 days AS TIMESTAMP)`, `gen_attr_6` AS `CAST(ts + interval 2 days AS TIMESTAMP)`, `gen_attr_7` AS `CAST(ts - interval 1 days AS TIMESTAMP)`, `gen_attr_8` AS `CAST(ts - interval 2 days AS TIMESTAMP)` FROM (SELECT CAST(`gen_attr_1` + interval 1 days AS TIMESTAMP) AS `gen_attr_0`, CAST(`gen_attr_1` + interval 2 days AS TIMESTAMP) AS `gen_attr_2`, CAST(`gen_attr_1` - interval 1 days AS TIMESTAMP) AS `gen_attr_3`, CAST(`gen_attr_1` - interval 2 days AS TIMESTAMP) AS `gen_attr_4`, CAST(`gen_attr_1` + interval 1 days AS TIMESTAMP) AS `gen_attr_5`, CAST(`gen_attr_1` + interval 2 days AS TIMESTAMP) AS `gen_attr_6`, CAST(`gen_attr_1` - interval 1 days AS TIMESTAMP) AS `gen_attr_7`, CAST(`gen_attr_1` - interval 2 days AS TIMESTAMP) AS `gen_attr_8` FROM (SELECT `ts` AS `gen_attr_1` FROM `default`.`dates`) AS gen_subquery_0) AS gen_subquery_1
...@@ -20,7 +20,9 @@ package org.apache.spark.sql.catalyst ...@@ -20,7 +20,9 @@ package org.apache.spark.sql.catalyst
import java.sql.Timestamp import java.sql.Timestamp
import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions.{If, Literal, SpecifiedWindowFrame, WindowSpecDefinition} import org.apache.spark.sql.catalyst.expressions.{If, Literal, SpecifiedWindowFrame, TimeAdd,
TimeSub, WindowSpecDefinition}
import org.apache.spark.unsafe.types.CalendarInterval
class ExpressionSQLBuilderSuite extends SQLBuilderTest { class ExpressionSQLBuilderSuite extends SQLBuilderTest {
test("literal") { test("literal") {
...@@ -119,4 +121,18 @@ class ExpressionSQLBuilderSuite extends SQLBuilderTest { ...@@ -119,4 +121,18 @@ class ExpressionSQLBuilderSuite extends SQLBuilderTest {
s"(PARTITION BY `a`, `b` ORDER BY `c` ASC NULLS FIRST, `d` DESC NULLS LAST $frame)" s"(PARTITION BY `a`, `b` ORDER BY `c` ASC NULLS FIRST, `d` DESC NULLS LAST $frame)"
) )
} }
test("interval arithmetic") {
  // A literal interval of exactly one day: 0 months, MICROS_PER_DAY microseconds.
  val oneDay = Literal(new CalendarInterval(0, CalendarInterval.MICROS_PER_DAY))
  // TimeAdd/TimeSub must render as infix `+` / `-` so the generated SQL
  // string can be re-parsed (SPARK-17750), not as a `timeadd(...)` call.
  checkSQL(TimeAdd('a, oneDay), "`a` + interval 1 days")
  checkSQL(TimeSub('a, oneDay), "`a` - interval 1 days")
}
} }
...@@ -1145,4 +1145,20 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils { ...@@ -1145,4 +1145,20 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
""".stripMargin, """.stripMargin,
"inline_tables") "inline_tables")
} }
test("SPARK-17750 - interval arithmetic") {
  // Regression test for SPARK-17750: queries using INTERVAL arithmetic on a
  // timestamp column must round-trip through the SQL builder and match the
  // recorded golden file "interval_arithmetic".
  withTable("dates") {
    sql("create table dates (ts timestamp)")
    val query =
      """
        |select ts + interval 1 day, ts + interval 2 days,
        | ts - interval 1 day, ts - interval 2 days,
        | ts + interval '1' day, ts + interval '2' days,
        | ts - interval '1' day, ts - interval '2' days
        |from dates
      """.stripMargin
    checkSQL(query, "interval_arithmetic")
  }
}
} }
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment