Commit 7ea6d282 authored by Cheng Lian, committed by Reynold Xin

[SPARK-16703][SQL] Remove extra whitespace in SQL generation for window functions

## What changes were proposed in this pull request?

This PR fixes a minor formatting issue in `WindowSpecDefinition.sql`: when no partitioning expressions are present, the generated window specification contains an extra space right after the opening parenthesis.

Before:

```sql
( ORDER BY `a` ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)
```

After:

```sql
(ORDER BY `a` ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)
```
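
For context, the stray space comes purely from how the clause strings are concatenated. The following is a minimal, standalone sketch of the before/after string building (the object and method names are invented for illustration; only the concatenation logic mirrors the actual change to `WindowSpecDefinition.sql` shown in the diff below):

```scala
// Illustrative sketch only; not the actual Catalyst code.
object WindowSqlFormatSketch {
  val frame = "ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"

  // Before this patch: clauses carry no trailing space and are joined with
  // explicit single spaces, so an absent PARTITION BY still contributes a blank.
  def oldSql(partitionCols: Seq[String], orderCols: Seq[String]): String = {
    val partition =
      if (partitionCols.isEmpty) "" else "PARTITION BY " + partitionCols.mkString(", ")
    val order =
      if (orderCols.isEmpty) "" else "ORDER BY " + orderCols.mkString(", ")
    s"($partition $order $frame)"
  }

  // After this patch: each non-empty clause brings its own trailing space, so
  // nothing extra is emitted for a missing clause.
  def newSql(partitionCols: Seq[String], orderCols: Seq[String]): String = {
    val partition =
      if (partitionCols.isEmpty) "" else "PARTITION BY " + partitionCols.mkString(", ") + " "
    val order =
      if (orderCols.isEmpty) "" else "ORDER BY " + orderCols.mkString(", ") + " "
    s"($partition$order$frame)"
  }

  def main(args: Array[String]): Unit = {
    // No PARTITION BY clause: the old form has a stray leading space.
    println(oldSql(Nil, Seq("`a` ASC"))) // ( ORDER BY `a` ASC ROWS BETWEEN ... CURRENT ROW)
    println(newSql(Nil, Seq("`a` ASC"))) // (ORDER BY `a` ASC ROWS BETWEEN ... CURRENT ROW)
  }
}
```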

## How was this patch tested?

New test case added in `ExpressionSQLBuilderSuite`.

Author: Cheng Lian <lian@databricks.com>

Closes #14334 from liancheng/window-spec-sql-format.
parent 79826f3c
Change to window spec SQL generation in `WindowSpecDefinition`:

```diff
@@ -82,16 +82,16 @@ case class WindowSpecDefinition(
   override def sql: String = {
     val partition = if (partitionSpec.isEmpty) {
       ""
     } else {
-      "PARTITION BY " + partitionSpec.map(_.sql).mkString(", ")
+      "PARTITION BY " + partitionSpec.map(_.sql).mkString(", ") + " "
     }

     val order = if (orderSpec.isEmpty) {
       ""
     } else {
-      "ORDER BY " + orderSpec.map(_.sql).mkString(", ")
+      "ORDER BY " + orderSpec.map(_.sql).mkString(", ") + " "
     }

-    s"($partition $order ${frameSpecification.toString})"
+    s"($partition$order${frameSpecification.toString})"
   }
 }
```
Regenerated golden answer files for `LogicalPlanToSQLSuite`:

```diff
 -- This file is automatically generated by LogicalPlanToSQLSuite.
 SELECT MAX(c) + COUNT(a) OVER () FROM parquet_t2 GROUP BY a, b
 --------------------------------------------------------------------------------
-SELECT `gen_attr` AS `(max(c) + count(a) OVER ( ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING))` FROM (SELECT (`gen_attr` + `gen_attr`) AS `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, count(`gen_attr`) OVER ( ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `gen_attr` FROM (SELECT max(`gen_attr`) AS `gen_attr`, `gen_attr` FROM (SELECT `a` AS `gen_attr`, `b` AS `gen_attr`, `c` AS `gen_attr`, `d` AS `gen_attr` FROM `default`.`parquet_t2`) AS gen_subquery_0 GROUP BY `gen_attr`, `gen_attr`) AS gen_subquery_1) AS gen_subquery_2) AS gen_subquery_3
+SELECT `gen_attr` AS `(max(c) + count(a) OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING))` FROM (SELECT (`gen_attr` + `gen_attr`) AS `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, count(`gen_attr`) OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `gen_attr` FROM (SELECT max(`gen_attr`) AS `gen_attr`, `gen_attr` FROM (SELECT `a` AS `gen_attr`, `b` AS `gen_attr`, `c` AS `gen_attr`, `d` AS `gen_attr` FROM `default`.`parquet_t2`) AS gen_subquery_0 GROUP BY `gen_attr`, `gen_attr`) AS gen_subquery_1) AS gen_subquery_2) AS gen_subquery_3

 -- This file is automatically generated by LogicalPlanToSQLSuite.
 SELECT MAX(key) OVER (PARTITION BY key % 3) + key FROM parquet_t1
 --------------------------------------------------------------------------------
 SELECT `gen_attr` AS `(max(key) OVER (PARTITION BY (key % CAST(3 AS BIGINT)) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) + key)` FROM (SELECT (`gen_attr` + `gen_attr`) AS `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, max(`gen_attr`) OVER (PARTITION BY `gen_attr` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `gen_attr` FROM (SELECT `gen_attr`, (`gen_attr` % CAST(3 AS BIGINT)) AS `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0) AS gen_subquery_1) AS gen_subquery_2) AS gen_subquery_3

 -- This file is automatically generated by LogicalPlanToSQLSuite.
 SELECT MAX(value) OVER (PARTITION BY key % 3) FROM parquet_t1
 --------------------------------------------------------------------------------
 SELECT `gen_attr` AS `max(value) OVER (PARTITION BY (key % CAST(3 AS BIGINT)) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)` FROM (SELECT `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, max(`gen_attr`) OVER (PARTITION BY `gen_attr` ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `gen_attr` FROM (SELECT `gen_attr`, (`gen_attr` % CAST(3 AS BIGINT)) AS `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0) AS gen_subquery_1) AS gen_subquery_2) AS gen_subquery_3

@@ -2,4 +2,4 @@
 SELECT key, value, ROUND(AVG(key) OVER (), 2)
 FROM parquet_t1 ORDER BY key
 --------------------------------------------------------------------------------
-SELECT `gen_attr` AS `key`, `gen_attr` AS `value`, `gen_attr` AS `round(avg(key) OVER ( ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING), 2)` FROM (SELECT `gen_attr`, `gen_attr`, round(`gen_attr`, 2) AS `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, avg(`gen_attr`) OVER ( ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `gen_attr` FROM (SELECT `gen_attr`, `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0) AS gen_subquery_1) AS gen_subquery_2 ORDER BY `gen_attr` ASC) AS parquet_t1
+SELECT `gen_attr` AS `key`, `gen_attr` AS `value`, `gen_attr` AS `round(avg(key) OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING), 2)` FROM (SELECT `gen_attr`, `gen_attr`, round(`gen_attr`, 2) AS `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, avg(`gen_attr`) OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `gen_attr` FROM (SELECT `gen_attr`, `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0) AS gen_subquery_1) AS gen_subquery_2 ORDER BY `gen_attr` ASC) AS parquet_t1
```
New test case in `ExpressionSQLBuilderSuite`:

```diff
@@ -20,8 +20,7 @@ package org.apache.spark.sql.catalyst
 import java.sql.Timestamp

 import org.apache.spark.sql.catalyst.dsl.expressions._
-import org.apache.spark.sql.catalyst.expressions.{If, Literal}
+import org.apache.spark.sql.catalyst.expressions.{If, Literal, SpecifiedWindowFrame, WindowSpecDefinition}

 class ExpressionSQLBuilderSuite extends SQLBuilderTest {
   test("literal") {
@@ -79,4 +78,36 @@ class ExpressionSQLBuilderSuite extends SQLBuilderTest {
     checkSQL(-'a.int, "(-`a`)")
     checkSQL(-('a.int + 'b.int), "(-(`a` + `b`))")
   }
+
+  test("window specification") {
+    val frame = SpecifiedWindowFrame.defaultWindowFrame(
+      hasOrderSpecification = true,
+      acceptWindowFrame = true
+    )
+
+    checkSQL(
+      WindowSpecDefinition('a.int :: Nil, Nil, frame),
+      s"(PARTITION BY `a` $frame)"
+    )
+
+    checkSQL(
+      WindowSpecDefinition('a.int :: 'b.string :: Nil, Nil, frame),
+      s"(PARTITION BY `a`, `b` $frame)"
+    )
+
+    checkSQL(
+      WindowSpecDefinition(Nil, 'a.int.asc :: Nil, frame),
+      s"(ORDER BY `a` ASC $frame)"
+    )
+
+    checkSQL(
+      WindowSpecDefinition(Nil, 'a.int.asc :: 'b.string.desc :: Nil, frame),
+      s"(ORDER BY `a` ASC, `b` DESC $frame)"
+    )
+
+    checkSQL(
+      WindowSpecDefinition('a.int :: 'b.string :: Nil, 'c.int.asc :: 'd.string.desc :: Nil, frame),
+      s"(PARTITION BY `a`, `b` ORDER BY `c` ASC, `d` DESC $frame)"
+    )
+  }
 }
```
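
As an informal sanity check outside the test suite, the same expression can be constructed in a Scala REPL with Catalyst on the classpath and its generated SQL printed directly. This is only a usage sketch, mirroring one of the new test cases above and assuming the Catalyst DSL implicits are in scope as they are in the test:

```scala
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions.{SpecifiedWindowFrame, WindowSpecDefinition}

// Build the ORDER BY-only window spec used in the new test case and print its
// generated SQL. With this patch the output starts with "(ORDER BY `a` ASC ..."
// instead of "( ORDER BY `a` ASC ...".
val frame = SpecifiedWindowFrame.defaultWindowFrame(
  hasOrderSpecification = true,
  acceptWindowFrame = true)
val spec = WindowSpecDefinition(Nil, 'a.int.asc :: Nil, frame)
println(spec.sql)
```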