Commit c71c6853 authored by Sameer Agarwal, committed by Reynold Xin

[SPARK-14870][SQL][FOLLOW-UP] Move decimalDataWithNulls in DataFrameAggregateSuite

## What changes were proposed in this pull request?

Minor followup to https://github.com/apache/spark/pull/12651

## How was this patch tested?

Test-only change

Author: Sameer Agarwal <sameer@databricks.com>

Closes #12674 from sameeragarwal/tpcds-fix-2.
parent cfa64882
DataFrameAggregateSuite.scala

@@ -21,6 +21,7 @@ import org.apache.spark.sql.expressions.Window
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSQLContext
+import org.apache.spark.sql.test.SQLTestData.DecimalData
 import org.apache.spark.sql.types.DecimalType
 
 case class Fact(date: Int, hour: Int, minute: Int, room_name: String, temp: Double)
@@ -69,6 +70,14 @@ class DataFrameAggregateSuite extends QueryTest with SharedSQLContext {
         Row(new java.math.BigDecimal(3.0), new java.math.BigDecimal(3.0)))
     )
 
+    val decimalDataWithNulls = sqlContext.sparkContext.parallelize(
+      DecimalData(1, 1) ::
+      DecimalData(1, null) ::
+      DecimalData(2, 1) ::
+      DecimalData(2, null) ::
+      DecimalData(3, 1) ::
+      DecimalData(3, 2) ::
+      DecimalData(null, 2) :: Nil).toDF()
     checkAnswer(
       decimalDataWithNulls.groupBy("a").agg(sum("b")),
       Seq(Row(new java.math.BigDecimal(1.0), new java.math.BigDecimal(1.0)),
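For readers skimming the diff: the inlined decimalDataWithNulls data exercises SUM over a nullable decimal column grouped by a nullable key. The following plain-Scala sketch is mine, not part of the commit (the object name and helper logic are illustrative only); it mirrors what groupBy("a").agg(sum("b")) computes over that data:

object DecimalNullAggSketch extends App {
  // Same shape as the test's SQLTestData.DecimalData case class.
  case class DecimalData(a: BigDecimal, b: BigDecimal)

  // The seven rows the moved fixture builds (Int literals convert to
  // BigDecimal implicitly; null is a legal BigDecimal since it is an AnyRef).
  val rows = Seq(
    DecimalData(1, 1), DecimalData(1, null),
    DecimalData(2, 1), DecimalData(2, null),
    DecimalData(3, 1), DecimalData(3, 2),
    DecimalData(null, 2))

  // Group by the (possibly null) key and sum only the non-null values,
  // matching SQL's SUM semantics; an all-null group sums to null.
  val sums = rows.groupBy(_.a).map { case (key, group) =>
    val vals = group.map(_.b).filter(_ != null)
    key -> (if (vals.isEmpty) null else vals.reduce(_ + _))
  }

  // Prints the pairs the test asserts via checkAnswer:
  // 1 -> 1, 2 -> 1, 3 -> 3, null -> 2
  sums.foreach(println)
}

Each group drops its null values before summing, and the null grouping key forms its own group, which is exactly the shape of the Seq of Rows passed to checkAnswer above.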
SQLTestData.scala

@@ -103,19 +103,6 @@ private[sql] trait SQLTestData { self =>
     df
   }
 
-  protected lazy val decimalDataWithNulls: DataFrame = {
-    val df = sqlContext.sparkContext.parallelize(
-      DecimalDataWithNulls(1, 1) ::
-      DecimalDataWithNulls(1, null) ::
-      DecimalDataWithNulls(2, 1) ::
-      DecimalDataWithNulls(2, null) ::
-      DecimalDataWithNulls(3, 1) ::
-      DecimalDataWithNulls(3, 2) ::
-      DecimalDataWithNulls(null, 2) :: Nil).toDF()
-    df.registerTempTable("decimalDataWithNulls")
-    df
-  }
-
   protected lazy val binaryData: DataFrame = {
     val df = sqlContext.sparkContext.parallelize(
       BinaryData("12".getBytes(StandardCharsets.UTF_8), 1) ::
@@ -280,7 +267,6 @@ private[sql] trait SQLTestData { self =>
     negativeData
     largeAndSmallInts
     decimalData
-    decimalDataWithNulls
     binaryData
     upperCaseData
     lowerCaseData
@@ -310,7 +296,6 @@ private[sql] object SQLTestData {
   case class TestData3(a: Int, b: Option[Int])
   case class LargeAndSmallInts(a: Int, b: Int)
   case class DecimalData(a: BigDecimal, b: BigDecimal)
-  case class DecimalDataWithNulls(a: BigDecimal, b: BigDecimal)
   case class BinaryData(a: Array[Byte], b: Int)
   case class UpperCaseData(N: Int, L: String)
   case class LowerCaseData(n: Int, l: String)
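Note why DecimalDataWithNulls can be deleted outright rather than replaced: it had exactly the same shape as DecimalData, and scala.BigDecimal is a reference type, so DecimalData fields already admit null. The inlined test data above relies on exactly that. A minimal sketch (mine, not from the commit) demonstrating the point:

object NullableDecimalSketch extends App {
  // Same shape as SQLTestData.DecimalData; because scala.BigDecimal is an
  // AnyRef, its fields accept null and no separate nullable variant is needed.
  case class DecimalData(a: BigDecimal, b: BigDecimal)

  println(DecimalData(null, BigDecimal(2)))  // prints: DecimalData(null,2)
}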