Commit ec89bd84 authored by Davies Liu, committed by Yin Huai

[SPARK-10245] [SQL] Fix decimal literals with precision < scale

In Scala's BigDecimal and java.math.BigDecimal, the precision can be smaller than the scale; for example, BigDecimal("0.001") has precision = 1 and scale = 3. DecimalType, however, requires the precision to be at least the scale, so we should use the maximum of precision and scale when inferring the schema from a decimal literal.
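For illustration, a minimal sketch of the rule described above (the values and variable names are examples only; DecimalType comes from org.apache.spark.sql.types):

import org.apache.spark.sql.types.DecimalType

// java.math.BigDecimal stores 0.001 as unscaled value 1 with scale 3,
// so its precision (digits in the unscaled value) is only 1.
val d = BigDecimal("0.001")
assert(d.precision == 1 && d.scale == 3)

// DecimalType(precision, scale) requires precision >= scale, so widen the
// precision to the scale before building the inferred type.
val inferred = DecimalType(math.max(d.precision, d.scale), d.scale)
assert(inferred == DecimalType(3, 3))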

Author: Davies Liu <davies@databricks.com>

Closes #8428 from davies/smaller_decimal.
parent 00ae4be9
@@ -36,9 +36,10 @@ object Literal {
     case s: Short => Literal(s, ShortType)
     case s: String => Literal(UTF8String.fromString(s), StringType)
     case b: Boolean => Literal(b, BooleanType)
-    case d: BigDecimal => Literal(Decimal(d), DecimalType(d.precision, d.scale))
-    case d: java.math.BigDecimal => Literal(Decimal(d), DecimalType(d.precision(), d.scale()))
-    case d: Decimal => Literal(d, DecimalType(d.precision, d.scale))
+    case d: BigDecimal => Literal(Decimal(d), DecimalType(Math.max(d.precision, d.scale), d.scale))
+    case d: java.math.BigDecimal =>
+      Literal(Decimal(d), DecimalType(Math.max(d.precision, d.scale), d.scale()))
+    case d: Decimal => Literal(d, DecimalType(Math.max(d.precision, d.scale), d.scale))
     case t: Timestamp => Literal(DateTimeUtils.fromJavaTimestamp(t), TimestampType)
     case d: Date => Literal(DateTimeUtils.fromJavaDate(d), DateType)
     case a: Array[Byte] => Literal(a, BinaryType)
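A hedged sketch of what the patched apply method now infers for a small decimal (assumes the catalyst Literal shown above is on the classpath):

import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.types.DecimalType

// Previously this inferred DecimalType(1, 3), which is invalid because the
// precision is smaller than the scale; the precision is now widened to 3.
val lit = Literal(BigDecimal("0.001"))
assert(lit.dataType == DecimalType(3, 3))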
@@ -83,12 +83,14 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
   }

   test("decimal") {
-    List(0.0, 1.2, 1.1111, 5).foreach { d =>
+    List(-0.0001, 0.0, 0.001, 1.2, 1.1111, 5).foreach { d =>
       checkEvaluation(Literal(Decimal(d)), Decimal(d))
       checkEvaluation(Literal(Decimal(d.toInt)), Decimal(d.toInt))
       checkEvaluation(Literal(Decimal(d.toLong)), Decimal(d.toLong))
-      checkEvaluation(Literal(Decimal((d * 1000L).toLong, 10, 1)),
-        Decimal((d * 1000L).toLong, 10, 1))
+      checkEvaluation(Literal(Decimal((d * 1000L).toLong, 10, 3)),
+        Decimal((d * 1000L).toLong, 10, 3))
       checkEvaluation(Literal(BigDecimal(d.toString)), Decimal(d))
+      checkEvaluation(Literal(new java.math.BigDecimal(d.toString)), Decimal(d))
     }
   }
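The constructor Decimal(unscaled, precision, scale) interprets its first argument as an unscaled long, which is why the scale in the test moves from 1 to 3 along with the extra fractional digits in the new values; a rough sketch for the element d = 0.001:

import org.apache.spark.sql.types.Decimal

// (0.001 * 1000L).toLong == 1, and Decimal(1, 10, 3) represents 0.001
// with precision 10 and scale 3.
val dec = Decimal((0.001 * 1000L).toLong, 10, 3)
assert(dec.toString == "0.001")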
@@ -1627,6 +1627,16 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
       Row(null))
   }

+  test("precision smaller than scale") {
+    checkAnswer(sql("select 10.00"), Row(BigDecimal("10.00")))
+    checkAnswer(sql("select 1.00"), Row(BigDecimal("1.00")))
+    checkAnswer(sql("select 0.10"), Row(BigDecimal("0.10")))
+    checkAnswer(sql("select 0.01"), Row(BigDecimal("0.01")))
+    checkAnswer(sql("select 0.001"), Row(BigDecimal("0.001")))
+    checkAnswer(sql("select -0.01"), Row(BigDecimal("-0.01")))
+    checkAnswer(sql("select -0.001"), Row(BigDecimal("-0.001")))
+  }
+
   test("external sorting updates peak execution memory") {
     withSQLConf((SQLConf.EXTERNAL_SORT.key, "true")) {
       val sc = sqlContext.sparkContext
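After the fix these literals round-trip through SQL without losing digits; a rough spark-shell style sketch, assuming a SQLContext is available as sqlContext (as in the suite above):

// select 0.001 is now typed as decimal(3, 3) instead of hitting an invalid
// decimal(1, 3), so the fractional digits survive.
val row = sqlContext.sql("select 0.001").collect().head
assert(row.getDecimal(0).compareTo(new java.math.BigDecimal("0.001")) == 0)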