diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index bfcf111385b7eceaf8f5bbd41b4a84c53addbfb4..909b8e31f24580947a8fbffc3ca88bd2a5badcf0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -88,7 +88,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
       if (precision < 19) {
         return null  // Requested precision is too low to represent this value
       }
-      this.decimalVal = BigDecimal(unscaled)
+      this.decimalVal = BigDecimal(unscaled, scale)
       this.longVal = 0L
     } else {
       val p = POW_10(math.min(precision, MAX_LONG_DIGITS))
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
index 6921d15958a557ae65e1fa188d89cc11019e5b6e..f9aceb8d3b13e11a404ac53f4a0dd01f1426f83f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
@@ -44,6 +44,7 @@ class DecimalSuite extends SparkFunSuite with PrivateMethodTester {
     checkDecimal(Decimal(170L, 4, 2), "1.70", 4, 2)
     checkDecimal(Decimal(17L, 24, 1), "1.7", 24, 1)
     checkDecimal(Decimal(1e17.toLong, 18, 0), 1e17.toLong.toString, 18, 0)
+    checkDecimal(Decimal(1000000000000000000L, 20, 2), "10000000000000000.00", 20, 2)
     checkDecimal(Decimal(Long.MaxValue), Long.MaxValue.toString, 20, 0)
     checkDecimal(Decimal(Long.MinValue), Long.MinValue.toString, 20, 0)
     intercept[IllegalArgumentException](Decimal(170L, 2, 1))
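
Context for the fix above: the method takes an unscaled Long plus a precision and scale, so the represented value is unscaled * 10^(-scale). When the value is too large for the compact long representation, the old code fell back to BigDecimal(unscaled), silently dropping the scale. Below is a minimal sketch in plain Scala (no Spark required; the object name DecimalScaleDemo is illustrative) contrasting the two BigDecimal constructors the diff swaps:

object DecimalScaleDemo {
  def main(args: Array[String]): Unit = {
    // 10^18 has 19 digits, one more than fits in the compact long path.
    val unscaled = 1000000000000000000L

    // Old behavior: the scale is ignored, so the unscaled value leaks through.
    println(BigDecimal(unscaled))     // 1000000000000000000
    // Fixed behavior: unscaled value 10^18 at scale 2 is 10^16.00.
    println(BigDecimal(unscaled, 2))  // 10000000000000000.00
  }
}

This is exactly the case the new DecimalSuite assertion covers: Decimal(1000000000000000000L, 20, 2) must render as "10000000000000000.00" rather than as the raw unscaled long.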