Commit df7041d0 authored by Yin Huai, committed by Davies Liu

[SPARK-10196] [SQL] Correctly saving decimals in internal rows to JSON.

https://issues.apache.org/jira/browse/SPARK-10196

Author: Yin Huai <yhuai@databricks.com>

Closes #8408 from yhuai/DecimalJsonSPARK-10196.
parent f023aa2f
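
In short: the JSON writer previously pattern-matched decimal values as java.math.BigDecimal, but internal rows carry Spark SQL's own Decimal type, so that arm never fired for decimal columns. The fix matches on Decimal and converts it with toJavaBigDecimal before handing it to Jackson. Below is a minimal, self-contained sketch of that conversion (an illustration only, not the patched Spark code); it assumes Jackson and the spark-sql classes are on the classpath, and the object name DecimalJsonSketch is made up for this example.

    // Minimal sketch (illustration only): converting a Spark SQL Decimal to
    // java.math.BigDecimal so Jackson can write it as a JSON number.
    import java.io.StringWriter

    import com.fasterxml.jackson.core.JsonFactory

    import org.apache.spark.sql.types.Decimal

    object DecimalJsonSketch {
      def main(args: Array[String]): Unit = {
        val writer = new StringWriter()
        val gen = new JsonFactory().createGenerator(writer)

        // Internal rows store decimals as Decimal, not java.math.BigDecimal.
        val v: Decimal = Decimal(new java.math.BigDecimal("10.02"))

        gen.writeStartObject()
        gen.writeFieldName("decimal")
        gen.writeNumber(v.toJavaBigDecimal)  // the conversion the fix introduces
        gen.writeEndObject()
        gen.close()

        println(writer.toString)  // {"decimal":10.02}
      }
    }
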
@@ -95,7 +95,7 @@ private[sql] object JacksonGenerator {
       case (FloatType, v: Float) => gen.writeNumber(v)
       case (DoubleType, v: Double) => gen.writeNumber(v)
       case (LongType, v: Long) => gen.writeNumber(v)
-      case (DecimalType(), v: java.math.BigDecimal) => gen.writeNumber(v)
+      case (DecimalType(), v: Decimal) => gen.writeNumber(v.toJavaBigDecimal)
       case (ByteType, v: Byte) => gen.writeNumber(v.toInt)
       case (BinaryType, v: Array[Byte]) => gen.writeBinary(v)
       case (BooleanType, v: Boolean) => gen.writeBoolean(v)
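
The old arm could not match values coming out of internal rows because a Spark SQL Decimal is not an instance of java.math.BigDecimal, so the runtime type test failed. A small sketch of that point (again assuming spark-sql on the classpath; the object name is illustrative):

    // Illustration of why the old case arm never matched values from internal rows.
    import org.apache.spark.sql.types.Decimal

    object DecimalMatchSketch {
      def main(args: Array[String]): Unit = {
        val v: Any = Decimal(new java.math.BigDecimal("20000.99"))

        val arm = v match {
          case _: java.math.BigDecimal => "old arm"  // never matches a Decimal
          case d: Decimal              => "new arm: " + d.toJavaBigDecimal
          case _                       => "no match"
        }
        println(arm)  // new arm: 20000.99
      }
    }
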
@@ -17,6 +17,8 @@
 package org.apache.spark.sql.sources
 
+import java.math.BigDecimal
+
 import org.apache.hadoop.fs.Path
 
 import org.apache.spark.deploy.SparkHadoopUtil
@@ -75,4 +77,29 @@ class JsonHadoopFsRelationSuite extends HadoopFsRelationTest {
       )
     }
   }
+
+  test("SPARK-10196: save decimal type to JSON") {
+    withTempDir { file =>
+      file.delete()
+
+      val schema =
+        new StructType()
+          .add("decimal", DecimalType(7, 2))
+
+      val data =
+        Row(new BigDecimal("10.02")) ::
+          Row(new BigDecimal("20000.99")) ::
+          Row(new BigDecimal("10000")) :: Nil
+      val df = createDataFrame(sparkContext.parallelize(data), schema)
+
+      // Write the data out.
+      df.write.format(dataSourceName).save(file.getCanonicalPath)
+
+      // Read it back and check the result.
+      checkAnswer(
+        read.format(dataSourceName).schema(schema).load(file.getCanonicalPath),
+        df
+      )
+    }
+  }
 }