diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala index d409271fbc6b5f6177d80f188be254627698b096..98efba199ad472edb6903a8b2b6d0032fc6713ee 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala @@ -78,10 +78,10 @@ case class ArrayType(elementType: DataType, containsNull: Boolean) extends DataT ("containsNull" -> containsNull) /** - * The default size of a value of the ArrayType is 100 * the default size of the element type. - * (We assume that there are 100 elements). + * The default size of a value of the ArrayType is the default size of the element type. + * We assume that there is only 1 element on average in an array. See SPARK-18853. */ - override def defaultSize: Int = 100 * elementType.defaultSize + override def defaultSize: Int = 1 * elementType.defaultSize override def simpleString: String = s"array<${elementType.simpleString}>" diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala index fbf3a617862512b56905d14cb6be520a67d3b12e..6691b81dcea8de5638ca59afc3e4cec4295d41a9 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala @@ -56,10 +56,10 @@ case class MapType( /** * The default size of a value of the MapType is - * 100 * (the default size of the key type + the default size of the value type). - * (We assume that there are 100 elements). + * (the default size of the key type + the default size of the value type). + * We assume that there is only 1 element on average in a map. See SPARK-18853. 
*/ - override def defaultSize: Int = 100 * (keyType.defaultSize + valueType.defaultSize) + override def defaultSize: Int = 1 * (keyType.defaultSize + valueType.defaultSize) override def simpleString: String = s"map<${keyType.simpleString},${valueType.simpleString}>" diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala index b8ab9a9963de82862a3e64a592340e4b31307b67..12d2c00dc9c49814d1bf0e5a8a2f4bb3e2b46e87 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala @@ -253,7 +253,7 @@ class DataTypeSuite extends SparkFunSuite { checkDataTypeJsonRepr(structType) def checkDefaultSize(dataType: DataType, expectedDefaultSize: Int): Unit = { - test(s"Check the default size of ${dataType}") { + test(s"Check the default size of $dataType") { assert(dataType.defaultSize === expectedDefaultSize) } } @@ -272,18 +272,18 @@ class DataTypeSuite extends SparkFunSuite { checkDefaultSize(TimestampType, 8) checkDefaultSize(StringType, 20) checkDefaultSize(BinaryType, 100) - checkDefaultSize(ArrayType(DoubleType, true), 800) - checkDefaultSize(ArrayType(StringType, false), 2000) - checkDefaultSize(MapType(IntegerType, StringType, true), 2400) - checkDefaultSize(MapType(IntegerType, ArrayType(DoubleType), false), 80400) - checkDefaultSize(structType, 812) + checkDefaultSize(ArrayType(DoubleType, true), 8) + checkDefaultSize(ArrayType(StringType, false), 20) + checkDefaultSize(MapType(IntegerType, StringType, true), 24) + checkDefaultSize(MapType(IntegerType, ArrayType(DoubleType), false), 12) + checkDefaultSize(structType, 20) def checkEqualsIgnoreCompatibleNullability( from: DataType, to: DataType, expected: Boolean): Unit = { val testName = - s"equalsIgnoreCompatibleNullability: (from: ${from}, to: ${to})" + s"equalsIgnoreCompatibleNullability: (from: $from, to: $to)" test(testName) { assert(DataType.equalsIgnoreCompatibleNullability(from, to) === expected) }