diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java
index 8fdd7399602d249ee7a94123005ebb0992e5617b..32faad374015c6ca7af7a153208bef62fecef801 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java
@@ -47,7 +47,7 @@ public class UnsafeRowWriters {
           target.getBaseObject(), offset + ((numBytes >> 3) << 3), 0L);
       }

-      // Write the string to the variable length portion.
+      // Write the bytes to the variable length portion.
       input.writeToMemory(target.getBaseObject(), offset);

       // Set the fixed length portion.
@@ -73,7 +73,7 @@ public class UnsafeRowWriters {
           target.getBaseObject(), offset + ((numBytes >> 3) << 3), 0L);
       }

-      // Write the string to the variable length portion.
+      // Write the bytes to the variable length portion.
       ByteArray.writeToMemory(input, target.getBaseObject(), offset);

       // Set the fixed length portion.
@@ -115,7 +115,7 @@ public class UnsafeRowWriters {
           target.getBaseObject(), offset + ((numBytes >> 3) << 3), 0L);
       }

-      // Write the string to the variable length portion.
+      // Write the bytes to the variable length portion.
       row.writeToMemory(target.getBaseObject(), offset);

       // Set the fixed length portion.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala
index 3e87f7285847c4860fb4f36586bb2f8dc6d61664..9a4c00e86a3ec81b8e2add10da54fcc9f59f4e4d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala
@@ -62,14 +62,10 @@ object GenerateUnsafeProjection extends CodeGenerator[Seq[Expression], UnsafePro
     val cursor = ctx.freshName("cursor")
     val numBytes = ctx.freshName("numBytes")

-    val exprs = expressions.zipWithIndex.map { case (e, i) =>
-      e.dataType match {
-        case st: StructType =>
-          createCodeForStruct(ctx, e.gen(ctx), st)
-        case _ =>
-          e.gen(ctx)
-      }
-    }
+    val exprs = expressions.map { e => e.dataType match {
+      case st: StructType => createCodeForStruct(ctx, e.gen(ctx), st)
+      case _ => e.gen(ctx)
+    }}
     val allExprs = exprs.map(_.code).mkString("\n")

     val fixedSize = 8 * exprs.length + UnsafeRow.calculateBitSetWidthInBytes(exprs.length)
@@ -153,20 +149,20 @@ object GenerateUnsafeProjection extends CodeGenerator[Seq[Expression], UnsafePro

     val exprs: Seq[GeneratedExpressionCode] = schema.map(_.dataType).zipWithIndex.map {
       case (dt, i) => dt match {
-          case st: StructType =>
-            val nestedStructEv = GeneratedExpressionCode(
-              code = "",
-              isNull = s"${input.primitive}.isNullAt($i)",
-              primitive = s"${ctx.getColumn(input.primitive, dt, i)}"
-            )
-            createCodeForStruct(ctx, nestedStructEv, st)
-          case _ =>
-            GeneratedExpressionCode(
-              code = "",
-              isNull = s"${input.primitive}.isNullAt($i)",
-              primitive = s"${ctx.getColumn(input.primitive, dt, i)}"
-            )
-        }
+        case st: StructType =>
+          val nestedStructEv = GeneratedExpressionCode(
+            code = "",
+            isNull = s"${input.primitive}.isNullAt($i)",
+            primitive = s"${ctx.getColumn(input.primitive, dt, i)}"
+          )
+          createCodeForStruct(ctx, nestedStructEv, st)
+        case _ =>
+          GeneratedExpressionCode(
+            code = "",
+            isNull = s"${input.primitive}.isNullAt($i)",
+            primitive = s"${ctx.getColumn(input.primitive, dt, i)}"
+          )
+      }
     }
     val allExprs = exprs.map(_.code).mkString("\n")

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index 314b85f126dd2db43423106813907c32a6f48a30..f3ef066528ff839691fd56e020e4da7880cbabb2 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -339,7 +339,8 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
    * if necessary.
    */
   def getSortOperator(sortExprs: Seq[SortOrder], global: Boolean, child: SparkPlan): SparkPlan = {
-    if (sqlContext.conf.unsafeEnabled && UnsafeExternalSort.supportsSchema(child.schema)) {
+    if (sqlContext.conf.unsafeEnabled && sqlContext.conf.codegenEnabled &&
+      UnsafeExternalSort.supportsSchema(child.schema)) {
       execution.UnsafeExternalSort(sortExprs, global, child)
     } else if (sqlContext.conf.externalSortEnabled) {
       execution.ExternalSort(sortExprs, global, child)