Commit ed5c2dcc authored by Daoyuan Wang, committed by Reynold Xin

[SPARK-8158] [SQL] several fixes for HiveShim

1. Explicitly import implicit conversion support.
2. Use .nonEmpty instead of .size > 0.
3. Use val instead of var.
4. Fix comment indentation.

Author: Daoyuan Wang <daoyuan.wang@intel.com>

Closes #6700 from adrian-wang/shimsimprove and squashes the following commits:

d22e108 [Daoyuan Wang] several fix for HiveShim
parent 49f19b95
@@ -20,6 +20,11 @@ package org.apache.spark.sql.hive
 import java.io.{InputStream, OutputStream}
 import java.rmi.server.UID
 
+/* Implicit conversions */
+import scala.collection.JavaConversions._
+import scala.language.implicitConversions
+import scala.reflect.ClassTag
+
 import com.esotericsoftware.kryo.Kryo
 import com.esotericsoftware.kryo.io.{Input, Output}
 import org.apache.hadoop.conf.Configuration
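
The new import block is worth a note: import scala.language.implicitConversions opts in to the implicit-conversion language feature, so the implicit def that HiveShim defines further down (see the last hunk) no longer triggers a compiler feature warning under -feature. A minimal standalone sketch of the mechanism, with hypothetical names that are not part of the patch:

// Standalone sketch (hypothetical names): implicit defs are an opt-in
// language feature; this import silences the -feature warning.
import scala.language.implicitConversions

object ImplicitConversionDemo {
  final case class Meters(value: Double)

  // Lets a plain Double be used wherever Meters is expected.
  implicit def doubleToMeters(d: Double): Meters = Meters(d)

  def describe(m: Meters): String = s"${m.value} m"

  def main(args: Array[String]): Unit = {
    println(describe(3.5)) // prints "3.5 m"
  }
}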
@@ -35,10 +40,6 @@ import org.apache.spark.Logging
 import org.apache.spark.sql.types.Decimal
 import org.apache.spark.util.Utils
 
-/* Implicit conversions */
-import scala.collection.JavaConversions._
-import scala.reflect.ClassTag
-
 private[hive] object HiveShim {
   // Precision and scale to pass for unlimited decimals; these are the same as the precision and
   // scale Hive 0.13 infers for BigDecimals from sources that don't specify them (e.g. UDFs)
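
Note that the /* Implicit conversions */ block is not dropped; it is relocated into the main import group at the top of the file (first hunk above), which is what change 1 in the commit message refers to. The scala.collection.JavaConversions._ wildcard supplies implicit Java <-> Scala collection conversions. A minimal standalone sketch (hypothetical values; this import is deprecated in later Scala releases in favour of scala.collection.JavaConverters):

// Standalone sketch (hypothetical values): JavaConversions makes Java and
// Scala collections interchangeable via implicit wrapping.
import scala.collection.JavaConversions._

object JavaConversionsDemo {
  def main(args: Array[String]): Unit = {
    val javaList = new java.util.ArrayList[Integer]()
    javaList.add(1)
    javaList.add(2)

    // A java.util.List gains Scala collection methods implicitly.
    println(javaList.map(i => i + 1)) // ArrayBuffer(2, 3)

    // A Scala Seq can be passed where a java.util.List is expected.
    val asJava: java.util.List[Integer] = Seq[Integer](3, 4)
    println(asJava.size()) // 2
  }
}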
@@ -68,10 +69,10 @@ private[hive] object HiveShim {
    * Cannot use ColumnProjectionUtils.appendReadColumns directly, if ids is null or empty
    */
   def appendReadColumns(conf: Configuration, ids: Seq[Integer], names: Seq[String]) {
-    if (ids != null && ids.size > 0) {
+    if (ids != null && ids.nonEmpty) {
       ColumnProjectionUtils.appendReadColumns(conf, ids)
     }
-    if (names != null && names.size > 0) {
+    if (names != null && names.nonEmpty) {
       appendReadColumnNames(conf, names)
     }
   }
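
Here .nonEmpty is semantically equivalent to .size > 0, but it is the idiomatic form, and for linear sequences such as List it is also cheaper: isEmpty/nonEmpty inspects at most the head, while size traverses the whole collection. (It is also the JavaConversions import above that lets the Scala Seq[Integer] be handed to Hive's Java-side ColumnProjectionUtils.) A standalone sketch with hypothetical values:

// Standalone sketch (hypothetical values): nonEmpty is the idiomatic test
// and, unlike size, does not traverse a linear sequence.
object NonEmptyDemo {
  def main(args: Array[String]): Unit = {
    val ids: Seq[Integer] = Seq(0, 2, 5)
    println(ids.size > 0)  // true, after counting every element
    println(ids.nonEmpty)  // true, after checking only that a head exists
  }
}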
@@ -197,11 +198,11 @@ private[hive] object HiveShim {
   }
 
   /*
    * Bug introduced in hive-0.13. FileSinkDesc is serializable, but its member path is not.
    * Fix it through wrapper.
-   * */
+   */
   implicit def wrapperToFileSinkDesc(w: ShimFileSinkDesc): FileSinkDesc = {
-    var f = new FileSinkDesc(new Path(w.dir), w.tableInfo, w.compressed)
+    val f = new FileSinkDesc(new Path(w.dir), w.tableInfo, w.compressed)
     f.setCompressCodec(w.compressCodec)
     f.setCompressType(w.compressType)
     f.setTableInfo(w.tableInfo)
...
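
Change 3 shows up here: f is only mutated through its setters and never reassigned, so val is the correct binding, and the compiler will now reject any accidental reassignment. The surrounding code is the wrapper pattern the comment describes: a serializable stand-in (ShimFileSinkDesc) carries the state, and an implicit conversion rebuilds the real FileSinkDesc at the point of use. A minimal standalone sketch of the pattern with hypothetical names:

// Standalone sketch (hypothetical names): keep a serializable stand-in and
// convert it back to the real type implicitly at the point of use.
import scala.language.implicitConversions

object WrapperDemo {
  // Stand-in for a third-party class with a non-serializable field.
  class ThirdPartyDesc(val dir: String)

  // Serializable wrapper that stores only serializable state.
  case class ShimDesc(dir: String)

  // Rebuild the third-party object wherever it is actually needed.
  implicit def shimToThirdParty(w: ShimDesc): ThirdPartyDesc =
    new ThirdPartyDesc(w.dir)

  def consume(d: ThirdPartyDesc): String = d.dir

  def main(args: Array[String]): Unit = {
    val shim = ShimDesc("/tmp/output") // val, per change 3: never reassigned
    println(consume(shim))             // conversion applies: "/tmp/output"
  }
}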