Skip to content

Commit ed5c2dc

Browse files
adrian-wang authored and rxin committed
[SPARK-8158] [SQL] several fix for HiveShim
1. Explicitly import implicit conversion support. 2. Use .nonEmpty instead of .size > 0. 3. Use val instead of var. 4. Fix comment indentation. Author: Daoyuan Wang <[email protected]> Closes #6700 from adrian-wang/shimsimprove and squashes the following commits: d22e108 [Daoyuan Wang] several fixes for HiveShim
1 parent 49f19b9 commit ed5c2dc

File tree

1 file changed

+11
-10
lines changed

1 file changed

+11
-10
lines changed

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala

Lines changed: 11 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -20,6 +20,11 @@ package org.apache.spark.sql.hive
2020
import java.io.{InputStream, OutputStream}
2121
import java.rmi.server.UID
2222

23+
/* Implicit conversions */
24+
import scala.collection.JavaConversions._
25+
import scala.language.implicitConversions
26+
import scala.reflect.ClassTag
27+
2328
import com.esotericsoftware.kryo.Kryo
2429
import com.esotericsoftware.kryo.io.{Input, Output}
2530
import org.apache.hadoop.conf.Configuration
@@ -35,10 +40,6 @@ import org.apache.spark.Logging
3540
import org.apache.spark.sql.types.Decimal
3641
import org.apache.spark.util.Utils
3742

38-
/* Implicit conversions */
39-
import scala.collection.JavaConversions._
40-
import scala.reflect.ClassTag
41-
4243
private[hive] object HiveShim {
4344
// Precision and scale to pass for unlimited decimals; these are the same as the precision and
4445
// scale Hive 0.13 infers for BigDecimals from sources that don't specify them (e.g. UDFs)
@@ -68,10 +69,10 @@ private[hive] object HiveShim {
6869
* Cannot use ColumnProjectionUtils.appendReadColumns directly, if ids is null or empty
6970
*/
7071
def appendReadColumns(conf: Configuration, ids: Seq[Integer], names: Seq[String]) {
71-
if (ids != null && ids.size > 0) {
72+
if (ids != null && ids.nonEmpty) {
7273
ColumnProjectionUtils.appendReadColumns(conf, ids)
7374
}
74-
if (names != null && names.size > 0) {
75+
if (names != null && names.nonEmpty) {
7576
appendReadColumnNames(conf, names)
7677
}
7778
}
@@ -197,11 +198,11 @@ private[hive] object HiveShim {
197198
}
198199

199200
/*
200-
* Bug introduced in hive-0.13. FileSinkDesc is serializable, but its member path is not.
201-
* Fix it through wrapper.
202-
* */
201+
* Bug introduced in hive-0.13. FileSinkDesc is serializable, but its member path is not.
202+
* Fix it through wrapper.
203+
*/
203204
implicit def wrapperToFileSinkDesc(w: ShimFileSinkDesc): FileSinkDesc = {
204-
var f = new FileSinkDesc(new Path(w.dir), w.tableInfo, w.compressed)
205+
val f = new FileSinkDesc(new Path(w.dir), w.tableInfo, w.compressed)
205206
f.setCompressCodec(w.compressCodec)
206207
f.setCompressType(w.compressType)
207208
f.setTableInfo(w.tableInfo)

0 commit comments

Comments (0)