Skip to content

Commit 566d154

Browse files
committed
[SPARK-2710][SQL] use while loop instead of the zipWithIndex and foreach, use new SchemaRDD API in test suite to fix warning
1 parent b98c598 commit 566d154

File tree

2 files changed

+20
-20
lines changed

2 files changed

+20
-20
lines changed

sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcResultSetRDD.scala

Lines changed: 19 additions & 19 deletions
Original file line number · Diff line number · Diff line change
@@ -45,26 +45,26 @@ private[sql] object JdbcResultSetRDD extends Logging {
4545
jdbcResultSet.map(asRow(_, row, schema.fields))
4646
}
4747

48-
private def asRow(rs: ResultSet, row: GenericMutableRow, schema: Seq[StructField]): Row = {
49-
schema.zipWithIndex.foreach {
50-
case (StructField(name, dataType, nullable), i) => {
51-
dataType match {
52-
case StringType => row.update(i, rs.getString(i + 1))
53-
case DecimalType => row.update(i, rs.getBigDecimal(i + 1))
54-
case BooleanType => row.update(i, rs.getBoolean(i + 1))
55-
case ByteType => row.update(i, rs.getByte(i + 1))
56-
case ShortType => row.update(i, rs.getShort(i + 1))
57-
case IntegerType => row.update(i, rs.getInt(i + 1))
58-
case LongType => row.update(i, rs.getLong(i + 1))
59-
case FloatType => row.update(i, rs.getFloat(i + 1))
60-
case DoubleType => row.update(i, rs.getDouble(i + 1))
61-
case BinaryType => row.update(i, rs.getBytes(i + 1))
62-
case TimestampType => row.update(i, rs.getTimestamp(i + 1))
63-
case _ => sys.error(
64-
s"Unsupported jdbc datatype")
65-
}
66-
if (rs.wasNull) row.update(i, null)
48+
private def asRow(rs: ResultSet, row: GenericMutableRow, schemaFields: Seq[StructField]): Row = {
49+
var i = 0
50+
while (i < schemaFields.length) {
51+
schemaFields(i).dataType match {
52+
case StringType => row.update(i, rs.getString(i + 1))
53+
case DecimalType => row.update(i, rs.getBigDecimal(i + 1))
54+
case BooleanType => row.update(i, rs.getBoolean(i + 1))
55+
case ByteType => row.update(i, rs.getByte(i + 1))
56+
case ShortType => row.update(i, rs.getShort(i + 1))
57+
case IntegerType => row.update(i, rs.getInt(i + 1))
58+
case LongType => row.update(i, rs.getLong(i + 1))
59+
case FloatType => row.update(i, rs.getFloat(i + 1))
60+
case DoubleType => row.update(i, rs.getDouble(i + 1))
61+
case BinaryType => row.update(i, rs.getBytes(i + 1))
62+
case TimestampType => row.update(i, rs.getTimestamp(i + 1))
63+
case _ => sys.error(
64+
s"Unsupported jdbc datatype")
6765
}
66+
if (rs.wasNull) row.update(i, null)
67+
i += 1
6868
}
6969

7070
row

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JdbcResultSetRDDSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -52,7 +52,7 @@ class JdbcResultSetRDDSuite extends QueryTest with BeforeAndAfter {
5252

5353
test("basic functionality") {
5454
val jdbcResultSetRDD = jdbcResultSet("jdbc:derby:target/JdbcSchemaRDDSuiteDb", "SELECT DATA FROM FOO")
55-
jdbcResultSetRDD.registerAsTable("foo")
55+
jdbcResultSetRDD.registerTempTable("foo")
5656

5757
checkAnswer(
5858
sql("select count(*) from foo"),

0 commit comments

Comments (0)