
Commit ed9be06

adrian-wang authored and yhuai committed
[SPARK-7330] [SQL] avoid NPE at jdbc rdd
Thanks to nadavoosh for pointing this out in apache#5590.

Author: Daoyuan Wang <[email protected]>

Closes apache#5877 from adrian-wang/jdbcrdd and squashes the following commits:

cc11900 [Daoyuan Wang] avoid NPE in jdbcrdd
1 parent 4f87e95 commit ed9be06

File tree: 2 files changed (+32, -1 lines changed)

sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala

Lines changed: 7 additions & 1 deletion
@@ -362,7 +362,13 @@ private[sql] class JDBCRDD(
         conversions(i) match {
           case BooleanConversion => mutableRow.setBoolean(i, rs.getBoolean(pos))
           case DateConversion =>
-            mutableRow.update(i, DateUtils.fromJavaDate(rs.getDate(pos)))
+            // DateUtils.fromJavaDate does not handle null value, so we need to check it.
+            val dateVal = rs.getDate(pos)
+            if (dateVal != null) {
+              mutableRow.update(i, DateUtils.fromJavaDate(dateVal))
+            } else {
+              mutableRow.update(i, null)
+            }
           case DecimalConversion =>
             val decimalVal = rs.getBigDecimal(pos)
             if (decimalVal == null) {
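For context, a minimal self-contained sketch of the pattern this fix applies (not the Spark code itself): java.sql.ResultSet.getDate returns null for a SQL NULL column, so the converted value has to be guarded before use. The helper name readDate, the object name, and the in-memory H2 URL are illustrative assumptions, not part of the commit.

// Illustrative sketch only; mirrors the guarded conversion above.
import java.sql.{DriverManager, ResultSet}

object NullableDateExample {
  // Hypothetical helper: wrap the possibly-null java.sql.Date in an Option.
  def readDate(rs: ResultSet, pos: Int): Option[java.sql.Date] =
    Option(rs.getDate(pos))

  def main(args: Array[String]): Unit = {
    // Assumes an in-memory H2 database on the classpath, as in the test suite below.
    val conn = DriverManager.getConnection("jdbc:h2:mem:example")
    try {
      val stmt = conn.createStatement()
      stmt.executeUpdate("create table t (d DATE)")
      stmt.executeUpdate("insert into t values (null), (DATE '1996-01-01')")
      val rs = stmt.executeQuery("select d from t")
      while (rs.next()) {
        // Prints None for the NULL row and Some(1996-01-01) for the other.
        println(readDate(rs, 1))
      }
    } finally {
      conn.close()
    }
  }
}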

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala

Lines changed: 25 additions & 0 deletions
@@ -104,6 +104,8 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
       ).executeUpdate()
     conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
       + "'1996-01-01', '2002-02-20 11:22:33.543543543')").executeUpdate()
+    conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
+      + "null, '2002-02-20 11:22:33.543543543')").executeUpdate()
     conn.commit()
     sql(
       s"""
@@ -127,6 +129,23 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
         |OPTIONS (url '$url', dbtable 'TEST.FLTTYPES', user 'testUser', password 'testPass')
       """.stripMargin.replaceAll("\n", " "))
 
+    conn.prepareStatement(
+      s"""
+        |create table test.nulltypes (a INT, b BOOLEAN, c TINYINT, d BINARY(20), e VARCHAR(20),
+        |f VARCHAR_IGNORECASE(20), g CHAR(20), h BLOB, i CLOB, j TIME, k DATE, l TIMESTAMP,
+        |m DOUBLE, n REAL, o DECIMAL(40, 20))
+      """.stripMargin.replaceAll("\n", " ")).executeUpdate()
+    conn.prepareStatement("insert into test.nulltypes values ("
+      + "null, null, null, null, null, null, null, null, null, "
+      + "null, null, null, null, null, null)").executeUpdate()
+    conn.commit()
+    sql(
+      s"""
+        |CREATE TEMPORARY TABLE nulltypes
+        |USING org.apache.spark.sql.jdbc
+        |OPTIONS (url '$url', dbtable 'TEST.NULLTYPES', user 'testUser', password 'testPass')
+      """.stripMargin.replaceAll("\n", " "))
+
     // Untested: IDENTITY, OTHER, UUID, ARRAY, and GEOMETRY types.
   }
 
@@ -254,6 +273,7 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
     val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").collect()
     val cachedRows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").cache().collect()
     assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
+    assert(rows(1).getAs[java.sql.Date](1) === null)
     assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
   }
 
@@ -266,6 +286,11 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
     assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
   }
 
+  test("test types for null value") {
+    val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.NULLTYPES").collect()
+    assert((0 to 14).forall(i => rows(0).isNullAt(i)))
+  }
+
   test("H2 floating-point types") {
     val rows = sql("SELECT * FROM flttypes").collect()
     assert(rows(0).getDouble(0) === 1.00000000000000022) // Yes, I meant ==.
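The new test checks SQL NULLs with Row.isNullAt rather than comparing fetched values against null. A minimal sketch of that pattern, assuming only an org.apache.spark.sql.Row in scope (the helper name allColumnsNull is illustrative; the check mirrors the (0 to 14).forall(...) assertion above):

import org.apache.spark.sql.Row

// Returns true when every column in the row is SQL NULL,
// iterating column positions 0 until row.length.
def allColumnsNull(row: Row): Boolean =
  (0 until row.length).forall(row.isNullAt)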
