Commit f174cdc

sureshthalamati authored and gatorsmile committed
[SPARK-14536][SQL] fix to handle null value in array type column for postgres.
## What changes were proposed in this pull request?

JDBC reads fail with a NullPointerException when the source table has null values in an array-type column, because the array value is never null-checked. For a SQL NULL, ResultSet.getArray() returns null. This PR adds a null-safe check on the ResultSet.getArray() result before invoking methods on the returned Array object.

## How was this patch tested?

Updated the PostgresIntegrationSuite to test null values. Ran the docker integration tests on my laptop.

Author: sureshthalamati <[email protected]>

Closes #15192 from sureshthalamati/jdbc_array_null_fix-SPARK-14536.
1 parent 54268b4 commit f174cdc
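
For context on the failure mode: per the JDBC API, ResultSet.getArray returns null when the column value is SQL NULL, so chaining a call onto that result throws the NullPointerException this patch fixes. A minimal standalone sketch of the pattern; the connection URL, table, and column names here are hypothetical:

import java.sql.DriverManager

// Minimal sketch of the bug pattern; URL, table, and column names are hypothetical.
val conn = DriverManager.getConnection("jdbc:postgresql://localhost:5432/testdb")
val rs = conn.createStatement().executeQuery("SELECT arr_col FROM some_table")
while (rs.next()) {
  // ResultSet.getArray returns null for a SQL NULL value, so
  // rs.getArray(1).getArray would NPE here; guard before dereferencing.
  val sqlArray = rs.getArray(1)
  val values = if (sqlArray == null) null else sqlArray.getArray
  println(values)
}
conn.close()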

File tree

2 files changed: 13 additions, 5 deletions


external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala

Lines changed: 10 additions & 2 deletions
@@ -51,12 +51,17 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
       + "B'1000100101', E'\\\\xDEADBEEF', true, '172.16.0.42', '192.168.0.0/16', "
       + """'{1, 2}', '{"a", null, "b"}', '{0.11, 0.22}', '{0.11, 0.22}', 'd1', 1.01, 1)"""
     ).executeUpdate()
+    conn.prepareStatement("INSERT INTO bar VALUES (null, null, null, null, null, "
+      + "null, null, null, null, null, "
+      + "null, null, null, null, null, null, null)"
+    ).executeUpdate()
   }

   test("Type mapping for various types") {
     val df = sqlContext.read.jdbc(jdbcUrl, "bar", new Properties)
-    val rows = df.collect()
-    assert(rows.length == 1)
+    val rows = df.collect().sortBy(_.toString())
+    assert(rows.length == 2)
+    // Test the types, and values using the first row.
     val types = rows(0).toSeq.map(x => x.getClass)
     assert(types.length == 17)
     assert(classOf[String].isAssignableFrom(types(0)))
@@ -96,6 +101,9 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(rows(0).getString(14) == "d1")
     assert(rows(0).getFloat(15) == 1.01f)
     assert(rows(0).getShort(16) == 1)
+
+    // Test reading null values using the second row.
+    assert(0.until(16).forall(rows(1).isNullAt(_)))
   }

   test("Basic write test") {

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala

Lines changed: 3 additions & 3 deletions
@@ -465,9 +465,9 @@ object JdbcUtils extends Logging {
       }

       (rs: ResultSet, row: InternalRow, pos: Int) =>
-        val array = nullSafeConvert[Object](
-          rs.getArray(pos + 1).getArray,
-          array => new GenericArrayData(elementConversion.apply(array)))
+        val array = nullSafeConvert[java.sql.Array](
+          input = rs.getArray(pos + 1),
+          array => new GenericArrayData(elementConversion.apply(array.getArray)))
         row.update(pos, array)

     case _ => throw new IllegalArgumentException(s"Unsupported type ${dt.simpleString}")
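
The fix works because the null check now runs before anything dereferences the JDBC value: previously rs.getArray(pos + 1).getArray evaluated .getArray on a possibly-null java.sql.Array before nullSafeConvert could intervene, whereas now .getArray is called only inside the conversion function, which nullSafeConvert skips for null input. The helper is essentially of this shape (paraphrased for context; see JdbcUtils.scala for the exact definition):

// Paraphrased shape of the nullSafeConvert helper in JdbcUtils.scala:
// apply the conversion only when the JDBC value is non-null.
private def nullSafeConvert[T](input: T, f: T => Any): Any = {
  if (input == null) null else f(input)
}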
