File tree Expand file tree Collapse file tree 2 files changed +3
-2
lines changed
external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc Expand file tree Collapse file tree 2 files changed +3
-2
lines changed Original file line number Diff line number Diff line change @@ -61,6 +61,7 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
6161 val df = sqlContext.read.jdbc(jdbcUrl, " bar" , new Properties )
6262 val rows = df.collect().sortBy(_.toString())
6363 assert(rows.length == 2 )
64+ // Test the types and values using the first row.
6465 val types = rows(0 ).toSeq.map(x => x.getClass)
6566 assert(types.length == 17 )
6667 assert(classOf [String ].isAssignableFrom(types(0 )))
@@ -101,7 +102,7 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
101102 assert(rows(0 ).getFloat(15 ) == 1.01f )
102103 assert(rows(0 ).getShort(16 ) == 1 )
103104
104- // Test reading null values.
105+ // Test reading null values using the second row.
105106 assert(0 .until(16 ).forall(rows(1 ).isNullAt(_)))
106107 }
107108
Original file line number Diff line number Diff line change @@ -466,7 +466,7 @@ object JdbcUtils extends Logging {
466466
467467 (rs : ResultSet , row : InternalRow , pos : Int ) =>
468468 val array = nullSafeConvert[java.sql.Array ](
469- rs.getArray(pos + 1 ),
469+ input = rs.getArray(pos + 1 ),
470470 array => new GenericArrayData (elementConversion.apply(array.getArray)))
471471 row.update(pos, array)
472472
You can’t perform that action at this time.
0 commit comments