From 9fe9769809b177a9967b195ecf78a3717e1cfd8f Mon Sep 17 00:00:00 2001
From: skestle
Date: Tue, 19 Jan 2021 12:06:48 +1300
Subject: [PATCH] SPARK-33888 Restored scale for ARRAY type

This satisfies PostgresDialect's requirement for scale in Numeric arrays.
---
 .../spark/sql/execution/datasources/jdbc/JdbcUtils.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 85a05f42c77fa..87ed62e174dca 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -306,13 +306,14 @@ object JdbcUtils extends Logging {
       }
       val metadata = new MetadataBuilder()
       // SPARK-33888
-      // - include scale in metadata for only DECIMAL & NUMERIC
+      // - include scale in metadata only for DECIMAL & NUMERIC, as well as ARRAY (for Postgres)
       // - include TIME type metadata
       // - always build the metadata
       dataType match {
         // scalastyle:off
         case java.sql.Types.NUMERIC => metadata.putLong("scale", fieldScale)
         case java.sql.Types.DECIMAL => metadata.putLong("scale", fieldScale)
+        case java.sql.Types.ARRAY => metadata.putLong("scale", fieldScale) // PostgresDialect requires the scale to map numeric array types
         case java.sql.Types.TIME => metadata.putBoolean("logical_time_type", true)
         case _ =>
         // scalastyle:on