Skip to content

Commit ed0824a

Browse files
author
kneeraj
authored
Fix to SNAP-2247 (#114)
* This is a Spark bug; please see PR apache#17529. A similar change was needed in the prepared-statement code path, where the precision needed to be adjusted if it was smaller than the scale.
1 parent 1ba1337 commit ed0824a

File tree

1 file changed

+12
-2
lines changed

1 file changed

+12
-2
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala

Lines changed: 12 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -431,8 +431,18 @@ object CatalystTypeConverters {
431431
case s: String => StringConverter.toCatalyst(s)
432432
case d: Date => DateConverter.toCatalyst(d)
433433
case t: Timestamp => TimestampConverter.toCatalyst(t)
434-
case d: BigDecimal => new DecimalConverter(DecimalType(d.precision, d.scale)).toCatalyst(d)
435-
case d: JavaBigDecimal => new DecimalConverter(DecimalType(d.precision, d.scale)).toCatalyst(d)
434+
case d: BigDecimal =>
435+
var precision = d.precision
436+
if (d.precision < d.scale) {
437+
precision = d.scale + 1
438+
}
439+
new DecimalConverter(DecimalType(precision, d.scale)).toCatalyst(d)
440+
case d: JavaBigDecimal =>
441+
var precision = d.precision
442+
if (d.precision < d.scale) {
443+
precision = d.scale + 1
444+
}
445+
new DecimalConverter(DecimalType(precision, d.scale)).toCatalyst(d)
436446
case seq: Seq[Any] => new GenericArrayData(seq.map(convertToCatalyst).toArray)
437447
case r: Row => InternalRow(r.toSeq.map(convertToCatalyst): _*)
438448
case arr: Array[Any] => new GenericArrayData(arr.map(convertToCatalyst))

0 commit comments

Comments (0)