Commit ec89bd8

Davies Liu authored and yhuai committed
[SPARK-10245] [SQL] Fix decimal literals with precision < scale
In BigDecimal or java.math.BigDecimal, the precision can be smaller than the scale; for example, BigDecimal("0.001") has precision = 1 and scale = 3. But DecimalType requires that the precision be no smaller than the scale, so we should use the maximum of precision and scale when inferring the schema from a decimal literal.

Author: Davies Liu <[email protected]>

Closes #8428 from davies/smaller_decimal.
1 parent 00ae4be commit ec89bd8
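
A minimal standalone illustration of the mismatch described above (plain Scala, no Spark needed; the printed values follow directly from how BigDecimal defines precision and scale):

    // "0.001" has one significant digit but three digits after the decimal point,
    // so its precision (1) is smaller than its scale (3).
    val d = BigDecimal("0.001")
    println(d.precision)   // 1
    println(d.scale)       // 3

    // The fix widens the inferred precision so it can accommodate the scale.
    val inferredPrecision = math.max(d.precision, d.scale)   // 3, giving DecimalType(3, 3)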

3 files changed: +19 −6 lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala

Lines changed: 4 additions & 3 deletions
@@ -36,9 +36,10 @@ object Literal {
     case s: Short => Literal(s, ShortType)
     case s: String => Literal(UTF8String.fromString(s), StringType)
     case b: Boolean => Literal(b, BooleanType)
-    case d: BigDecimal => Literal(Decimal(d), DecimalType(d.precision, d.scale))
-    case d: java.math.BigDecimal => Literal(Decimal(d), DecimalType(d.precision(), d.scale()))
-    case d: Decimal => Literal(d, DecimalType(d.precision, d.scale))
+    case d: BigDecimal => Literal(Decimal(d), DecimalType(Math.max(d.precision, d.scale), d.scale))
+    case d: java.math.BigDecimal =>
+      Literal(Decimal(d), DecimalType(Math.max(d.precision, d.scale), d.scale()))
+    case d: Decimal => Literal(d, DecimalType(Math.max(d.precision, d.scale), d.scale))
     case t: Timestamp => Literal(DateTimeUtils.fromJavaTimestamp(t), TimestampType)
     case d: Date => Literal(DateTimeUtils.fromJavaDate(d), DateType)
     case a: Array[Byte] => Literal(a, BinaryType)
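
A sketch of the behavior this hunk aims for, assuming a Spark build that includes the change (the expected type follows from the patched pattern match above):

    import org.apache.spark.sql.catalyst.expressions.Literal
    import org.apache.spark.sql.types.DecimalType

    // Before the change this literal inferred DecimalType(1, 3), which breaks
    // DecimalType's expectation that precision is at least the scale; with
    // Math.max the inferred type becomes DecimalType(3, 3).
    val lit = Literal(BigDecimal("0.001"))
    assert(lit.dataType == DecimalType(3, 3))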

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala

Lines changed: 5 additions & 3 deletions
@@ -83,12 +83,14 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
   }
 
   test("decimal") {
-    List(0.0, 1.2, 1.1111, 5).foreach { d =>
+    List(-0.0001, 0.0, 0.001, 1.2, 1.1111, 5).foreach { d =>
       checkEvaluation(Literal(Decimal(d)), Decimal(d))
       checkEvaluation(Literal(Decimal(d.toInt)), Decimal(d.toInt))
       checkEvaluation(Literal(Decimal(d.toLong)), Decimal(d.toLong))
-      checkEvaluation(Literal(Decimal((d * 1000L).toLong, 10, 1)),
-        Decimal((d * 1000L).toLong, 10, 1))
+      checkEvaluation(Literal(Decimal((d * 1000L).toLong, 10, 3)),
+        Decimal((d * 1000L).toLong, 10, 3))
+      checkEvaluation(Literal(BigDecimal(d.toString)), Decimal(d))
+      checkEvaluation(Literal(new java.math.BigDecimal(d.toString)), Decimal(d))
     }
   }
 
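The Decimal((d * 1000L).toLong, 10, 3) form used in the test takes an unscaled Long, a precision, and a scale, i.e. the decimal value equals the Long divided by 10^scale; bumping the scale from 1 to 3 is what lets the new three-fractional-digit inputs round-trip. A small sketch under that reading, using a value from the test's own list:

    import org.apache.spark.sql.types.Decimal

    // 1.2 * 1000 = 1200; with scale 3 the unscaled value 1200 reads back as 1.200.
    val dec = Decimal((1.2 * 1000L).toLong, 10, 3)
    assert(dec.toDouble == 1.2)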

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 10 additions & 0 deletions
@@ -1627,6 +1627,16 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
       Row(null))
   }
 
+  test("precision smaller than scale") {
+    checkAnswer(sql("select 10.00"), Row(BigDecimal("10.00")))
+    checkAnswer(sql("select 1.00"), Row(BigDecimal("1.00")))
+    checkAnswer(sql("select 0.10"), Row(BigDecimal("0.10")))
+    checkAnswer(sql("select 0.01"), Row(BigDecimal("0.01")))
+    checkAnswer(sql("select 0.001"), Row(BigDecimal("0.001")))
+    checkAnswer(sql("select -0.01"), Row(BigDecimal("-0.01")))
+    checkAnswer(sql("select -0.001"), Row(BigDecimal("-0.001")))
+  }
+
   test("external sorting updates peak execution memory") {
     withSQLConf((SQLConf.EXTERNAL_SORT.key, "true")) {
       val sc = sqlContext.sparkContext
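
The same literals can be exercised interactively, assuming a spark-shell (or any SQLContext) built with this patch; the expected results mirror the checkAnswer calls above:

    // Fractional literals whose precision would otherwise be smaller than their
    // scale now round-trip through SQL instead of producing an invalid DecimalType.
    sqlContext.sql("select 0.001").collect()   // Array([0.001])
    sqlContext.sql("select -0.01").collect()   // Array([-0.01])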
