diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
index b7fc46c8edfb2..a7a19e0b135bb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
@@ -93,6 +93,8 @@ case class DecimalType(precision: Int, scale: Int) extends FractionalType {
       (precision - scale) <= (dt.precision - dt.scale) && scale <= dt.scale
     case dt: IntegralType =>
       isTighterThan(DecimalType.forType(dt))
+    case dt: DoubleType =>
+      isTighterThan(DecimalType(precision, scale))
     case _ => false
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeWriteCompatibilitySuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeWriteCompatibilitySuite.scala
index 9fa016146bbd3..6aa9dfc1a39c4 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeWriteCompatibilitySuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeWriteCompatibilitySuite.scala
@@ -43,6 +43,12 @@ class StrictDataTypeWriteCompatibilitySuite extends DataTypeWriteCompatibilityBa
     }
   }
 
+  test("Check cast: decimalType to doubleType allowed") {
+    assert(canCast(DecimalType(30, 15).defaultConcreteType, DoubleType))
+    assert(canCast(DecimalType(38, 18).defaultConcreteType, DoubleType))
+    assert(canCast(DecimalType(38, 6).defaultConcreteType, DoubleType))
+  }
+
   test("Check array types: unsafe casts are not allowed") {
     val arrayOfLong = ArrayType(LongType)
     val arrayOfInt = ArrayType(IntegerType)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
index cbb3dc250b07f..75ccab2fc0c66 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
@@ -1255,13 +1255,9 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
 
       val df4 = spark.table("h2.test.employee")
         .filter(($"salary" > 1000d).and($"salary" < 12000d))
-      checkFiltersRemoved(df4, ansiMode)
-      val expectedPlanFragment4 = if (ansiMode) {
-        "PushedFilters: [SALARY IS NOT NULL, " +
-          "CAST(SALARY AS double) > 1000.0, CAST(SALARY AS double) < 12000.0], "
-      } else {
-        "PushedFilters: [SALARY IS NOT NULL], "
-      }
+      checkFiltersRemoved(df4)
+      val expectedPlanFragment4 = "PushedFilters: [SALARY IS NOT NULL, " +
+        "SALARY > 1000.00, SALARY < 12000.00], "
       checkPushedInfo(df4, expectedPlanFragment4)
       checkAnswer(df4, Seq(Row(1, "amy", 10000, 1000, true),
         Row(1, "cathy", 9000, 1200, false), Row(2, "david", 10000, 1300, true)))
@@ -1274,7 +1270,8 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
         "PushedFilters: [DEPT IS NOT NULL, ABS(DEPT - 3) > 1, " +
           "(COALESCE(CAST(SALARY AS double), BONUS)) > 2000.0]"
       } else {
-        "PushedFilters: [DEPT IS NOT NULL]"
+        "PushedFilters: [DEPT IS NOT NULL, " +
+          "(COALESCE(CAST(SALARY AS double), BONUS)) > 2000.0]"
       }
       checkPushedInfo(df5, expectedPlanFragment5)
       checkAnswer(df5, Seq(Row(1, "amy", 10000, 1000, true),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index d3c21032d0b32..9dd137ccd0b15 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -625,11 +625,6 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
       }.getMessage
       assert(msg.contains("Cannot safely cast 'i': bigint to int"))
 
-      msg = intercept[AnalysisException] {
-        sql("insert into t select 1, 2.0")
-      }.getMessage
-      assert(msg.contains("Cannot safely cast 'd': decimal(2,1) to double"))
-
      msg = intercept[AnalysisException] {
        sql("insert into t select 1, 2.0D, 3")
      }.getMessage
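Reviewer note, not part of the patch: a minimal sketch of the behavior change. With the new DoubleType case in DecimalType.isTighterThan, Cast.canUpCast from any DecimalType to DoubleType returns true, which is why the negative InsertSuite test above is removed and the H2 pushdown no longer needs the injected CAST(SALARY AS double). The sketch assumes access to the internal catalyst API (Cast.canUpCast lives in org.apache.spark.sql.catalyst.expressions and is not a stable public interface), e.g. from a suite under org.apache.spark.sql:

import org.apache.spark.sql.catalyst.expressions.Cast
import org.apache.spark.sql.types.{DecimalType, DoubleType}

// Mirrors the new StrictDataTypeWriteCompatibilitySuite assertions: every
// decimal, including the widest decimal(38, 18), is now considered safely
// up-castable to double, so strict store assignment accepts e.g.
// `insert into t select 1, 2.0` into a double column.
Seq(DecimalType(30, 15), DecimalType(38, 18), DecimalType(38, 6)).foreach { dt =>
  assert(Cast.canUpCast(dt, DoubleType))
}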