diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala index 89a6d23b1d73..592b9de83d9a 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala @@ -2053,10 +2053,15 @@ case class DatePart(field: Expression, source: Expression, child: Expression) if (!field.foldable) { throw new AnalysisException("The field parameter needs to be a foldable string value.") } - val fieldStr = field.eval().asInstanceOf[UTF8String].toString - DatePart.parseExtractField(fieldStr, source, { - throw new AnalysisException(s"Literals of type '$fieldStr' are currently not supported.") - }) + val fieldEval = field.eval() + if (fieldEval == null) { + Literal(null, DoubleType) + } else { + val fieldStr = fieldEval.asInstanceOf[UTF8String].toString + DatePart.parseExtractField(fieldStr, source, { + throw new AnalysisException(s"Literals of type '$fieldStr' are currently not supported.") + }) + } }) } diff --git a/sql/core/src/test/resources/sql-tests/inputs/date_part.sql b/sql/core/src/test/resources/sql-tests/inputs/date_part.sql index cb3d96628100..fd0fb50f7146 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/date_part.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/date_part.sql @@ -66,3 +66,5 @@ select date_part('secs', c) from t; select date_part('not_supported', c) from t; select date_part(c, c) from t; + +select date_part(null, c) from t; diff --git a/sql/core/src/test/resources/sql-tests/results/date_part.sql.out b/sql/core/src/test/resources/sql-tests/results/date_part.sql.out index c59dfdbd3da3..776786850e9d 100644 --- a/sql/core/src/test/resources/sql-tests/results/date_part.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/date_part.sql.out @@ -1,5 +1,5 @@ 
-- Automatically generated by SQLQueryTestSuite --- Number of queries: 51 +-- Number of queries: 52 -- !query 0 @@ -410,3 +410,11 @@ struct<> -- !query 50 output org.apache.spark.sql.AnalysisException The field parameter needs to be a foldable string value.;; line 1 pos 7 + + +-- !query 51 +select date_part(null, c) from t +-- !query 51 schema +struct<date_part(NULL, c):double> +-- !query 51 output +NULL diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala index 2fef05f97e57..99189a96b299 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala @@ -27,6 +27,7 @@ import org.apache.spark.sql.catalyst.util.DateTimeUtils import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession +import org.apache.spark.sql.types.{DoubleType, StructField, StructType} import org.apache.spark.unsafe.types.CalendarInterval class DateFunctionsSuite extends QueryTest with SharedSparkSession { @@ -796,4 +797,13 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession { Seq(Row(Instant.parse(timestamp)))) } } + + test("handling null field by date_part") { + val input = Seq(Date.valueOf("2019-09-20")).toDF("d") + Seq("date_part(null, d)", "date_part(null, date'2019-09-20')").foreach { expr => + val df = input.selectExpr(expr) + assert(df.schema.headOption.get.dataType == DoubleType) + checkAnswer(df, Row(null)) + } + } }