From 22c46a95f10ece3b3f14bd1feca848a0c047c2c5 Mon Sep 17 00:00:00 2001
From: GuoPhilipse <46367746+GuoPhilipse@users.noreply.github.com>
Date: Mon, 18 May 2020 20:32:39 +0800
Subject: [PATCH] Add compatibility flag to cast long to timestamp

Hive interprets a long value as milliseconds when casting it to a
timestamp, while Spark interprets the same value as seconds. Because of
this mismatch we have been seeing corrupted timestamps while migrating
Hive SQL to Spark SQL. This compatibility flag lets users restore the
Hive semantics and avoid the corruption.
---
 .../org/apache/spark/sql/internal/SQLConf.scala | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 760a9db8bead0..08467496dd30f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1591,6 +1591,16 @@ object SQLConf {
       .doc("When true, use legacy MySqlServer SMALLINT and REAL type mapping.")
       .booleanConf
       .createWithDefault(false)
+
+  val LONG_TIMESTAMP_CONVERSION_IN_SECONDS =
+    buildConf("spark.sql.legacy.longTimestampConversionInSeconds")
+      .internal()
+      .doc("When false, a Byte/Short/Int/Long value is interpreted as milliseconds " +
+        "during the conversion to timestamp. " +
+        "When true, the value is interpreted as seconds, " +
+        "to be consistent with Decimal/Double.")
+      .booleanConf
+      .createWithDefault(true)
 }
 
 /**
@@ -2004,6 +2014,8 @@ class SQLConf extends Serializable with Logging {
   def legacyMsSqlServerNumericMappingEnabled: Boolean =
     getConf(LEGACY_MSSQLSERVER_NUMERIC_MAPPING_ENABLED)
 
+  def longTimestampConversionInSeconds: Boolean = getConf(LONG_TIMESTAMP_CONVERSION_IN_SECONDS)
+
   /** ********************** SQLConf functionality methods ************ */
 
   /** Set Spark SQL configuration properties. */
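
Note (not part of the patch): below is a minimal sketch of how the new
flag could be exercised. This patch only defines the config entry, so the
`false` branch assumes a follow-up change that makes CAST actually consult
spark.sql.legacy.longTimestampConversionInSeconds; the epoch values are
illustrative.

```scala
import org.apache.spark.sql.SparkSession

object LongToTimestampDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("long-to-timestamp-demo")
      .getOrCreate()

    // Default (true): a long is treated as seconds since the epoch,
    // which is Spark's existing behavior.
    spark.sql("SET spark.sql.legacy.longTimestampConversionInSeconds=true")
    spark.sql("SELECT CAST(1589804000 AS TIMESTAMP)").show(false)
    // 1589804000 seconds -> 2020-05-18 12:13:20 UTC

    // Hive-compatible mode (false): the same magnitude would be treated as
    // milliseconds, matching Hive's CAST(bigint AS timestamp). This branch
    // assumes the flag has been wired into the cast logic by a follow-up
    // change; with this patch alone it has no effect on CAST.
    spark.sql("SET spark.sql.legacy.longTimestampConversionInSeconds=false")
    spark.sql("SELECT CAST(1589804000000 AS TIMESTAMP)").show(false)

    spark.stop()
  }
}
```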