Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -2735,7 +2735,11 @@ object TryMakeTimestampNTZExpressionBuilder extends ExpressionBuilder {

// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(year, month, day, hour, min, sec[, timezone]) - Create the current timestamp with local time zone from year, month, day, hour, min, sec and timezone fields. If the configuration `spark.sql.ansi.enabled` is false, the function returns NULL on invalid inputs. Otherwise, it will throw an error instead.",
usage = """
_FUNC_(year, month, day, hour, min, sec[, timezone]) - Create the current timestamp with local time zone from year, month, day, hour, min, sec and (optional) timezone fields. If the configuration `spark.sql.ansi.enabled` is false, the function returns NULL on invalid inputs. Otherwise, it will throw an error instead.

_FUNC_(date, time[, timezone]) - Create a local date-time from date, time and (optional) timezone fields.
""",
arguments = """
Arguments:
* year - the year to represent, from 1 to 9999
Expand All @@ -2747,6 +2751,8 @@ object TryMakeTimestampNTZExpressionBuilder extends ExpressionBuilder {
0 to 60. If the sec argument equals to 60, the seconds field is set
to 0 and 1 minute is added to the final timestamp.
* timezone - the time zone identifier. For example, CET, UTC and etc.
* date - a date to represent, from 0001-01-01 to 9999-12-31
* time - a local time to represent, from 00:00:00 to 23:59:59.999999
""",
examples = """
Examples:
Expand All @@ -2758,14 +2764,27 @@ object TryMakeTimestampNTZExpressionBuilder extends ExpressionBuilder {
2019-07-01 00:00:00
> SELECT _FUNC_(null, 7, 22, 15, 30, 0);
NULL
> SELECT _FUNC_(DATE'2014-12-28', TIME'6:30:45.887');
2014-12-28 06:30:45.887
> SELECT _FUNC_(DATE'2014-12-28', TIME'6:30:45.887', 'CET');
2014-12-27 21:30:45.887
""",
group = "datetime_funcs",
since = "3.4.0")
// scalastyle:on line.size.limit
object MakeTimestampLTZExpressionBuilder extends ExpressionBuilder {
override def build(funcName: String, expressions: Seq[Expression]): Expression = {
val numArgs = expressions.length
if (numArgs == 6 || numArgs == 7) {
if (numArgs == 2 || numArgs == 3) {
// Overload for: date, time[, timezone].
MakeTimestampFromDateTime(
expressions(0),
Some(expressions(1)),
expressions.drop(2).lastOption
)
}
else if (numArgs == 6 || numArgs == 7) {
// Overload for: year, month, day, hour, min, sec[, timezone].
MakeTimestamp(
expressions(0),
expressions(1),
Expand All @@ -2776,7 +2795,7 @@ object MakeTimestampLTZExpressionBuilder extends ExpressionBuilder {
expressions.drop(6).lastOption,
dataType = TimestampType)
} else {
throw QueryCompilationErrors.wrongNumArgsError(funcName, Seq(6), numArgs)
throw QueryCompilationErrors.wrongNumArgsError(funcName, Seq(2, 6), numArgs)
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,87 @@ Project [make_timestamp_ltz(2021, 7, 11, 6, 30, cast(60.007 as decimal(16,6)), N
+- OneRowRelation


-- !query
SELECT make_timestamp_ltz(make_date(2021, 07, 11), make_time(6, 30, 45.678))
-- !query analysis
Project [make_timestamp(make_date(2021, 7, 11, true), Some(make_time(6, 30, cast(45.678 as decimal(16,6)))), None, Some(America/Los_Angeles)) AS make_timestamp(make_date(2021, 7, 11), make_time(6, 30, 45.678))#x]
+- OneRowRelation


-- !query
SELECT make_timestamp_ltz(NULL, TIME'00:00:00')
-- !query analysis
[Analyzer test output redacted due to nondeterminism]


-- !query
SELECT make_timestamp_ltz(DATE'1970-01-01', NULL)
-- !query analysis
[Analyzer test output redacted due to nondeterminism]


-- !query
SELECT make_timestamp_ltz(timestamp_ntz'2018-11-17 13:33:33', TIME'0:0:0')
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"inputSql" : "\"TIMESTAMP_NTZ '2018-11-17 13:33:33'\"",
"inputType" : "\"TIMESTAMP_NTZ\"",
"paramIndex" : "first",
"requiredType" : "\"DATE\"",
"sqlExpr" : "\"make_timestamp(TIMESTAMP_NTZ '2018-11-17 13:33:33', TIME '00:00:00')\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 74,
"fragment" : "make_timestamp_ltz(timestamp_ntz'2018-11-17 13:33:33', TIME'0:0:0')"
} ]
}


-- !query
SELECT make_timestamp_ltz(DATE'2025-06-20', timestamp_ntz'2018-11-17 13:33:33')
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"inputSql" : "\"TIMESTAMP_NTZ '2018-11-17 13:33:33'\"",
"inputType" : "\"TIMESTAMP_NTZ\"",
"paramIndex" : "second",
"requiredType" : "\"TIME\"",
"sqlExpr" : "\"make_timestamp(DATE '2025-06-20', TIMESTAMP_NTZ '2018-11-17 13:33:33')\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 79,
"fragment" : "make_timestamp_ltz(DATE'2025-06-20', timestamp_ntz'2018-11-17 13:33:33')"
} ]
}


-- !query
SELECT make_timestamp_ltz(make_date(2021, 07, 11), make_time(6, 30, 45.678), 'PST')
-- !query analysis
Project [make_timestamp(make_date(2021, 7, 11, true), Some(make_time(6, 30, cast(45.678 as decimal(16,6)))), Some(PST), Some(America/Los_Angeles)) AS make_timestamp(make_date(2021, 7, 11), make_time(6, 30, 45.678), PST)#x]
+- OneRowRelation


-- !query
SELECT make_timestamp_ltz(make_date(2021, 07, 11), make_time(6, 30, 45.678), 'CET')
-- !query analysis
Project [make_timestamp(make_date(2021, 7, 11, true), Some(make_time(6, 30, cast(45.678 as decimal(16,6)))), Some(CET), Some(America/Los_Angeles)) AS make_timestamp(make_date(2021, 7, 11), make_time(6, 30, 45.678), CET)#x]
+- OneRowRelation


-- !query
SELECT convert_timezone('Europe/Brussels', timestamp_ltz'2022-03-23 00:00:00 America/Los_Angeles')
-- !query analysis
Expand Down
12 changes: 12 additions & 0 deletions sql/core/src/test/resources/sql-tests/inputs/timestamp-ltz.sql
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,16 @@ SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 45.678);
SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 45.678, 'CET');
SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 60.007);

-- TimestampLTZ constructor overload taking DATE and TIME (and an optional timezone) fields.
SELECT make_timestamp_ltz(make_date(2021, 07, 11), make_time(6, 30, 45.678));
-- Handling NULL input: a NULL date or NULL time argument yields NULL.
SELECT make_timestamp_ltz(NULL, TIME'00:00:00');
SELECT make_timestamp_ltz(DATE'1970-01-01', NULL);
-- Handling invalid input: TIMESTAMP_NTZ is neither DATE nor TIME, so analysis fails
-- with DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE.
SELECT make_timestamp_ltz(timestamp_ntz'2018-11-17 13:33:33', TIME'0:0:0');
SELECT make_timestamp_ltz(DATE'2025-06-20', timestamp_ntz'2018-11-17 13:33:33');
-- Optional timezone argument is honored: 'CET' shifts the result relative to the
-- session time zone (America/Los_Angeles), while 'PST' matches it.
SELECT make_timestamp_ltz(make_date(2021, 07, 11), make_time(6, 30, 45.678), 'PST');
SELECT make_timestamp_ltz(make_date(2021, 07, 11), make_time(6, 30, 45.678), 'CET');

SELECT convert_timezone('Europe/Brussels', timestamp_ltz'2022-03-23 00:00:00 America/Los_Angeles');
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,98 @@ org.apache.spark.SparkDateTimeException
}


-- !query
SELECT make_timestamp_ltz(make_date(2021, 07, 11), make_time(6, 30, 45.678))
-- !query schema
struct<make_timestamp(make_date(2021, 7, 11), make_time(6, 30, 45.678)):timestamp>
-- !query output
2021-07-11 06:30:45.678


-- !query
SELECT make_timestamp_ltz(NULL, TIME'00:00:00')
-- !query schema
struct<make_timestamp(NULL, TIME '00:00:00'):timestamp>
-- !query output
NULL


-- !query
SELECT make_timestamp_ltz(DATE'1970-01-01', NULL)
-- !query schema
struct<make_timestamp(DATE '1970-01-01', NULL):timestamp>
-- !query output
NULL


-- !query
SELECT make_timestamp_ltz(timestamp_ntz'2018-11-17 13:33:33', TIME'0:0:0')
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"inputSql" : "\"TIMESTAMP_NTZ '2018-11-17 13:33:33'\"",
"inputType" : "\"TIMESTAMP_NTZ\"",
"paramIndex" : "first",
"requiredType" : "\"DATE\"",
"sqlExpr" : "\"make_timestamp(TIMESTAMP_NTZ '2018-11-17 13:33:33', TIME '00:00:00')\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 74,
"fragment" : "make_timestamp_ltz(timestamp_ntz'2018-11-17 13:33:33', TIME'0:0:0')"
} ]
}


-- !query
SELECT make_timestamp_ltz(DATE'2025-06-20', timestamp_ntz'2018-11-17 13:33:33')
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"inputSql" : "\"TIMESTAMP_NTZ '2018-11-17 13:33:33'\"",
"inputType" : "\"TIMESTAMP_NTZ\"",
"paramIndex" : "second",
"requiredType" : "\"TIME\"",
"sqlExpr" : "\"make_timestamp(DATE '2025-06-20', TIMESTAMP_NTZ '2018-11-17 13:33:33')\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 79,
"fragment" : "make_timestamp_ltz(DATE'2025-06-20', timestamp_ntz'2018-11-17 13:33:33')"
} ]
}


-- !query
SELECT make_timestamp_ltz(make_date(2021, 07, 11), make_time(6, 30, 45.678), 'PST')
-- !query schema
struct<make_timestamp(make_date(2021, 7, 11), make_time(6, 30, 45.678), PST):timestamp>
-- !query output
2021-07-11 06:30:45.678


-- !query
SELECT make_timestamp_ltz(make_date(2021, 07, 11), make_time(6, 30, 45.678), 'CET')
-- !query schema
struct<make_timestamp(make_date(2021, 7, 11), make_time(6, 30, 45.678), CET):timestamp>
-- !query output
2021-07-10 21:30:45.678


-- !query
SELECT convert_timezone('Europe/Brussels', timestamp_ltz'2022-03-23 00:00:00 America/Los_Angeles')
-- !query schema
Expand Down