@@ -359,31 +359,31 @@ cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argumen
-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct<subtracttimestamps(DATE '2020-01-01', TIMESTAMP '2019-10-06 10:11:12.345678'):interval>
+struct<subtracttimestamps(DATE '2020-01-01', TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval>
Contributor: Have we finalized the SQL name for the new interval types? How about other databases?

Member Author: Not yet. So far, I took the names for the sub-types from the SQL standard, see #31810.

Probably, we will need to redefine them when we implement parsing of interval types from SQL.
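
As a quick illustration of the rename (a sketch, assuming a Spark build with this change-set applied; the expected outputs are copied from the golden results in this diff), the new result type is observable with typeof:

-- Hedged sketch: datetime subtraction now yields a day-time interval.
SELECT typeof(date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678');
-- expected: day-time interval
SELECT date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678';
-- expected: 86 13:48:47.654322000  (i.e. 86 days 13:48:47.654322)
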
-- !query output
-2078 hours 48 minutes 47.654322 seconds
+86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
-- !query schema
-struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01-01'):interval>
+struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01-01'):day-time interval>
-- !query output
--2078 hours -48 minutes -47.654322 seconds
+-86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - null
-- !query schema
-struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', NULL):interval>
+struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', NULL):day-time interval>
-- !query output
NULL


-- !query
select null - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct<subtracttimestamps(NULL, TIMESTAMP '2019-10-06 10:11:12.345678'):interval>
+struct<subtracttimestamps(NULL, TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval>
-- !query output
NULL

@@ -625,17 +625,17 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi
-- !query
select null - date '2019-10-06'
-- !query schema
-struct<subtractdates(NULL, DATE '2019-10-06'):interval>
+struct<subtractdates(NULL, DATE '2019-10-06'):day-time interval>
-- !query output
NULL


-- !query
select date '2001-10-01' - date '2001-09-28'
-- !query schema
-struct<subtractdates(DATE '2001-10-01', DATE '2001-09-28'):interval>
+struct<subtractdates(DATE '2001-10-01', DATE '2001-09-28'):day-time interval>
-- !query output
-3 days
+3 00:00:00.000000000


-- !query
@@ -5,9 +5,9 @@
-- !query
select 3 * (timestamp'2019-10-15 10:11:12.001002' - date'2019-10-15')
-- !query schema
-struct<multiply_interval(subtracttimestamps(TIMESTAMP '2019-10-15 10:11:12.001002', DATE '2019-10-15'), 3):interval>
+struct<multiplydtinterval(subtracttimestamps(TIMESTAMP '2019-10-15 10:11:12.001002', DATE '2019-10-15'), 3):day-time interval>
-- !query output
-30 hours 33 minutes 36.003006 seconds
+1 06:33:36.003006000


-- !query
@@ -21,9 +21,9 @@ struct<multiply_interval(INTERVAL '4 months 14 days 0.000003 seconds', 1.5):inte
-- !query
select (timestamp'2019-10-15' - timestamp'2019-10-14') / 1.5
-- !query schema
-struct<divide_interval(subtracttimestamps(TIMESTAMP '2019-10-15 00:00:00', TIMESTAMP '2019-10-14 00:00:00'), 1.5):interval>
+struct<dividedtinterval(subtracttimestamps(TIMESTAMP '2019-10-15 00:00:00', TIMESTAMP '2019-10-14 00:00:00'), 1.5):day-time interval>
-- !query output
-16 hours
+0 16:00:00.000000000


-- !query
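
The renames above, multiply_interval to multiplydtinterval and divide_interval to dividedtinterval, are the day-time-interval variants of interval-by-numeric multiplication and division. A short sketch under the same assumption (a build with this change-set), with expected outputs copied from the golden results above:

-- Hedged sketch of day-time interval arithmetic.
SELECT 3 * (timestamp'2019-10-15 10:11:12.001002' - date'2019-10-15');
-- expected: 1 06:33:36.003006000  (3 * 10:11:12.001002 = 30:33:36.003006)
SELECT (timestamp'2019-10-15' - timestamp'2019-10-14') / 1.5;
-- expected: 0 16:00:00.000000000  (24 hours / 1.5 = 16 hours)
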
@@ -194,7 +194,7 @@ View Text select * from char_tbl
View Original Text select * from char_tbl
View Catalog and Namespace spark_catalog.default
View Query Output Columns [c, v]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[]]


-- !query
@@ -343,7 +343,7 @@ View Text select * from char_tbl2
View Original Text select * from char_tbl2
View Catalog and Namespace spark_catalog.default
View Query Output Columns [c, v]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[]]


-- !query
@@ -402,7 +402,7 @@ View Text select * from char_tbl2
View Original Text select * from char_tbl2
View Catalog and Namespace spark_catalog.default
View Query Output Columns [c, v]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true, yes=no]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], yes=no]


-- !query
@@ -460,7 +460,7 @@ View Text select * from char_tbl2
View Original Text select * from char_tbl2
View Catalog and Namespace spark_catalog.default
View Query Output Columns [c, v]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[]]


-- !query
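The view diffs above drop view.sqlConfig.spark.sql.legacy.interval.enabled=true from Table Properties: with the new interval types on by default, view creation no longer captures that legacy config. A hedged way to check (v_chars is a hypothetical view name; the property key comes from the old golden output):

-- Hedged sketch; assumes char_tbl exists as in the tests above.
CREATE OR REPLACE VIEW v_chars AS SELECT * FROM char_tbl;
SHOW TBLPROPERTIES v_chars;
-- expect no view.sqlConfig.spark.sql.legacy.interval.enabled entry
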
@@ -336,31 +336,31 @@ cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argumen
-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct<subtracttimestamps(DATE '2020-01-01', TIMESTAMP '2019-10-06 10:11:12.345678'):interval>
+struct<subtracttimestamps(DATE '2020-01-01', TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval>
-- !query output
-2078 hours 48 minutes 47.654322 seconds
+86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
-- !query schema
-struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01-01'):interval>
+struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01-01'):day-time interval>
-- !query output
--2078 hours -48 minutes -47.654322 seconds
+-86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - null
-- !query schema
-struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', NULL):interval>
+struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', NULL):day-time interval>
-- !query output
NULL


-- !query
select null - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct<subtracttimestamps(NULL, TIMESTAMP '2019-10-06 10:11:12.345678'):interval>
+struct<subtracttimestamps(NULL, TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval>
-- !query output
NULL

@@ -602,17 +602,17 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi
-- !query
select null - date '2019-10-06'
-- !query schema
-struct<subtractdates(NULL, DATE '2019-10-06'):interval>
+struct<subtractdates(NULL, DATE '2019-10-06'):day-time interval>
-- !query output
NULL


-- !query
select date '2001-10-01' - date '2001-09-28'
-- !query schema
-struct<subtractdates(DATE '2001-10-01', DATE '2001-09-28'):interval>
+struct<subtractdates(DATE '2001-10-01', DATE '2001-09-28'):day-time interval>
-- !query output
-3 days
+3 00:00:00.000000000


-- !query
18 changes: 9 additions & 9 deletions sql/core/src/test/resources/sql-tests/results/datetime.sql.out
@@ -336,31 +336,31 @@ cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argumen
-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct<subtracttimestamps(DATE '2020-01-01', TIMESTAMP '2019-10-06 10:11:12.345678'):interval>
+struct<subtracttimestamps(DATE '2020-01-01', TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval>
-- !query output
-2078 hours 48 minutes 47.654322 seconds
+86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
-- !query schema
-struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01-01'):interval>
+struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01-01'):day-time interval>
-- !query output
--2078 hours -48 minutes -47.654322 seconds
+-86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - null
-- !query schema
-struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', NULL):interval>
+struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', NULL):day-time interval>
-- !query output
NULL


-- !query
select null - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct<subtracttimestamps(NULL, TIMESTAMP '2019-10-06 10:11:12.345678'):interval>
+struct<subtracttimestamps(NULL, TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval>
-- !query output
NULL

@@ -602,17 +602,17 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi
-- !query
select null - date '2019-10-06'
-- !query schema
-struct<subtractdates(NULL, DATE '2019-10-06'):interval>
+struct<subtractdates(NULL, DATE '2019-10-06'):day-time interval>
-- !query output
NULL


-- !query
select date '2001-10-01' - date '2001-09-28'
-- !query schema
-struct<subtractdates(DATE '2001-10-01', DATE '2001-09-28'):interval>
+struct<subtractdates(DATE '2001-10-01', DATE '2001-09-28'):day-time interval>
-- !query output
-3 days
+3 00:00:00.000000000


-- !query
@@ -477,7 +477,7 @@ View Text SELECT * FROM t
View Original Text SELECT * FROM t
View Catalog and Namespace spark_catalog.default
View Query Output Columns [a, b, c, d]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.col.2=c, view.query.out.col.3=d, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.col.2=c, view.query.out.col.3=d, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[]]


-- !query
@@ -501,7 +501,7 @@ View Text SELECT * FROM t
View Original Text SELECT * FROM t
View Catalog and Namespace spark_catalog.default
View Query Output Columns [a, b, c, d]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.col.2=c, view.query.out.col.3=d, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.col.2=c, view.query.out.col.3=d, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[]]


-- !query
@@ -5,9 +5,9 @@
-- !query
select 3 * (timestamp'2019-10-15 10:11:12.001002' - date'2019-10-15')
-- !query schema
-struct<multiply_interval(subtracttimestamps(TIMESTAMP '2019-10-15 10:11:12.001002', DATE '2019-10-15'), 3):interval>
+struct<multiplydtinterval(subtracttimestamps(TIMESTAMP '2019-10-15 10:11:12.001002', DATE '2019-10-15'), 3):day-time interval>
-- !query output
-30 hours 33 minutes 36.003006 seconds
+1 06:33:36.003006000


-- !query
@@ -21,9 +21,9 @@ struct<multiply_interval(INTERVAL '4 months 14 days 0.000003 seconds', 1.5):inte
-- !query
select (timestamp'2019-10-15' - timestamp'2019-10-14') / 1.5
-- !query schema
-struct<divide_interval(subtracttimestamps(TIMESTAMP '2019-10-15 00:00:00', TIMESTAMP '2019-10-14 00:00:00'), 1.5):interval>
+struct<dividedtinterval(subtracttimestamps(TIMESTAMP '2019-10-15 00:00:00', TIMESTAMP '2019-10-14 00:00:00'), 1.5):day-time interval>
-- !query output
-16 hours
+0 16:00:00.000000000


-- !query