diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
index e5ac5a2f79767..4455b8a4ee3a1 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
@@ -359,23 +359,23 @@ cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argumen
-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct
+struct
-- !query output
-2078 hours 48 minutes 47.654322 seconds
+86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
-- !query schema
-struct
+struct
-- !query output
--2078 hours -48 minutes -47.654322 seconds
+-86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - null
-- !query schema
-struct
+struct
-- !query output
NULL

@@ -383,7 +383,7 @@ NULL
-- !query
select null - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct
+struct
-- !query output
NULL

@@ -625,7 +625,7 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi
-- !query
select null - date '2019-10-06'
-- !query schema
-struct
+struct
-- !query output
NULL

@@ -633,9 +633,9 @@ NULL
-- !query
select date '2001-10-01' - date '2001-09-28'
-- !query schema
-struct
+struct
-- !query output
-3 days
+3 00:00:00.000000000


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 217eb2b7235a2..7ff52139008c3 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -5,9 +5,9 @@
-- !query
select 3 * (timestamp'2019-10-15 10:11:12.001002' - date'2019-10-15')
-- !query schema
-struct
+struct
-- !query output
-30 hours 33 minutes 36.003006 seconds
+1 06:33:36.003006000


-- !query
@@ -21,9 +21,9 @@ struct
+struct
-- !query output
-16 hours
+0 16:00:00.000000000


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
index 2ee1bc724ad4d..28f899cc78097 100644
--- a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
@@ -194,7 +194,7 @@ View Text select * from char_tbl
View Original Text select * from char_tbl
View Catalog and Namespace spark_catalog.default
View Query Output Columns [c, v]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[]]


-- !query
@@ -343,7 +343,7 @@ View Text select * from char_tbl2
View Original Text select * from char_tbl2
View Catalog and Namespace spark_catalog.default
View Query Output Columns [c, v]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[]]


-- !query
@@ -402,7 +402,7 @@ View Text select * from char_tbl2
View Original Text select * from char_tbl2
View Catalog and Namespace spark_catalog.default
View Query Output Columns [c, v]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true, yes=no]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], yes=no]


-- !query
@@ -460,7 +460,7 @@ View Text select * from char_tbl2
View Original Text select * from char_tbl2
View Catalog and Namespace spark_catalog.default
View Query Output Columns [c, v]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=c, view.query.out.col.1=v, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[]]


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
index 0add0748bb3dc..ad312b20a9f15 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
@@ -336,23 +336,23 @@ cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argumen
-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct
+struct
-- !query output
-2078 hours 48 minutes 47.654322 seconds
+86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
-- !query schema
-struct
+struct
-- !query output
--2078 hours -48 minutes -47.654322 seconds
+-86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - null
-- !query schema
-struct
+struct
-- !query output
NULL

@@ -360,7 +360,7 @@ NULL
-- !query
select null - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct
+struct
-- !query output
NULL

@@ -602,7 +602,7 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi
-- !query
select null - date '2019-10-06'
-- !query schema
-struct
+struct
-- !query output
NULL

@@ -610,9 +610,9 @@ NULL
-- !query
select date '2001-10-01' - date '2001-09-28'
-- !query schema
-struct
+struct
-- !query output
-3 days
+3 00:00:00.000000000


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
index fadfd72ff722e..e93f0c8439efb 100755
--- a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
@@ -336,23 +336,23 @@ cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argumen
-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct
+struct
-- !query output
-2078 hours 48 minutes 47.654322 seconds
+86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
-- !query schema
-struct
+struct
-- !query output
--2078 hours -48 minutes -47.654322 seconds
+-86 13:48:47.654322000


-- !query
select timestamp'2019-10-06 10:11:12.345678' - null
-- !query schema
-struct
+struct
-- !query output
NULL

@@ -360,7 +360,7 @@ NULL
-- !query
select null - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
-struct
+struct
-- !query output
NULL

@@ -602,7 +602,7 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi
-- !query
select null - date '2019-10-06'
-- !query schema
-struct
+struct
-- !query output
NULL

@@ -610,9 +610,9 @@ NULL
-- !query
select date '2001-10-01' - date '2001-09-28'
-- !query schema
-struct
+struct
-- !query output
-3 days
+3 00:00:00.000000000


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
index 09430532733ec..0da70b0ba2b94 100644
--- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
@@ -477,7 +477,7 @@ View Text SELECT * FROM t
View Original Text SELECT * FROM t
View Catalog and Namespace spark_catalog.default
View Query Output Columns [a, b, c, d]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.col.2=c, view.query.out.col.3=d, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.col.2=c, view.query.out.col.3=d, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[]]


-- !query
@@ -501,7 +501,7 @@ View Text SELECT * FROM t
View Original Text SELECT * FROM t
View Catalog and Namespace spark_catalog.default
View Query Output Columns [a, b, c, d]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.col.2=c, view.query.out.col.3=d, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=default, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.col.2=c, view.query.out.col.3=d, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[]]


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index 4d663c742b2bd..43efa08296db0 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -5,9 +5,9 @@
-- !query
select 3 * (timestamp'2019-10-15 10:11:12.001002' - date'2019-10-15')
-- !query schema
-struct
+struct
-- !query output
-30 hours 33 minutes 36.003006 seconds
+1 06:33:36.003006000


-- !query
@@ -21,9 +21,9 @@ struct
+struct
-- !query output
-16 hours
+0 16:00:00.000000000


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
index 056c961e911f9..3b383c0f42719 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
@@ -257,7 +257,7 @@ View Text SELECT * FROM base_table
View Original Text SELECT * FROM base_table
View Catalog and Namespace spark_catalog.temp_view_test
View Query Output Columns [a, id]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -313,7 +313,7 @@ View Text SELECT * FROM base_table
View Original Text SELECT * FROM base_table
View Catalog and Namespace spark_catalog.temp_view_test
View Query Output Columns [a, id]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -359,7 +359,7 @@ View Original Text SELECT t1.a AS t1_a, t2.a AS t2_a
WHERE t1.id = t2.id
View Catalog and Namespace spark_catalog.temp_view_test
View Query Output Columns [t1_a, t2_a]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=t1_a, view.query.out.col.1=t2_a, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=t1_a, view.query.out.col.1=t2_a, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -413,7 +413,7 @@ View Text SELECT * FROM base_table WHERE id IN (SELECT id FROM base_t
View Original Text SELECT * FROM base_table WHERE id IN (SELECT id FROM base_table2)
View Catalog and Namespace spark_catalog.temp_view_test
View Query Output Columns [a, id]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -443,7 +443,7 @@ View Text SELECT t1.id, t2.a FROM base_table t1, (SELECT * FROM base_
View Original Text SELECT t1.id, t2.a FROM base_table t1, (SELECT * FROM base_table2) t2
View Catalog and Namespace spark_catalog.temp_view_test
View Query Output Columns [id, a]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=id, view.query.out.col.1=a, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=id, view.query.out.col.1=a, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -473,7 +473,7 @@ View Text SELECT * FROM base_table WHERE EXISTS (SELECT 1 FROM base_t
View Original Text SELECT * FROM base_table WHERE EXISTS (SELECT 1 FROM base_table2)
View Catalog and Namespace spark_catalog.temp_view_test
View Query Output Columns [a, id]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -503,7 +503,7 @@ View Text SELECT * FROM base_table WHERE NOT EXISTS (SELECT 1 FROM ba
View Original Text SELECT * FROM base_table WHERE NOT EXISTS (SELECT 1 FROM base_table2)
View Catalog and Namespace spark_catalog.temp_view_test
View Query Output Columns [a, id]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -533,7 +533,7 @@ View Text SELECT * FROM base_table WHERE EXISTS (SELECT 1)
View Original Text SELECT * FROM base_table WHERE EXISTS (SELECT 1)
View Catalog and Namespace spark_catalog.temp_view_test
View Query Output Columns [a, id]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=temp_view_test, view.query.out.col.0=a, view.query.out.col.1=id, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -669,7 +669,7 @@ View Text SELECT * FROM t1 CROSS JOIN t2
View Original Text SELECT * FROM t1 CROSS JOIN t2
View Catalog and Namespace spark_catalog.testviewschm2
View Query Output Columns [num, name, num2, value]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=num, view.query.out.col.1=name, view.query.out.col.2=num2, view.query.out.col.3=value, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=num, view.query.out.col.1=name, view.query.out.col.2=num2, view.query.out.col.3=value, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -710,7 +710,7 @@ View Text SELECT * FROM t1 INNER JOIN t2 ON t1.num = t2.num2
View Original Text SELECT * FROM t1 INNER JOIN t2 ON t1.num = t2.num2
View Catalog and Namespace spark_catalog.testviewschm2
View Query Output Columns [num, name, num2, value]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=num, view.query.out.col.1=name, view.query.out.col.2=num2, view.query.out.col.3=value, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=num, view.query.out.col.1=name, view.query.out.col.2=num2, view.query.out.col.3=value, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -751,7 +751,7 @@ View Text SELECT * FROM t1 LEFT JOIN t2 ON t1.num = t2.num2
View Original Text SELECT * FROM t1 LEFT JOIN t2 ON t1.num = t2.num2
View Catalog and Namespace spark_catalog.testviewschm2
View Query Output Columns [num, name, num2, value]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=num, view.query.out.col.1=name, view.query.out.col.2=num2, view.query.out.col.3=value, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=num, view.query.out.col.1=name, view.query.out.col.2=num2, view.query.out.col.3=value, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -792,7 +792,7 @@ View Text SELECT * FROM t1 LEFT JOIN t2 ON t1.num = t2.num2 AND t2.va
View Original Text SELECT * FROM t1 LEFT JOIN t2 ON t1.num = t2.num2 AND t2.value = 'xxx'
View Catalog and Namespace spark_catalog.testviewschm2
View Query Output Columns [num, name, num2, value]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=num, view.query.out.col.1=name, view.query.out.col.2=num2, view.query.out.col.3=value, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=num, view.query.out.col.1=name, view.query.out.col.2=num2, view.query.out.col.3=value, view.query.out.numCols=4, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -894,7 +894,7 @@ BETWEEN (SELECT d FROM tbl2 WHERE c = 1) AND (SELECT e FROM tbl3 WHERE f = 2)
AND EXISTS (SELECT g FROM tbl4 LEFT JOIN tbl3 ON tbl4.h = tbl3.f)
View Catalog and Namespace spark_catalog.testviewschm2
View Query Output Columns [a, b]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
@@ -933,7 +933,7 @@ AND EXISTS (SELECT g FROM tbl4 LEFT JOIN tbl3 ON tbl4.h = tbl3.f)
AND NOT EXISTS (SELECT g FROM tbl4 LEFT JOIN tmptbl ON tbl4.h = tmptbl.j)
View Catalog and Namespace spark_catalog.testviewschm2
View Query Output Columns [a, b]
-Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true, view.sqlConfig.spark.sql.legacy.interval.enabled=true]
+Table Properties [view.catalogAndNamespace.numParts=2, view.catalogAndNamespace.part.0=spark_catalog, view.catalogAndNamespace.part.1=testviewschm2, view.query.out.col.0=a, view.query.out.col.1=b, view.query.out.numCols=2, view.referredTempFunctionsNames=[], view.referredTempViewNames=[], view.sqlConfig.spark.sql.ansi.enabled=true]


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
index a959284750483..7c8da456be2b1 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
@@ -482,93 +482,93 @@ SELECT date '5874898-01-01'
-- !query
SELECT f1 - date '2000-01-01' AS `Days From 2K` FROM DATE_TBL
-- !query schema
-struct
--- !query output
--2 years -10 months
--2 years -10 months -1 days
--2 years -9 months -30 days
--3 years -10 months
--3 years -10 months -1 days
--3 years -10 months -2 days
--3 years -9 months -30 days
--42 years -6 months -18 days
--42 years -8 months -22 days
-3 months
-3 months 1 days
-3 months 2 days
-38 years 3 months 7 days
-39 years 3 months 8 days
-40 years 3 months 9 days
+struct
+-- !query output
+-1035 00:00:00.000000000
+-1036 00:00:00.000000000
+-1037 00:00:00.000000000
+-1400 00:00:00.000000000
+-1401 00:00:00.000000000
+-1402 00:00:00.000000000
+-1403 00:00:00.000000000
+-15542 00:00:00.000000000
+-15607 00:00:00.000000000
+13977 00:00:00.000000000
+14343 00:00:00.000000000
+14710 00:00:00.000000000
+91 00:00:00.000000000
+92 00:00:00.000000000
+93 00:00:00.000000000


-- !query
SELECT f1 - date 'epoch' AS `Days From Epoch` FROM DATE_TBL
-- !query schema
-struct
+struct
-- !query output
--12 years -6 months -18 days
--12 years -8 months -22 days
-26 years 1 months 27 days
-26 years 1 months 28 days
-26 years 2 months
-26 years 2 months 1 days
-27 years 1 months 27 days
-27 years 2 months
-27 years 2 months 1 days
-30 years 3 months
-30 years 3 months 1 days
-30 years 3 months 2 days
-68 years 3 months 7 days
-69 years 3 months 8 days
-70 years 3 months 9 days
+-4585 00:00:00.000000000
+-4650 00:00:00.000000000
+11048 00:00:00.000000000
+11049 00:00:00.000000000
+11050 00:00:00.000000000
+24934 00:00:00.000000000
+25300 00:00:00.000000000
+25667 00:00:00.000000000
+9554 00:00:00.000000000
+9555 00:00:00.000000000
+9556 00:00:00.000000000
+9557 00:00:00.000000000
+9920 00:00:00.000000000
+9921 00:00:00.000000000
+9922 00:00:00.000000000


-- !query
SELECT date 'yesterday' - date 'today' AS `One day`
-- !query schema
-struct
+struct
-- !query output
--1 days
+-1 00:00:00.000000000


-- !query
SELECT date 'today' - date 'tomorrow' AS `One day`
-- !query schema
-struct
+struct
-- !query output
--1 days
+-1 00:00:00.000000000


-- !query
SELECT date 'yesterday' - date 'tomorrow' AS `Two days`
-- !query schema
-struct
+struct
-- !query output
--2 days
+-2 00:00:00.000000000


-- !query
SELECT date 'tomorrow' - date 'today' AS `One day`
-- !query schema
-struct
+struct
-- !query output
-1 days
+1 00:00:00.000000000


-- !query
SELECT date 'today' - date 'yesterday' AS `One day`
-- !query schema
-struct
+struct
-- !query output
-1 days
+1 00:00:00.000000000


-- !query
SELECT date 'tomorrow' - date 'yesterday' AS `Two days`
-- !query schema
-struct
+struct
-- !query output
-2 days
+2 00:00:00.000000000


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/timestamp.sql.out
index 68d2b5c9ce1bf..d4293a24c6f16 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/timestamp.sql.out
@@ -217,13 +217,13 @@ struct<49:string,d1:timestamp>
SELECT '' AS `54`, d1 - timestamp '1997-01-02' AS diff
FROM TIMESTAMP_TBL WHERE d1 BETWEEN '1902-01-01' AND '2038-01-01'
-- !query schema
-struct<54:string,diff:interval>
+struct<54:string,diff:day-time interval>
-- !query output
- -236720 hours
- 0 seconds
- 3 hours 4 minutes 5 seconds
- 41393 hours 19 minutes 20 seconds
- 953 hours 32 minutes 1 seconds
+ -9863 08:00:00.000000000
+ 0 00:00:00.000000000
+ 0 03:04:05.000000000
+ 1724 18:19:20.000000000
+ 39 17:32:01.000000000


-- !query
@@ -240,13 +240,13 @@ SELECT '' AS `54`, d1 - timestamp '1997-01-02' AS diff
WHERE d1 BETWEEN timestamp '1902-01-01'
AND timestamp '2038-01-01'
-- !query schema
-struct<54:string,diff:interval>
+struct<54:string,diff:day-time interval>
-- !query output
- -236720 hours
- 0 seconds
- 3 hours 4 minutes 5 seconds
- 41393 hours 19 minutes 20 seconds
- 953 hours 32 minutes 1 seconds
+ -9863 08:00:00.000000000
+ 0 00:00:00.000000000
+ 0 03:04:05.000000000
+ 1724 18:19:20.000000000
+ 39 17:32:01.000000000


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out
index 1a0a66773d8f7..1008f9af0deb6 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out
@@ -66,7 +66,6 @@ view.query.out.col.0 c1
view.query.out.numCols 1
view.referredTempFunctionsNames []
view.referredTempViewNames []
-view.sqlConfig.spark.sql.legacy.interval.enabled true


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
index a2ad530f2d9af..56c45faf633d2 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
@@ -204,7 +204,7 @@ cannot resolve 'subtracttimestamps('1', CAST('2017-12-11 09:30:00.0' AS TIMESTAM
-- !query
SELECT '1' - cast('2017-12-11 09:30:00' as date) FROM t
-- !query schema
-struct
+struct
-- !query output
NULL

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index ddd290f4f9794..7c414c075f7c4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -372,8 +372,6 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true)
case _ =>
}
- // TODO(SPARK-34905): Enable ANSI intervals in SQLQueryTestSuite
- localSparkSession.conf.set(SQLConf.LEGACY_INTERVAL_ENABLED.key, true)

if (configSet.nonEmpty) {
// Execute the list of set operation in order to add the desired configs
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
index dd008f738e6eb..4a87be5f61195 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
@@ -97,8 +97,6 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite with SharedThriftServ
case _ =>
statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = false")
}
- // TODO(SPARK-34905): Enable ANSI intervals in ThriftServerQueryTestSuite
- statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key} = true")

// Run the SQL queries preparing them for comparison.
val outputs: Seq[QueryOutput] = queries.map { sql =>