diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 index cc273fd36011..c4432e580ab8 100644 --- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 +++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 @@ -49,6 +49,11 @@ grammar SqlBase; * When true, the behavior of keywords follows ANSI SQL standard. */ public boolean SQL_standard_keyword_behavior = false; + + /** + * When true, the INTERVAL keyword can be omitted before interval values in SQL statements. + */ + public boolean optional_interval_prefix = false; } singleStatement @@ -788,7 +793,7 @@ booleanValue interval : INTERVAL (errorCapturingMultiUnitsInterval | errorCapturingUnitToUnitInterval)? - | {SQL_standard_keyword_behavior}? (errorCapturingMultiUnitsInterval | errorCapturingUnitToUnitInterval) + | {optional_interval_prefix}? (errorCapturingMultiUnitsInterval | errorCapturingUnitToUnitInterval) ; errorCapturingMultiUnitsInterval diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala index 30c36598d81d..825a583e9e36 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala @@ -96,11 +96,21 @@ abstract class AbstractSqlParser(conf: SQLConf) extends ParserInterface with Log case Dialect.SPARK => conf.dialectSparkAnsiEnabled } + // The PostgreSQL dialect never allows omitting the INTERVAL keyword. + // In the Spark dialect, the parser accepts interval values without the + // INTERVAL keyword only when both `spark.sql.dialect.spark.ansi.enabled=true` + // and `spark.sql.parser.optionalIntervalPrefix=true` are set.
+ val optionalIntervalPrefix = conf.dialect match { + case Dialect.POSTGRESQL => false + case Dialect.SPARK => conf.dialectSparkAnsiEnabled && conf.optionalIntervalPrefix + } + val lexer = new SqlBaseLexer(new UpperCaseCharStream(CharStreams.fromString(command))) lexer.removeErrorListeners() lexer.addErrorListener(ParseErrorListener) lexer.legacy_setops_precedence_enbled = conf.setOpsPrecedenceEnforced lexer.SQL_standard_keyword_behavior = SQLStandardKeywordBehavior + lexer.optional_interval_prefix = optionalIntervalPrefix val tokenStream = new CommonTokenStream(lexer) val parser = new SqlBaseParser(tokenStream) @@ -109,6 +119,7 @@ abstract class AbstractSqlParser(conf: SQLConf) extends ParserInterface with Log parser.addErrorListener(ParseErrorListener) parser.legacy_setops_precedence_enbled = conf.setOpsPrecedenceEnforced parser.SQL_standard_keyword_behavior = SQLStandardKeywordBehavior + parser.optional_interval_prefix = optionalIntervalPrefix try { try { diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index 74046cd91c96..37f0762dd086 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -1787,6 +1787,13 @@ object SQLConf { val SQL_STANDARD, ISO_8601, MULTI_UNITS = Value } + val OPTIONAL_INTERVAL_PREFIX = + buildConf("spark.sql.parser.optionalIntervalPrefix") + .doc("When true, the INTERVAL keyword can be omitted before interval values in SQL statements," + + " e.g., 'SELECT 1 day'.") + .booleanConf + .createWithDefault(false) + val INTERVAL_STYLE = buildConf("spark.sql.intervalOutputStyle") .doc("When converting interval values to strings (i.e. for display), this config decides the" + " interval string format.
The value SQL_STANDARD will produce output matching SQL standard" + @@ -2513,6 +2520,8 @@ class SQLConf extends Serializable with Logging { def storeAssignmentPolicy: StoreAssignmentPolicy.Value = StoreAssignmentPolicy.withName(getConf(STORE_ASSIGNMENT_POLICY)) + def optionalIntervalPrefix: Boolean = getConf(OPTIONAL_INTERVAL_PREFIX) + def intervalOutputStyle: IntervalStyle.Value = IntervalStyle.withName(getConf(INTERVAL_STYLE)) def dialect: Dialect.Value = Dialect.withName(getConf(DIALECT)) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index 371b702722a6..3c40c9fba36c 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -615,8 +615,10 @@ class ExpressionParserSuite extends AnalysisTest { ).foreach { case (sign, expectedLiteral) => assertEqual(s"${sign}interval $intervalValue", expectedLiteral) - // SPARK-23264 Support interval values without INTERVAL clauses if ANSI SQL enabled - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + // Checks if we can make INTERVAL optional + withSQLConf( + SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true", + SQLConf.OPTIONAL_INTERVAL_PREFIX.key -> "true") { assertEqual(intervalValue, expected) } } @@ -703,17 +705,23 @@ class ExpressionParserSuite extends AnalysisTest { test("SPARK-23264 Interval Compatibility tests") { def checkIntervals(intervalValue: String, expected: Literal): Unit = { - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf( + SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true", + SQLConf.OPTIONAL_INTERVAL_PREFIX.key -> "true") { assertEqual(intervalValue, expected) } // Compatibility tests: If ANSI SQL disabled, `intervalValue` should be parsed as an alias - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false") { - val aliases = defaultParser.parseExpression(intervalValue).collect { - case a @ Alias(_: Literal, name) - if intervalUnits.exists { unit => name.startsWith(unit.toString) } => a + Seq("true", "false").foreach { optionalIntervalPrefix => + withSQLConf( + SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false", + SQLConf.OPTIONAL_INTERVAL_PREFIX.key -> optionalIntervalPrefix) { + val aliases = defaultParser.parseExpression(intervalValue).collect { + case a @ Alias(_: Literal, name) + if intervalUnits.exists { unit => name.startsWith(unit.toString) } => a + } + assert(aliases.size === 1) } - assert(aliases.size === 1) } } val forms = Seq("", "s") diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql b/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql index 087914eebb07..beb851df9463 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql @@ -1,17 +1,6 @@ +--SET spark.sql.parser.optionalIntervalPrefix=false --IMPORT interval.sql --- the `interval` keyword can be omitted with ansi mode -select 1 year 2 days; -select '10-9' year to month; -select '20 15:40:32.99899999' day to second; -select 30 day day; -select date'2012-01-01' - '2-2' year to month; -select 1 month - 1 day; - --- malformed interval literal with ansi mode -select 1 year to month; -select '1' year to second; -select 1 year '2-1' year to month; -select (-30) day; -select (a + 1) day; -select 30 day day 
day; \ No newline at end of file +-- Cannot make INTERVAL keywords optional with the ANSI mode enabled and +-- `spark.sql.parser.optionalIntervalPrefix=false`. +--IMPORT ansi/optional-interval.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/optional-interval.sql b/sql/core/src/test/resources/sql-tests/inputs/ansi/optional-interval.sql new file mode 100644 index 000000000000..08d720d2cbe5 --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/inputs/ansi/optional-interval.sql @@ -0,0 +1,20 @@ +--SET spark.sql.parser.optionalIntervalPrefix=true +--IMPORT interval.sql + +-- the `interval` keyword can be omitted with ansi mode enabled and +-- `spark.sql.parser.optionalIntervalPrefix=true`. +select 1 year 2 days; +select '10-9' year to month; +select '20 15:40:32.99899999' day to second; +select 30 day day; +select date'2012-01-01' - '2-2' year to month; +select 1 month - 1 day; + +-- malformed interval literal with ansi mode enabled and +-- `spark.sql.parser.optionalIntervalPrefix=true`. +select 1 year to month; +select '1' year to second; +select 1 year '2-1' year to month; +select (-30) day; +select (a + 1) day; +select 30 day day day; \ No newline at end of file diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index bceb6bd1d2ea..bc66c9a2612a 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -1118,25 +1118,43 @@ struct<(INTERVAL '99 days 11 hours 22 minutes 33.123456 seconds' + INTERVAL '10 -- !query 118 select 1 year 2 days -- !query 118 schema -struct +struct<> -- !query 118 output -1 years 2 days +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input '1'(line 1, pos 7) + +== SQL == +select 1 year 2 days +-------^^^ -- !query 119 select '10-9' year to month -- !query 119 schema -struct +struct<> -- !query 119 output -10 years 9 months +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input ''10-9''(line 1, pos 7) + +== SQL == +select '10-9' year to month +-------^^^ -- !query 120 select '20 15:40:32.99899999' day to second -- !query 120 schema -struct +struct<> -- !query 120 output -20 days 15 hours 40 minutes 32.998999 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input ''20 15:40:32.99899999''(line 1, pos 7) + +== SQL == +select '20 15:40:32.99899999' day to second +-------^^^ -- !query 121 @@ -1146,27 +1164,39 @@ struct<> -- !query 121 output org.apache.spark.sql.catalyst.parser.ParseException -no viable alternative at input 'day'(line 1, pos 14) +no viable alternative at input '30'(line 1, pos 7) == SQL == select 30 day day ---------------^^^ +-------^^^ -- !query 122 select date'2012-01-01' - '2-2' year to month -- !query 122 schema -struct +struct<> -- !query 122 output -2009-11-01 +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input ''2-2''(line 1, pos 26) + +== SQL == +select date'2012-01-01' - '2-2' year to month +--------------------------^^^ -- !query 123 select 1 month - 1 day -- !query 123 schema -struct +struct<> -- !query 123 output -1 months -1 days +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input '1'(line 1, pos 7) + +== SQL == +select 1 month - 1 day +-------^^^ -- !query 124 @@ -1176,7 +1206,7 @@ struct<> -- !query 124 output 
org.apache.spark.sql.catalyst.parser.ParseException -The value of from-to unit must be a string(line 1, pos 7) +no viable alternative at input '1'(line 1, pos 7) == SQL == select 1 year to month @@ -1190,7 +1220,7 @@ struct<> -- !query 125 output org.apache.spark.sql.catalyst.parser.ParseException -Intervals FROM year TO second are not supported.(line 1, pos 7) +no viable alternative at input ''1''(line 1, pos 7) == SQL == select '1' year to second @@ -1204,11 +1234,11 @@ struct<> -- !query 126 output org.apache.spark.sql.catalyst.parser.ParseException -Can only have a single from-to unit in the interval literal syntax(line 1, pos 14) +no viable alternative at input '1'(line 1, pos 7) == SQL == select 1 year '2-1' year to month ---------------^^^ +-------^^^ -- !query 127 @@ -1246,8 +1276,8 @@ struct<> -- !query 129 output org.apache.spark.sql.catalyst.parser.ParseException -no viable alternative at input 'day'(line 1, pos 14) +no viable alternative at input '30'(line 1, pos 7) == SQL == select 30 day day day ---------------^^^ +-------^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/optional-interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/optional-interval.sql.out new file mode 100644 index 000000000000..bceb6bd1d2ea --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/results/ansi/optional-interval.sql.out @@ -0,0 +1,1253 @@ +-- Automatically generated by SQLQueryTestSuite +-- Number of queries: 130 + + +-- !query 0 +select interval '1 day' > interval '23 hour' +-- !query 0 schema +struct<(INTERVAL '1 days' > INTERVAL '23 hours'):boolean> +-- !query 0 output +true + + +-- !query 1 +select interval '-1 day' >= interval '-23 hour' +-- !query 1 schema +struct<(INTERVAL '-1 days' >= INTERVAL '-23 hours'):boolean> +-- !query 1 output +false + + +-- !query 2 +select interval '-1 day' > null +-- !query 2 schema +struct<(INTERVAL '-1 days' > CAST(NULL AS INTERVAL)):boolean> +-- !query 2 output +NULL + + +-- !query 3 +select null > interval '-1 day' +-- !query 3 schema +struct<(CAST(NULL AS INTERVAL) > INTERVAL '-1 days'):boolean> +-- !query 3 output +NULL + + +-- !query 4 +select interval '1 minutes' < interval '1 hour' +-- !query 4 schema +struct<(INTERVAL '1 minutes' < INTERVAL '1 hours'):boolean> +-- !query 4 output +true + + +-- !query 5 +select interval '-1 day' <= interval '-23 hour' +-- !query 5 schema +struct<(INTERVAL '-1 days' <= INTERVAL '-23 hours'):boolean> +-- !query 5 output +true + + +-- !query 6 +select interval '1 year' = interval '360 days' +-- !query 6 schema +struct<(INTERVAL '1 years' = INTERVAL '360 days'):boolean> +-- !query 6 output +true + + +-- !query 7 +select interval '1 year 2 month' = interval '420 days' +-- !query 7 schema +struct<(INTERVAL '1 years 2 months' = INTERVAL '420 days'):boolean> +-- !query 7 output +true + + +-- !query 8 +select interval '1 year' = interval '365 days' +-- !query 8 schema +struct<(INTERVAL '1 years' = INTERVAL '365 days'):boolean> +-- !query 8 output +false + + +-- !query 9 +select interval '1 month' = interval '30 days' +-- !query 9 schema +struct<(INTERVAL '1 months' = INTERVAL '30 days'):boolean> +-- !query 9 output +true + + +-- !query 10 +select interval '1 minutes' = interval '1 hour' +-- !query 10 schema +struct<(INTERVAL '1 minutes' = INTERVAL '1 hours'):boolean> +-- !query 10 output +false + + +-- !query 11 +select interval '1 minutes' = null +-- !query 11 schema +struct<(INTERVAL '1 minutes' = CAST(NULL AS INTERVAL)):boolean> +-- !query 11 output +NULL + + +-- !query 12 +select 
null = interval '-1 day' +-- !query 12 schema +struct<(CAST(NULL AS INTERVAL) = INTERVAL '-1 days'):boolean> +-- !query 12 output +NULL + + +-- !query 13 +select interval '1 minutes' <=> null +-- !query 13 schema +struct<(INTERVAL '1 minutes' <=> CAST(NULL AS INTERVAL)):boolean> +-- !query 13 output +false + + +-- !query 14 +select null <=> interval '1 minutes' +-- !query 14 schema +struct<(CAST(NULL AS INTERVAL) <=> INTERVAL '1 minutes'):boolean> +-- !query 14 output +false + + +-- !query 15 +select INTERVAL '9 years 1 months -1 weeks -4 days -10 hours -46 minutes' > interval '1 minutes' +-- !query 15 schema +struct<(INTERVAL '9 years 1 months -11 days -10 hours -46 minutes' > INTERVAL '1 minutes'):boolean> +-- !query 15 output +true + + +-- !query 16 +select cast(v as interval) i from VALUES ('1 seconds'), ('4 seconds'), ('3 seconds') t(v) order by i +-- !query 16 schema +struct +-- !query 16 output +1 seconds +3 seconds +4 seconds + + +-- !query 17 +select interval '1 month 120 days' > interval '2 month' +-- !query 17 schema +struct<(INTERVAL '1 months 120 days' > INTERVAL '2 months'):boolean> +-- !query 17 output +true + + +-- !query 18 +select interval '1 month 30 days' = interval '2 month' +-- !query 18 schema +struct<(INTERVAL '1 months 30 days' = INTERVAL '2 months'):boolean> +-- !query 18 output +true + + +-- !query 19 +select interval '1 month 29 days 40 hours' > interval '2 month' +-- !query 19 schema +struct<(INTERVAL '1 months 29 days 40 hours' > INTERVAL '2 months'):boolean> +-- !query 19 output +true + + +-- !query 20 +select max(cast(v as interval)) from VALUES ('1 seconds'), ('4 seconds'), ('3 seconds') t(v) +-- !query 20 schema +struct +-- !query 20 output +4 seconds + + +-- !query 21 +select min(cast(v as interval)) from VALUES ('1 seconds'), ('4 seconds'), ('3 seconds') t(v) +-- !query 21 schema +struct +-- !query 21 output +1 seconds + + +-- !query 22 +select 3 * (timestamp'2019-10-15 10:11:12.001002' - date'2019-10-15') +-- !query 22 schema +struct +-- !query 22 output +30 hours 33 minutes 36.003006 seconds + + +-- !query 23 +select interval 4 month 2 weeks 3 microseconds * 1.5 +-- !query 23 schema +struct +-- !query 23 output +6 months 21 days 0.000005 seconds + + +-- !query 24 +select (timestamp'2019-10-15' - timestamp'2019-10-14') / 1.5 +-- !query 24 schema +struct +-- !query 24 output +16 hours + + +-- !query 25 +select interval '2 seconds' / 0 +-- !query 25 schema +struct +-- !query 25 output +NULL + + +-- !query 26 +select interval '2 seconds' / null +-- !query 26 schema +struct +-- !query 26 output +NULL + + +-- !query 27 +select interval '2 seconds' * null +-- !query 27 schema +struct +-- !query 27 output +NULL + + +-- !query 28 +select null * interval '2 seconds' +-- !query 28 schema +struct +-- !query 28 output +NULL + + +-- !query 29 +select -interval '-1 month 1 day -1 second' +-- !query 29 schema +struct<(- INTERVAL '-1 months 1 days -1 seconds'):interval> +-- !query 29 output +1 months -1 days 1 seconds + + +-- !query 30 +select -interval -1 month 1 day -1 second +-- !query 30 schema +struct<(- INTERVAL '-1 months 1 days -1 seconds'):interval> +-- !query 30 output +1 months -1 days 1 seconds + + +-- !query 31 +select +interval '-1 month 1 day -1 second' +-- !query 31 schema +struct +-- !query 31 output +-1 months 1 days -1 seconds + + +-- !query 32 +select +interval -1 month 1 day -1 second +-- !query 32 schema +struct +-- !query 32 output +-1 months 1 days -1 seconds + + +-- !query 33 +select make_interval(1) +-- !query 33 schema +struct +-- !query 33 
output +1 years + + +-- !query 34 +select make_interval(1, 2) +-- !query 34 schema +struct +-- !query 34 output +1 years 2 months + + +-- !query 35 +select make_interval(1, 2, 3) +-- !query 35 schema +struct +-- !query 35 output +1 years 2 months 21 days + + +-- !query 36 +select make_interval(1, 2, 3, 4) +-- !query 36 schema +struct +-- !query 36 output +1 years 2 months 25 days + + +-- !query 37 +select make_interval(1, 2, 3, 4, 5) +-- !query 37 schema +struct +-- !query 37 output +1 years 2 months 25 days 5 hours + + +-- !query 38 +select make_interval(1, 2, 3, 4, 5, 6) +-- !query 38 schema +struct +-- !query 38 output +1 years 2 months 25 days 5 hours 6 minutes + + +-- !query 39 +select make_interval(1, 2, 3, 4, 5, 6, 7.008009) +-- !query 39 schema +struct +-- !query 39 output +1 years 2 months 25 days 5 hours 6 minutes 7.008009 seconds + + +-- !query 40 +select cast('1 second' as interval) +-- !query 40 schema +struct +-- !query 40 output +1 seconds + + +-- !query 41 +select cast('+1 second' as interval) +-- !query 41 schema +struct +-- !query 41 output +1 seconds + + +-- !query 42 +select cast('-1 second' as interval) +-- !query 42 schema +struct +-- !query 42 output +-1 seconds + + +-- !query 43 +select cast('+ 1 second' as interval) +-- !query 43 schema +struct +-- !query 43 output +1 seconds + + +-- !query 44 +select cast('- 1 second' as interval) +-- !query 44 schema +struct +-- !query 44 output +-1 seconds + + +-- !query 45 +select cast('- -1 second' as interval) +-- !query 45 schema +struct +-- !query 45 output +NULL + + +-- !query 46 +select cast('- +1 second' as interval) +-- !query 46 schema +struct +-- !query 46 output +NULL + + +-- !query 47 +select justify_days(cast(null as interval)) +-- !query 47 schema +struct +-- !query 47 output +NULL + + +-- !query 48 +select justify_hours(cast(null as interval)) +-- !query 48 schema +struct +-- !query 48 output +NULL + + +-- !query 49 +select justify_interval(cast(null as interval)) +-- !query 49 schema +struct +-- !query 49 output +NULL + + +-- !query 50 +select justify_days(interval '1 month 59 day 25 hour') +-- !query 50 schema +struct +-- !query 50 output +2 months 29 days 25 hours + + +-- !query 51 +select justify_hours(interval '1 month 59 day 25 hour') +-- !query 51 schema +struct +-- !query 51 output +1 months 60 days 1 hours + + +-- !query 52 +select justify_interval(interval '1 month 59 day 25 hour') +-- !query 52 schema +struct +-- !query 52 output +3 months 1 hours + + +-- !query 53 +select justify_days(interval '1 month -59 day 25 hour') +-- !query 53 schema +struct +-- !query 53 output +-29 days 25 hours + + +-- !query 54 +select justify_hours(interval '1 month -59 day 25 hour') +-- !query 54 schema +struct +-- !query 54 output +1 months -57 days -23 hours + + +-- !query 55 +select justify_interval(interval '1 month -59 day 25 hour') +-- !query 55 schema +struct +-- !query 55 output +-27 days -23 hours + + +-- !query 56 +select justify_days(interval '1 month 59 day -25 hour') +-- !query 56 schema +struct +-- !query 56 output +2 months 29 days -25 hours + + +-- !query 57 +select justify_hours(interval '1 month 59 day -25 hour') +-- !query 57 schema +struct +-- !query 57 output +1 months 57 days 23 hours + + +-- !query 58 +select justify_interval(interval '1 month 59 day -25 hour') +-- !query 58 schema +struct +-- !query 58 output +2 months 27 days 23 hours + + +-- !query 59 +select interval 13.123456789 seconds, interval -13.123456789 second +-- !query 59 schema +struct +-- !query 59 output +13.123456 seconds 
-13.123456 seconds + + +-- !query 60 +select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond +-- !query 60 schema +struct +-- !query 60 output +1 years 2 months 25 days 5 hours 6 minutes 7.008009 seconds + + +-- !query 61 +select interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second +-- !query 61 schema +struct +-- !query 61 output +32 years 1 months -100 days 41 hours 24 minutes 59.889987 seconds + + +-- !query 62 +select interval '0 0:0:0.1' day to second +-- !query 62 schema +struct +-- !query 62 output +0.1 seconds + + +-- !query 63 +select interval '10-9' year to month +-- !query 63 schema +struct +-- !query 63 output +10 years 9 months + + +-- !query 64 +select interval '20 15:40:32.99899999' day to hour +-- !query 64 schema +struct +-- !query 64 output +20 days 15 hours + + +-- !query 65 +select interval '20 15:40:32.99899999' day to minute +-- !query 65 schema +struct +-- !query 65 output +20 days 15 hours 40 minutes + + +-- !query 66 +select interval '20 15:40:32.99899999' day to second +-- !query 66 schema +struct +-- !query 66 output +20 days 15 hours 40 minutes 32.998999 seconds + + +-- !query 67 +select interval '15:40:32.99899999' hour to minute +-- !query 67 schema +struct +-- !query 67 output +15 hours 40 minutes + + +-- !query 68 +select interval '15:40.99899999' hour to second +-- !query 68 schema +struct +-- !query 68 output +15 minutes 40.998999 seconds + + +-- !query 69 +select interval '15:40' hour to second +-- !query 69 schema +struct +-- !query 69 output +15 hours 40 minutes + + +-- !query 70 +select interval '15:40:32.99899999' hour to second +-- !query 70 schema +struct +-- !query 70 output +15 hours 40 minutes 32.998999 seconds + + +-- !query 71 +select interval '20 40:32.99899999' minute to second +-- !query 71 schema +struct +-- !query 71 output +20 days 40 minutes 32.998999 seconds + + +-- !query 72 +select interval '40:32.99899999' minute to second +-- !query 72 schema +struct +-- !query 72 output +40 minutes 32.998999 seconds + + +-- !query 73 +select interval '40:32' minute to second +-- !query 73 schema +struct +-- !query 73 output +40 minutes 32 seconds + + +-- !query 74 +select interval 30 day day +-- !query 74 schema +struct<> +-- !query 74 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 23) + +== SQL == +select interval 30 day day +-----------------------^^^ + + +-- !query 75 +select interval 10 nanoseconds +-- !query 75 schema +struct<> +-- !query 75 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input '10 nanoseconds'(line 1, pos 19) + +== SQL == +select interval 10 nanoseconds +-------------------^^^ + + +-- !query 76 +select map(1, interval 1 day, 2, interval 3 week) +-- !query 76 schema +struct> +-- !query 76 output +{1:1 days,2:21 days} + + +-- !query 77 +select interval 'interval 3 year 1 hour' +-- !query 77 schema +struct +-- !query 77 output +3 years 1 hours + + +-- !query 78 +select interval '3 year 1 hour' +-- !query 78 schema +struct +-- !query 78 output +3 years 1 hours + + +-- !query 79 +select interval +-- !query 79 schema +struct<> +-- !query 79 output +org.apache.spark.sql.catalyst.parser.ParseException + +at least one time unit should be given for interval literal(line 1, pos 7) + +== SQL == +select interval +-------^^^ + + +-- !query 80 +select interval 1 fake_unit +-- !query 80 schema +struct<> +-- !query 80 output 
+org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input '1 fake_unit'(line 1, pos 18) + +== SQL == +select interval 1 fake_unit +------------------^^^ + + +-- !query 81 +select interval 1 year to month +-- !query 81 schema +struct<> +-- !query 81 output +org.apache.spark.sql.catalyst.parser.ParseException + +The value of from-to unit must be a string(line 1, pos 16) + +== SQL == +select interval 1 year to month +----------------^^^ + + +-- !query 82 +select interval '1' year to second +-- !query 82 schema +struct<> +-- !query 82 output +org.apache.spark.sql.catalyst.parser.ParseException + +Intervals FROM year TO second are not supported.(line 1, pos 16) + +== SQL == +select interval '1' year to second +----------------^^^ + + +-- !query 83 +select interval '10-9' year to month '2-1' year to month +-- !query 83 schema +struct<> +-- !query 83 output +org.apache.spark.sql.catalyst.parser.ParseException + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) + +== SQL == +select interval '10-9' year to month '2-1' year to month +-------------------------------------^^^ + + +-- !query 84 +select interval '10-9' year to month '12:11:10' hour to second +-- !query 84 schema +struct<> +-- !query 84 output +org.apache.spark.sql.catalyst.parser.ParseException + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) + +== SQL == +select interval '10-9' year to month '12:11:10' hour to second +-------------------------------------^^^ + + +-- !query 85 +select interval '1 15:11' day to minute '12:11:10' hour to second +-- !query 85 schema +struct<> +-- !query 85 output +org.apache.spark.sql.catalyst.parser.ParseException + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 40) + +== SQL == +select interval '1 15:11' day to minute '12:11:10' hour to second +----------------------------------------^^^ + + +-- !query 86 +select interval 1 year '2-1' year to month +-- !query 86 schema +struct<> +-- !query 86 output +org.apache.spark.sql.catalyst.parser.ParseException + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 23) + +== SQL == +select interval 1 year '2-1' year to month +-----------------------^^^ + + +-- !query 87 +select interval 1 year '12:11:10' hour to second +-- !query 87 schema +struct<> +-- !query 87 output +org.apache.spark.sql.catalyst.parser.ParseException + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 23) + +== SQL == +select interval 1 year '12:11:10' hour to second +-----------------------^^^ + + +-- !query 88 +select interval '10-9' year to month '1' year +-- !query 88 schema +struct<> +-- !query 88 output +org.apache.spark.sql.catalyst.parser.ParseException + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) + +== SQL == +select interval '10-9' year to month '1' year +-------------------------------------^^^ + + +-- !query 89 +select interval '12:11:10' hour to second '1' year +-- !query 89 schema +struct<> +-- !query 89 output +org.apache.spark.sql.catalyst.parser.ParseException + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 42) + +== SQL == +select interval '12:11:10' hour to second '1' year +------------------------------------------^^^ + + +-- !query 90 +select interval (-30) day +-- !query 90 schema +struct<> +-- !query 90 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, 
pos 22) + +== SQL == +select interval (-30) day +----------------------^^^ + + +-- !query 91 +select interval (a + 1) day +-- !query 91 schema +struct<> +-- !query 91 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 24) + +== SQL == +select interval (a + 1) day +------------------------^^^ + + +-- !query 92 +select interval 30 day day day +-- !query 92 schema +struct<> +-- !query 92 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 23) + +== SQL == +select interval 30 day day day +-----------------------^^^ + + +-- !query 93 +select sum(cast(null as interval)) +-- !query 93 schema +struct +-- !query 93 output +NULL + + +-- !query 94 +select sum(cast(v as interval)) from VALUES ('1 seconds') t(v) where 1=0 +-- !query 94 schema +struct +-- !query 94 output +NULL + + +-- !query 95 +select sum(cast(v as interval)) from VALUES ('1 seconds'), ('2 seconds'), (null) t(v) +-- !query 95 schema +struct +-- !query 95 output +3 seconds + + +-- !query 96 +select sum(cast(v as interval)) from VALUES ('-1 seconds'), ('2 seconds'), (null) t(v) +-- !query 96 schema +struct +-- !query 96 output +1 seconds + + +-- !query 97 +select sum(cast(v as interval)) from VALUES ('-1 seconds'), ('-2 seconds'), (null) t(v) +-- !query 97 schema +struct +-- !query 97 output +-3 seconds + + +-- !query 98 +select sum(cast(v as interval)) from VALUES ('-1 weeks'), ('2 seconds'), (null) t(v) +-- !query 98 schema +struct +-- !query 98 output +-7 days 2 seconds + + +-- !query 99 +select + i, + sum(cast(v as interval)) +from VALUES (1, '-1 weeks'), (2, '2 seconds'), (3, null), (1, '5 days') t(i, v) +group by i +-- !query 99 schema +struct +-- !query 99 output +1 -2 days +2 2 seconds +3 NULL + + +-- !query 100 +select + sum(cast(v as interval)) as sv +from VALUES (1, '-1 weeks'), (2, '2 seconds'), (3, null), (1, '5 days') t(i, v) +having sv is not null +-- !query 100 schema +struct +-- !query 100 output +-2 days 2 seconds + + +-- !query 101 +SELECT + i, + sum(cast(v as interval)) OVER (ORDER BY i ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING) +FROM VALUES(1, '1 seconds'), (1, '2 seconds'), (2, NULL), (2, NULL) t(i,v) +-- !query 101 schema +struct +-- !query 101 output +1 2 seconds +1 3 seconds +2 NULL +2 NULL + + +-- !query 102 +select avg(cast(v as interval)) from VALUES (null) t(v) +-- !query 102 schema +struct +-- !query 102 output +NULL + + +-- !query 103 +select avg(cast(v as interval)) from VALUES ('1 seconds'), ('2 seconds'), (null) t(v) where 1=0 +-- !query 103 schema +struct +-- !query 103 output +NULL + + +-- !query 104 +select avg(cast(v as interval)) from VALUES ('1 seconds'), ('2 seconds'), (null) t(v) +-- !query 104 schema +struct +-- !query 104 output +1.5 seconds + + +-- !query 105 +select avg(cast(v as interval)) from VALUES ('-1 seconds'), ('2 seconds'), (null) t(v) +-- !query 105 schema +struct +-- !query 105 output +0.5 seconds + + +-- !query 106 +select avg(cast(v as interval)) from VALUES ('-1 seconds'), ('-2 seconds'), (null) t(v) +-- !query 106 schema +struct +-- !query 106 output +-1.5 seconds + + +-- !query 107 +select avg(cast(v as interval)) from VALUES ('-1 weeks'), ('2 seconds'), (null) t(v) +-- !query 107 schema +struct +-- !query 107 output +-3 days -11 hours -59 minutes -59 seconds + + +-- !query 108 +select + i, + avg(cast(v as interval)) +from VALUES (1, '-1 weeks'), (2, '2 seconds'), (3, null), (1, '5 days') t(i, v) +group by i +-- !query 108 schema +struct +-- !query 
108 output +1 -1 days +2 2 seconds +3 NULL + + +-- !query 109 +select + avg(cast(v as interval)) as sv +from VALUES (1, '-1 weeks'), (2, '2 seconds'), (3, null), (1, '5 days') t(i, v) +having sv is not null +-- !query 109 schema +struct +-- !query 109 output +-15 hours -59 minutes -59.333333 seconds + + +-- !query 110 +SELECT + i, + avg(cast(v as interval)) OVER (ORDER BY i ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING) +FROM VALUES (1,'1 seconds'), (1,'2 seconds'), (2,NULL), (2,NULL) t(i,v) +-- !query 110 schema +struct +-- !query 110 output +1 1.5 seconds +1 2 seconds +2 NULL +2 NULL + + +-- !query 111 +create temporary view interval_arithmetic as + select CAST(dateval AS date), CAST(tsval AS timestamp) from values + ('2012-01-01', '2012-01-01') + as interval_arithmetic(dateval, tsval) +-- !query 111 schema +struct<> +-- !query 111 output + + + +-- !query 112 +select + dateval, + dateval - interval '2-2' year to month, + dateval - interval '-2-2' year to month, + dateval + interval '2-2' year to month, + dateval + interval '-2-2' year to month, + - interval '2-2' year to month + dateval, + interval '2-2' year to month + dateval +from interval_arithmetic +-- !query 112 schema +struct +-- !query 112 output +2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01 + + +-- !query 113 +select + tsval, + tsval - interval '2-2' year to month, + tsval - interval '-2-2' year to month, + tsval + interval '2-2' year to month, + tsval + interval '-2-2' year to month, + - interval '2-2' year to month + tsval, + interval '2-2' year to month + tsval +from interval_arithmetic +-- !query 113 schema +struct +-- !query 113 output +2012-01-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 2014-03-01 00:00:00 2009-11-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 + + +-- !query 114 +select + interval '2-2' year to month + interval '3-3' year to month, + interval '2-2' year to month - interval '3-3' year to month +from interval_arithmetic +-- !query 114 schema +struct<(INTERVAL '2 years 2 months' + INTERVAL '3 years 3 months'):interval,(INTERVAL '2 years 2 months' - INTERVAL '3 years 3 months'):interval> +-- !query 114 output +5 years 5 months -1 years -1 months + + +-- !query 115 +select + dateval, + dateval - interval '99 11:22:33.123456789' day to second, + dateval - interval '-99 11:22:33.123456789' day to second, + dateval + interval '99 11:22:33.123456789' day to second, + dateval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + dateval, + interval '99 11:22:33.123456789' day to second + dateval +from interval_arithmetic +-- !query 115 schema +struct +-- !query 115 output +2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 + + +-- !query 116 +select + tsval, + tsval - interval '99 11:22:33.123456789' day to second, + tsval - interval '-99 11:22:33.123456789' day to second, + tsval + interval '99 11:22:33.123456789' day to second, + tsval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + tsval, + interval '99 11:22:33.123456789' day to second + tsval +from interval_arithmetic +-- !query 116 schema +struct +-- !query 116 output +2012-01-01 00:00:00 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 + + +-- !query 117 +select + interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second, + interval '99 
11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second +from interval_arithmetic +-- !query 117 schema +struct<(INTERVAL '99 days 11 hours 22 minutes 33.123456 seconds' + INTERVAL '10 days 9 hours 8 minutes 7.123456 seconds'):interval,(INTERVAL '99 days 11 hours 22 minutes 33.123456 seconds' - INTERVAL '10 days 9 hours 8 minutes 7.123456 seconds'):interval> +-- !query 117 output +109 days 20 hours 30 minutes 40.246912 seconds 89 days 2 hours 14 minutes 26 seconds + + +-- !query 118 +select 1 year 2 days +-- !query 118 schema +struct +-- !query 118 output +1 years 2 days + + +-- !query 119 +select '10-9' year to month +-- !query 119 schema +struct +-- !query 119 output +10 years 9 months + + +-- !query 120 +select '20 15:40:32.99899999' day to second +-- !query 120 schema +struct +-- !query 120 output +20 days 15 hours 40 minutes 32.998999 seconds + + +-- !query 121 +select 30 day day +-- !query 121 schema +struct<> +-- !query 121 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 14) + +== SQL == +select 30 day day +--------------^^^ + + +-- !query 122 +select date'2012-01-01' - '2-2' year to month +-- !query 122 schema +struct +-- !query 122 output +2009-11-01 + + +-- !query 123 +select 1 month - 1 day +-- !query 123 schema +struct +-- !query 123 output +1 months -1 days + + +-- !query 124 +select 1 year to month +-- !query 124 schema +struct<> +-- !query 124 output +org.apache.spark.sql.catalyst.parser.ParseException + +The value of from-to unit must be a string(line 1, pos 7) + +== SQL == +select 1 year to month +-------^^^ + + +-- !query 125 +select '1' year to second +-- !query 125 schema +struct<> +-- !query 125 output +org.apache.spark.sql.catalyst.parser.ParseException + +Intervals FROM year TO second are not supported.(line 1, pos 7) + +== SQL == +select '1' year to second +-------^^^ + + +-- !query 126 +select 1 year '2-1' year to month +-- !query 126 schema +struct<> +-- !query 126 output +org.apache.spark.sql.catalyst.parser.ParseException + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 14) + +== SQL == +select 1 year '2-1' year to month +--------------^^^ + + +-- !query 127 +select (-30) day +-- !query 127 schema +struct<> +-- !query 127 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 13) + +== SQL == +select (-30) day +-------------^^^ + + +-- !query 128 +select (a + 1) day +-- !query 128 schema +struct<> +-- !query 128 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 15) + +== SQL == +select (a + 1) day +---------------^^^ + + +-- !query 129 +select 30 day day day +-- !query 129 schema +struct<> +-- !query 129 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 14) + +== SQL == +select 30 day day day +--------------^^^ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala index 6e86d37a310d..f55ef704b092 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala @@ -255,8 +255,8 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession { // If `--IMPORT` found, load code from another test case file, then insert them // into the head in this test. 
- val importedTestCaseName = comments.filter(_.startsWith("--IMPORT ")).map(_.substring(9)) - val importedCode = importedTestCaseName.flatMap { testCaseName => + val importedTestCaseNames = comments.filter(_.startsWith("--IMPORT ")).map(_.substring(9)) + val importedCode = importedTestCaseNames.flatMap { testCaseName => listTestCases.find(_.name == testCaseName).map { testCase => val input = fileToString(new File(testCase.inputFile)) val (_, code) = input.split("\n").partition(_.trim.startsWith("--")) @@ -277,7 +277,7 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession { // When we use '--SET' and '--IMPORT' together for those import queries, we want to run the // same queries from the original file but with different settings and save the answers. So the // `--SET` will be respected in this case. - if ((regenerateGoldenFiles && importedTestCaseName.isEmpty) || !isTestWithConfigSets) { + if ((regenerateGoldenFiles && importedTestCaseNames.isEmpty) || !isTestWithConfigSets) { runQueries(queries, testCase, None) } else { val configSets = { diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala index dc1609d3326c..119cf4f2e99c 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala @@ -106,7 +106,8 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite { "subquery/in-subquery/in-set-operations.sql", // SPARK-29783: need to set conf "interval-display-iso_8601.sql", - "interval-display-sql_standard.sql" + "interval-display-sql_standard.sql", + "ansi/optional-interval.sql" ) override def runQueries(
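As a rough usage sketch (not part of the patch above): assuming a running SparkSession bound to `spark`, the new flag is expected to combine with the ANSI dialect flag roughly as follows. The config keys are the ones added or referenced in this diff, and the expected results are taken from the golden files above (query 118 in ansi/optional-interval.sql.out and ansi/interval.sql.out).

// Hedged sketch, not from the patch: assumes `spark` is a SparkSession (e.g. in spark-shell).
spark.conf.set("spark.sql.dialect.spark.ansi.enabled", "true")
spark.conf.set("spark.sql.parser.optionalIntervalPrefix", "true")
// With both flags set, the INTERVAL keyword may be omitted from interval literals:
spark.sql("select 1 year 2 days").show()   // returns 1 years 2 days (column named INTERVAL '1 years 2 days')
spark.conf.set("spark.sql.parser.optionalIntervalPrefix", "false")
// With the prefix flag off, the same statement is rejected by the parser:
// org.apache.spark.sql.catalyst.parser.ParseException: no viable alternative at input '1'(line 1, pos 7)
spark.sql("select 1 year 2 days")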