@@ -171,8 +171,8 @@ object CurDateExpressionBuilder extends ExpressionBuilder {
     if (expressions.isEmpty) {
       CurrentDate()
     } else {
-      throw QueryCompilationErrors.invalidFunctionArgumentNumberError(
-        Seq.empty, funcName, expressions.length)
+      throw QueryCompilationErrors.invalidFunctionArgumentsError(
+        funcName, "0", expressions.length)
     }
   }
 }
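Reviewer note (not part of the diff): the replacement call surfaces the WRONG_NUM_ARGS error class shown in the golden files below. A rough sketch of what QueryCompilationErrors.invalidFunctionArgumentsError plausibly builds, reconstructed from those golden entries; the wrapper object name and the exact constructor use here are assumptions for illustration, not the actual Spark source:

import org.apache.spark.sql.AnalysisException

// Sketch only: parameter names mirror the WRONG_NUM_ARGS entries in the golden files below.
object SketchedQueryCompilationErrors {
  def invalidFunctionArgumentsError(
      name: String, expectedNum: String, actualNum: Int): Throwable = {
    new AnalysisException(
      errorClass = "WRONG_NUM_ARGS",
      messageParameters = Map(
        "functionName" -> s"`$name`",   // e.g. `curdate`
        "expectedNum" -> expectedNum,   // CurDateExpressionBuilder passes "0"
        "actualNum" -> actualNum.toString))
  }
}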
1 change: 1 addition & 0 deletions sql/core/src/test/resources/sql-tests/inputs/date.sql
@@ -19,6 +19,7 @@ select date'2021-4294967297-11';
 select current_date = current_date;
 -- under ANSI mode, `current_date` can't be a function name.
 select current_date() = current_date();
+select curdate(1);

 -- conversions between date and unix_date (number of days from epoch)
 select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), DATE_FROM_UNIX_DATE(null);
23 changes: 23 additions & 0 deletions sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -135,6 +135,29 @@ struct<(current_date() = current_date()):boolean>
 true


+-- !query
+select curdate(1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "WRONG_NUM_ARGS",
+  "messageParameters" : {
+    "actualNum" : "1",
+    "expectedNum" : "0",
+    "functionName" : "`curdate`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 17,
+    "fragment" : "curdate(1)"
+  } ]
+}
+
+
 -- !query
 select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), DATE_FROM_UNIX_DATE(null)
 -- !query schema
23 changes: 23 additions & 0 deletions sql/core/src/test/resources/sql-tests/results/date.sql.out
@@ -121,6 +121,29 @@ struct<(current_date() = current_date()):boolean>
 true


+-- !query
+select curdate(1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "WRONG_NUM_ARGS",
+  "messageParameters" : {
+    "actualNum" : "1",
+    "expectedNum" : "0",
+    "functionName" : "`curdate`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 17,
+    "fragment" : "curdate(1)"
+  } ]
+}
+
+
 -- !query
 select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), DATE_FROM_UNIX_DATE(null)
 -- !query schema
@@ -121,6 +121,29 @@ struct<(current_date() = current_date()):boolean>
 true


+-- !query
+select curdate(1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "WRONG_NUM_ARGS",
+  "messageParameters" : {
+    "actualNum" : "1",
+    "expectedNum" : "0",
+    "functionName" : "`curdate`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 17,
+    "fragment" : "curdate(1)"
+  } ]
+}
+
+
 -- !query
 select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), DATE_FROM_UNIX_DATE(null)
 -- !query schema
@@ -49,6 +49,19 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
       sql("""SELECT CURDATE()""").collect().head.getDate(0))
     val d4 = DateTimeUtils.currentDate(ZoneId.systemDefault())
     assert(d0 <= d1 && d1 <= d2 && d2 <= d3 && d3 <= d4 && d4 - d0 <= 1)
+
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("SELECT CURDATE(1)")
+      },
+      errorClass = "WRONG_NUM_ARGS",
+      parameters = Map(
+        "functionName" -> "`curdate`",
+        "expectedNum" -> "0",
+        "actualNum" -> "1"
+      ),
+      context = ExpectedContext("", "", 7, 16, "CURDATE(1)")
+    )
   }

   test("function current_timestamp and now") {
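Reviewer note (not part of the diff): outside the ScalaTest harness, the same failure can be reproduced from a plain session. A minimal sketch; the local session setup and the getErrorClass accessor from SparkThrowable are assumptions about the runtime environment, not part of this PR:

import org.apache.spark.sql.{AnalysisException, SparkSession}

// Standalone reproduction sketch of the new error path for CURDATE with an argument.
object CurdateWrongNumArgsRepro {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("curdate-repro").getOrCreate()
    try {
      // CURDATE takes no arguments, so analysis should fail before any execution.
      spark.sql("SELECT curdate(1)").collect()
      assert(false, "expected WRONG_NUM_ARGS")
    } catch {
      case e: AnalysisException =>
        assert(e.getErrorClass == "WRONG_NUM_ARGS")
    } finally {
      spark.stop()
    }
  }
}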