-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-29379][SQL]SHOW FUNCTIONS show '!=', '<>' , 'between', 'case' #26053
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 1 commit
911007a
ba22b62
074ee0e
9b8d63e
b066088
3d6c85d
85556a6
22b3487
60cd8a8
522193c
a285290
9f68ead
bc04f99
7ac4d16
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -73,6 +73,11 @@ case class CreateFunctionCommand( | |
| s"is not allowed: '${databaseName.get}'") | ||
| } | ||
|
|
||
| // Redefine a virtual function is not allowed | ||
| if (FunctionsCommand.virtualOperators.contains(functionName.toLowerCase(Locale.ROOT))) { | ||
| throw new AnalysisException(s"It's not allowed to redefine virtual function '$functionName'") | ||
| } | ||
|
|
||
| override def run(sparkSession: SparkSession): Seq[Row] = { | ||
| val catalog = sparkSession.sessionState.catalog | ||
| val func = CatalogFunction(FunctionIdentifier(functionName, databaseName), className, resources) | ||
|
|
@@ -171,6 +176,11 @@ case class DropFunctionCommand( | |
|
|
||
| override def run(sparkSession: SparkSession): Seq[Row] = { | ||
| val catalog = sparkSession.sessionState.catalog | ||
|
|
||
|
||
| if (FunctionsCommand.virtualOperators.contains(functionName.toLowerCase(Locale.ROOT))) { | ||
| throw new AnalysisException(s"Cannot drop virtual function '$functionName'") | ||
|
||
| } | ||
|
|
||
| if (isTemp) { | ||
| if (databaseName.isDefined) { | ||
| throw new AnalysisException(s"Specifying a database in DROP TEMPORARY FUNCTION " + | ||
|
|
@@ -223,13 +233,22 @@ case class ShowFunctionsCommand( | |
| case (f, "USER") if showUserFunctions => f.unquotedString | ||
| case (f, "SYSTEM") if showSystemFunctions => f.unquotedString | ||
| } | ||
| // Hard code "<>", "!=", "between", and "case" for now as there are no corresponding functions. | ||
| // "<>", "!=", "between", and "case" are system functions; only show them when showSystemFunctions=true | ||
| if (showSystemFunctions) { | ||
| (functionNames ++ | ||
| StringUtils.filterPattern(Seq("!=", "<>", "between", "case"), pattern.getOrElse("*"))) | ||
| StringUtils.filterPattern(FunctionsCommand.virtualOperators, pattern.getOrElse("*"))) | ||
| .sorted.map(Row(_)) | ||
| } else { | ||
| functionNames.sorted.map(Row(_)) | ||
| } | ||
|
|
||
| } | ||
| } | ||
|
|
||
| object FunctionsCommand { | ||
| // operators that do not have corresponding functions. | ||
| // They should be handled in `CreateFunctionCommand`, `DescribeFunctionCommand`, | ||
| // `DropFunctionCommand` and `ShowFunctionsCommand` | ||
| val virtualOperators = Seq("!=", "<>", "between", "case") | ||
| } | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.util.StringUtils | |
| import org.apache.spark.sql.execution.HiveResult.hiveResultString | ||
| import org.apache.spark.sql.execution.aggregate.{HashAggregateExec, SortAggregateExec} | ||
| import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec | ||
| import org.apache.spark.sql.execution.command.FunctionsCommand | ||
| import org.apache.spark.sql.execution.datasources.v2.BatchScanExec | ||
| import org.apache.spark.sql.execution.datasources.v2.orc.OrcScan | ||
| import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetScan | ||
|
|
@@ -60,7 +61,7 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession { | |
| def getFunctions(pattern: String): Seq[Row] = { | ||
| StringUtils.filterPattern( | ||
| spark.sessionState.catalog.listFunctions("default").map(_._1.funcName) | ||
| ++ Seq("!=", "<>", "between", "case"), pattern) | ||
| ++ FunctionsCommand.virtualOperators, pattern) | ||
| .map(Row(_)) | ||
| } | ||
|
|
||
|
|
@@ -112,6 +113,19 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession { | |
| checkKeywordsExist(sql("describe functioN abcadf"), "Function: abcadf not found.") | ||
| } | ||
|
|
||
| test("drop virtual functions") { | ||
|
||
| val e1 = intercept[AnalysisException] { | ||
| sql( | ||
| "drop function case") | ||
| } | ||
| assert(e1.message == "Cannot drop virtual function 'case'") | ||
| val e2 = intercept[AnalysisException] { | ||
| sql( | ||
| "drop function `!=`") | ||
| } | ||
| assert(e2.message == "Cannot drop virtual function '!='") | ||
| } | ||
|
|
||
| test("SPARK-14415: All functions should have own descriptions") { | ||
| for (f <- spark.sessionState.functionRegistry.listFunction()) { | ||
| if (!Seq("cube", "grouping", "grouping_id", "rollup", "window").contains(f.unquotedString)) { | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
what's the error message if users try to redefine `=`?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
We can't use `create function = ....`, since `=` is a reserved keyword. We should use this error message:
As @HyukjinKwon mentioned, we should fix this ambiguity between functions and operators in the end.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
If we can't fix the problem completely here, let's keep it unchanged and fix them all together later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
OK, I have removed this code.