@@ -73,6 +73,11 @@ case class CreateFunctionCommand(
s"is not allowed: '${databaseName.get}'")
}

// Redefining a virtual function is not allowed
if (FunctionsCommand.virtualOperators.contains(functionName.toLowerCase(Locale.ROOT))) {
throw new AnalysisException(s"It's not allowed to redefine virtual function '$functionName'")
Contributor:

what's the error message if users try to redefine =?

Contributor Author:

> what's the error message if users try to redefine =?

You can't use create function = ...., since = is a reserved keyword; you have to use

    create function `=` ....

Error message:

org.apache.hadoop.hive.ql.metadata.HiveException: InvalidObjectException(message:= is not a valid object name);
org.apache.hadoop.hive.ql.metadata.HiveException: InvalidObjectException(message:> is not a valid object name);
org.apache.hadoop.hive.ql.metadata.HiveException: InvalidObjectException(message:!= is not a valid object name);

but `case` and `between` can be registered.

as @HyukjinKwon mentioned, we should fix this ambiguity between functions and operators in the end.
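
For reference, a minimal reproduction sketch of the failure described above (assuming a Hive-enabled `SparkSession` named `spark`; the UDF class name `org.example.SomeUDF` is hypothetical):

```scala
// `=` is a reserved symbol, so the identifier must be backquoted in the DDL.
spark.sql("CREATE FUNCTION `=` AS 'org.example.SomeUDF'")
// Hive rejects the name itself:
//   org.apache.hadoop.hive.ql.metadata.HiveException:
//     InvalidObjectException(message:= is not a valid object name)

// Keywords such as `case` and `between` pass that check and can be registered:
spark.sql("CREATE FUNCTION `between` AS 'org.example.SomeUDF'")
```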

Contributor:

If we can't fix the problem completely here, let's keep it unchanged and fix them all together later.

Contributor Author:

> If we can't fix the problem completely here, let's keep it unchanged and fix them all together later.

OK, I have removed this code.

}

override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
val func = CatalogFunction(FunctionIdentifier(functionName, databaseName), className, resources)
@@ -171,6 +176,11 @@ case class DropFunctionCommand(

override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog

Contributor:
Have you tried drop function '='? Does Spark fail with "function not found" or "Cannot drop native function"?

Contributor Author (@AngersZhuuuu, Oct 16, 2019):

> Have you tried drop function '='? Does Spark fail with "function not found" or "Cannot drop native function"?

Function not found.

Error in query: Function 'default.=' not found in database 'default';

Contributor:
OK this can be improved later. Let's leave it for now. Thanks for the investigation!
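
A quick reproduction sketch of that behavior (again assuming a Hive-enabled `SparkSession` named `spark`):

```scala
// Without a dedicated guard, dropping a virtual operator falls through to the
// catalog lookup and fails there with the message quoted above:
spark.sql("DROP FUNCTION `=`")
// Error in query: Function 'default.=' not found in database 'default';
```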

if (FunctionsCommand.virtualOperators.contains(functionName.toLowerCase(Locale.ROOT))) {
throw new AnalysisException(s"Cannot drop virtual function '$functionName'")
Contributor:
ditto, we should make sure the behavior is consistent with other operators.

Contributor Author (@AngersZhuuuu, Oct 16, 2019):
> ditto

OK. drop function on any of `!=`, `case`, `between`, `<>` here will throw a "function not found" exception.

}

if (isTemp) {
if (databaseName.isDefined) {
throw new AnalysisException(s"Specifying a database in DROP TEMPORARY FUNCTION " +
@@ -223,13 +233,22 @@ case class ShowFunctionsCommand(
case (f, "USER") if showUserFunctions => f.unquotedString
case (f, "SYSTEM") if showSystemFunctions => f.unquotedString
}
// Hard code "<>", "!=", "between", and "case" for now as there are no corresponding functions.
// "<>", "!=", "between", and "case" are system functions; only show them when showSystemFunctions=true
if (showSystemFunctions) {
(functionNames ++
StringUtils.filterPattern(Seq("!=", "<>", "between", "case"), pattern.getOrElse("*")))
StringUtils.filterPattern(FunctionsCommand.virtualOperators, pattern.getOrElse("*")))
.sorted.map(Row(_))
} else {
functionNames.sorted.map(Row(_))
}

}
}

object FunctionsCommand {
// operators that do not have corresponding functions.
// They should be handled in `CreateFunctionCommand`, `DescribeFunctionCommand`,
// `DropFunctionCommand` and `ShowFunctionsCommand`
val virtualOperators = Seq("!=", "<>", "between", "case")
}
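
To make the `SHOW FUNCTIONS` filtering above concrete, here is a small sketch of how `StringUtils.filterPattern` treats the virtual operators (a sketch, assuming both classes are on the classpath; the outputs reflect the pattern semantics the command relies on):

```scala
import org.apache.spark.sql.catalyst.util.StringUtils
import org.apache.spark.sql.execution.command.FunctionsCommand

// The default pattern "*" matches everything, so all four virtual operators
// survive the filter and get appended to the regular system-function names.
StringUtils.filterPattern(FunctionsCommand.virtualOperators, "*")
// => Seq("!=", "<>", "between", "case")

// A narrower pattern keeps only the matching operators.
StringUtils.filterPattern(FunctionsCommand.virtualOperators, "bet*")
// => Seq("between")
```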
sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala (15 additions, 1 deletion)
@@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.util.StringUtils
import org.apache.spark.sql.execution.HiveResult.hiveResultString
import org.apache.spark.sql.execution.aggregate.{HashAggregateExec, SortAggregateExec}
import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec
import org.apache.spark.sql.execution.command.FunctionsCommand
import org.apache.spark.sql.execution.datasources.v2.BatchScanExec
import org.apache.spark.sql.execution.datasources.v2.orc.OrcScan
import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetScan
@@ -60,7 +61,7 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession {
def getFunctions(pattern: String): Seq[Row] = {
StringUtils.filterPattern(
spark.sessionState.catalog.listFunctions("default").map(_._1.funcName)
++ Seq("!=", "<>", "between", "case"), pattern)
++ FunctionsCommand.virtualOperators, pattern)
.map(Row(_))
}

@@ -112,6 +113,19 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession {
checkKeywordsExist(sql("describe functioN abcadf"), "Function: abcadf not found.")
}

test("drop virtual functions") {
Contributor:
since the related changes are reverted, we should remove the test as well

val e1 = intercept[AnalysisException] {
sql(
"drop function case")
}
assert(e1.message == "Cannot drop virtual function 'case'")
val e2 = intercept[AnalysisException] {
sql(
"drop function `!=`")
}
assert(e2.message == "Cannot drop virtual function '!='")
}

test("SPARK-14415: All functions should have own descriptions") {
for (f <- spark.sessionState.functionRegistry.listFunction()) {
if (!Seq("cube", "grouping", "grouping_id", "rollup", "window").contains(f.unquotedString)) {
@@ -33,7 +33,7 @@ import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, Functio
import org.apache.spark.sql.catalyst.catalog.{CatalogTableType, CatalogUtils, HiveTableRelation}
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
import org.apache.spark.sql.execution.command.LoadDataCommand
import org.apache.spark.sql.execution.command.{FunctionsCommand, LoadDataCommand}
import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveUtils}
@@ -193,7 +193,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
assert(allFunctions.contains(f))
}

Seq("!=", "<>", "between", "case").foreach { f =>
FunctionsCommand.virtualOperators.foreach { f =>
assert(allFunctions.contains(f))
}
