diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
index 1b1d5514b3f2..fa52e6cd8517 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
@@ -22,8 +22,8 @@ import java.lang.reflect.{Method, Modifier}
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{DataTypeMismatch, TypeCheckSuccess}
-import org.apache.spark.sql.catalyst.expressions.Cast.{toSQLExpr, toSQLType}
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
+import org.apache.spark.sql.errors.QueryErrorsBase
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.UTF8String
 import org.apache.spark.util.Utils
@@ -56,7 +56,9 @@ import org.apache.spark.util.Utils
   since = "2.0.0",
   group = "misc_funcs")
 case class CallMethodViaReflection(children: Seq[Expression])
-  extends Nondeterministic with CodegenFallback {
+  extends Nondeterministic
+  with CodegenFallback
+  with QueryErrorsBase {
 
   override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("reflect")
 
@@ -65,7 +67,7 @@ case class CallMethodViaReflection(children: Seq[Expression])
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
           "actualNum" -> children.length.toString))
     } else {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Max.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Max.scala
index b802678ec046..902f53309de4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Max.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Max.scala
@@ -41,7 +41,7 @@ case class Max(child: Expression) extends DeclarativeAggregate with UnaryLike[Ex
   override def dataType: DataType = child.dataType
 
   override def checkInputDataTypes(): TypeCheckResult =
-    TypeUtils.checkForOrderingExpr(child.dataType, "function max")
+    TypeUtils.checkForOrderingExpr(child.dataType, prettyName)
 
   private lazy val max = AttributeReference("max", child.dataType)()
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxByAndMinBy.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxByAndMinBy.scala
index 664bc32ccc46..096a42686a36 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxByAndMinBy.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxByAndMinBy.scala
@@ -47,7 +47,7 @@ abstract class MaxMinBy extends DeclarativeAggregate with BinaryLike[Expression]
   override def dataType: DataType = valueExpr.dataType
 
   override def checkInputDataTypes(): TypeCheckResult =
-    TypeUtils.checkForOrderingExpr(orderingExpr.dataType, s"function $prettyName")
+    TypeUtils.checkForOrderingExpr(orderingExpr.dataType, prettyName)
 
   // The attributes used to keep extremum (max or min) and associated aggregated values.
   private lazy val extremumOrdering =
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Min.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Min.scala
index 9c5c7bbda4dc..7a9588808dbd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Min.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Min.scala
@@ -41,7 +41,7 @@ case class Min(child: Expression) extends DeclarativeAggregate with UnaryLike[Ex
   override def dataType: DataType = child.dataType
 
   override def checkInputDataTypes(): TypeCheckResult =
-    TypeUtils.checkForOrderingExpr(child.dataType, "function min")
+    TypeUtils.checkForOrderingExpr(child.dataType, prettyName)
 
   private lazy val min = AttributeReference("min", child.dataType)()
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index d82108aa3c9f..3e8ec94c33ce 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -1203,7 +1203,7 @@ case class Least(children: Seq[Expression]) extends ComplexTypeMergingExpression
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
           "actualNum" -> children.length.toString))
     } else if (!TypeCoercion.haveSameType(inputTypesForMerging)) {
@@ -1215,7 +1215,7 @@ case class Least(children: Seq[Expression]) extends ComplexTypeMergingExpression
         )
       )
     } else {
-      TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(dataType, prettyName)
     }
   }
 
@@ -1294,7 +1294,7 @@ case class Greatest(children: Seq[Expression]) extends ComplexTypeMergingExpress
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
           "actualNum" -> children.length.toString))
     } else if (!TypeCoercion.haveSameType(inputTypesForMerging)) {
@@ -1306,7 +1306,7 @@ case class Greatest(children: Seq[Expression]) extends ComplexTypeMergingExpress
         )
       )
     } else {
-      TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(dataType, prettyName)
     }
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index efaadac6ed1c..256139aca014 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -26,7 +26,6 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion, UnresolvedAttribute, UnresolvedSeed}
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
 import org.apache.spark.sql.catalyst.expressions.ArraySortLike.NullOrder
-import org.apache.spark.sql.catalyst.expressions.Cast.{toSQLExpr, toSQLType}
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
 import org.apache.spark.sql.catalyst.trees.{BinaryLike, SQLQueryContext, UnaryLike}
@@ -34,7 +33,7 @@ import org.apache.spark.sql.catalyst.trees.TreePattern.{ARRAYS_ZIP, CONCAT, Tree
 import org.apache.spark.sql.catalyst.util._
 import org.apache.spark.sql.catalyst.util.DateTimeConstants._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils._
-import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.errors.{QueryErrorsBase, QueryExecutionErrors}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.util.SQLOpenHashSet
@@ -47,8 +46,10 @@ import org.apache.spark.unsafe.types.{ByteArray, CalendarInterval, UTF8String}
  * Base trait for [[BinaryExpression]]s with two arrays of the same element type and implicit
  * casting.
  */
-trait BinaryArrayExpressionWithImplicitCast extends BinaryExpression
-  with ImplicitCastInputTypes {
+trait BinaryArrayExpressionWithImplicitCast
+  extends BinaryExpression
+  with ImplicitCastInputTypes
+  with QueryErrorsBase {
 
   @transient protected lazy val elementType: DataType =
     inputTypes.head.asInstanceOf[ArrayType].elementType
@@ -72,7 +73,7 @@ trait BinaryArrayExpressionWithImplicitCast extends BinaryExpression
       DataTypeMismatch(
         errorSubClass = "BINARY_ARRAY_DIFF_TYPES",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "arrayType" -> toSQLType(ArrayType),
           "leftType" -> toSQLType(left.dataType),
           "rightType" -> toSQLType(right.dataType)
@@ -219,7 +220,10 @@ case class MapKeys(child: Expression)
   group = "map_funcs",
   since = "3.3.0")
 case class MapContainsKey(left: Expression, right: Expression)
-  extends RuntimeReplaceable with BinaryLike[Expression] with ImplicitCastInputTypes {
+  extends RuntimeReplaceable
+  with BinaryLike[Expression]
+  with ImplicitCastInputTypes
+  with QueryErrorsBase {
 
   override lazy val replacement: Expression = ArrayContains(MapKeys(left), right)
 
@@ -240,14 +244,14 @@ case class MapContainsKey(left: Expression, right: Expression)
       case (_, NullType) =>
         DataTypeMismatch(
           errorSubClass = "NULL_TYPE",
-          Map("functionName" -> prettyName))
+          Map("functionName" -> toSQLId(prettyName)))
       case (MapType(kt, _, _), dt) if kt.sameType(dt) =>
-        TypeUtils.checkForOrderingExpr(kt, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(kt, prettyName)
       case _ =>
         DataTypeMismatch(
           errorSubClass = "MAP_CONTAINS_KEY_DIFF_TYPES",
           messageParameters = Map(
-            "functionName" -> prettyName,
+            "functionName" -> toSQLId(prettyName),
             "dataType" -> toSQLType(MapType),
             "leftType" -> toSQLType(left.dataType),
             "rightType" -> toSQLType(right.dataType)
@@ -676,20 +680,21 @@ case class MapEntries(child: Expression)
   """,
   group = "map_funcs",
   since = "2.4.0")
-case class MapConcat(children: Seq[Expression]) extends ComplexTypeMergingExpression {
+case class MapConcat(children: Seq[Expression])
+  extends ComplexTypeMergingExpression
+  with QueryErrorsBase {
 
   override def checkInputDataTypes(): TypeCheckResult = {
-    val funcName = s"function $prettyName"
     if (children.exists(!_.dataType.isInstanceOf[MapType])) {
       DataTypeMismatch(
         errorSubClass = "MAP_CONCAT_DIFF_TYPES",
         messageParameters = Map(
-          "functionName" -> funcName,
+          "functionName" -> toSQLId(prettyName),
          "dataType" -> children.map(_.dataType).map(toSQLType).mkString("[", ", ", "]")
        )
      )
    } else {
-      val sameTypeCheck = TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), funcName)
+      val sameTypeCheck = TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), prettyName)
       if (sameTypeCheck.isFailure) {
         sameTypeCheck
       } else {
@@ -802,7 +807,10 @@ case class MapConcat(children: Seq[Expression]) extends ComplexTypeMergingExpres
   """,
   group = "map_funcs",
   since = "2.4.0")
-case class MapFromEntries(child: Expression) extends UnaryExpression with NullIntolerant {
+case class MapFromEntries(child: Expression)
+  extends UnaryExpression
+  with NullIntolerant
+  with QueryErrorsBase {
 
   @transient
   private lazy val dataTypeDetails: Option[(MapType, Boolean, Boolean)] = child.dataType match {
@@ -827,7 +835,7 @@ case class MapFromEntries(child: Expression) extends UnaryExpression with NullIn
       DataTypeMismatch(
         errorSubClass = "MAP_FROM_ENTRIES_WRONG_TYPE",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "childExpr" -> toSQLExpr(child),
           "childType" -> toSQLType(child.dataType)
         )
@@ -1290,7 +1298,7 @@ case class ArrayContains(left: Expression, right: Expression)
       case (_, NullType) =>
         TypeCheckResult.TypeCheckFailure("Null typed values cannot be used as arguments")
       case (ArrayType(e1, _), e2) if e1.sameType(e2) =>
-        TypeUtils.checkForOrderingExpr(e2, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(e2, prettyName)
       case _ => TypeCheckResult.TypeCheckFailure(s"Input to function $prettyName should have " +
         s"been ${ArrayType.simpleString} followed by a value with same element type, but it's " +
         s"[${left.dataType.catalogString}, ${right.dataType.catalogString}].")
@@ -1373,7 +1381,7 @@ case class ArraysOverlap(left: Expression, right: Expression)
 
   override def checkInputDataTypes(): TypeCheckResult = super.checkInputDataTypes() match {
     case TypeCheckResult.TypeCheckSuccess =>
-      TypeUtils.checkForOrderingExpr(elementType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(elementType, prettyName)
     case failure => failure
   }
 
@@ -1901,7 +1909,7 @@ case class ArrayMin(child: Expression)
   override def checkInputDataTypes(): TypeCheckResult = {
     val typeCheckResult = super.checkInputDataTypes()
     if (typeCheckResult.isSuccess) {
-      TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(dataType, prettyName)
     } else {
       typeCheckResult
     }
@@ -1974,7 +1982,7 @@ case class ArrayMax(child: Expression)
   override def checkInputDataTypes(): TypeCheckResult = {
     val typeCheckResult = super.checkInputDataTypes()
     if (typeCheckResult.isSuccess) {
-      TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(dataType, prettyName)
     } else {
       typeCheckResult
     }
@@ -2063,7 +2071,7 @@ case class ArrayPosition(left: Expression, right: Expression)
   override def checkInputDataTypes(): TypeCheckResult = {
     (left.dataType, right.dataType) match {
       case (ArrayType(e1, _), e2) if e1.sameType(e2) =>
-        TypeUtils.checkForOrderingExpr(e2, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(e2, prettyName)
       case _ => TypeCheckResult.TypeCheckFailure(s"Input to function $prettyName should have " +
         s"been ${ArrayType.simpleString} followed by a value with same element type, but it's " +
         s"[${left.dataType.catalogString}, ${right.dataType.catalogString}].")
@@ -2419,7 +2427,7 @@ case class Concat(children: Seq[Expression]) extends ComplexTypeMergingExpressio
           s" ${BinaryType.simpleString} or ${ArrayType.simpleString}, but it's " +
           childTypes.map(_.catalogString).mkString("[", ", ", "]"))
       }
-      TypeUtils.checkForSameTypeInputExpr(childTypes, s"function $prettyName")
+      TypeUtils.checkForSameTypeInputExpr(childTypes, prettyName)
     }
   }
 
@@ -3473,7 +3481,7 @@ case class ArrayRemove(left: Expression, right: Expression)
   override def checkInputDataTypes(): TypeCheckResult = {
     (left.dataType, right.dataType) match {
       case (ArrayType(e1, _), e2) if e1.sameType(e2) =>
-        TypeUtils.checkForOrderingExpr(e2, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(e2, prettyName)
       case _ => TypeCheckResult.TypeCheckFailure(s"Input to function $prettyName should have " +
         s"been ${ArrayType.simpleString} followed by a value with same element type, but it's " +
         s"[${left.dataType.catalogString}, ${right.dataType.catalogString}].")
@@ -3673,7 +3681,7 @@ case class ArrayDistinct(child: Expression)
     super.checkInputDataTypes() match {
       case f if f.isFailure => f
       case TypeCheckResult.TypeCheckSuccess =>
-        TypeUtils.checkForOrderingExpr(elementType, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(elementType, prettyName)
     }
   }
 
@@ -3828,8 +3836,7 @@ trait ArrayBinaryLike
   override def checkInputDataTypes(): TypeCheckResult = {
     val typeCheckResult = super.checkInputDataTypes()
     if (typeCheckResult.isSuccess) {
-      TypeUtils.checkForOrderingExpr(dataType.asInstanceOf[ArrayType].elementType,
-        s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(dataType.asInstanceOf[ArrayType].elementType, prettyName)
     } else {
       typeCheckResult
     }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
index c6ae14e5e3c9..27d4f506ac86 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
@@ -68,7 +68,7 @@ case class CreateArray(children: Seq[Expression], useStringTypeWhenEmpty: Boolea
   override def stringArgs: Iterator[Any] = super.stringArgs.take(1)
 
   override def checkInputDataTypes(): TypeCheckResult = {
-    TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), s"function $prettyName")
+    TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), prettyName)
   }
 
   private val defaultElementType: DataType = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
index 274de47ee752..d0ef5365bc94 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
@@ -444,7 +444,7 @@ case class GetMapValue(child: Expression, key: Expression)
     super.checkInputDataTypes() match {
       case f if f.isFailure => f
       case TypeCheckResult.TypeCheckSuccess =>
-        TypeUtils.checkForOrderingExpr(keyType, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(keyType, prettyName)
     }
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala
index 5b8b4b3f621e..98513fb5dddf 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala
@@ -1023,7 +1023,7 @@ case class MapZipWith(left: Expression, right: Expression, function: Expression)
     super.checkArgumentDataTypes() match {
       case TypeCheckResult.TypeCheckSuccess =>
         if (leftKeyType.sameType(rightKeyType)) {
-          TypeUtils.checkForOrderingExpr(leftKeyType, s"function $prettyName")
+          TypeUtils.checkForOrderingExpr(leftKeyType, prettyName)
         } else {
           TypeCheckResult.TypeCheckFailure(s"The input to function $prettyName should have " +
             s"been two ${MapType.simpleString}s with compatible key types, but the key types are " +
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
index 959edbd1c5ae..3529644aeeac 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
@@ -355,7 +355,9 @@ case class GetJsonObject(json: Expression, path: Expression)
   since = "1.6.0")
 // scalastyle:on line.size.limit line.contains.tab
 case class JsonTuple(children: Seq[Expression])
-  extends Generator with CodegenFallback {
+  extends Generator
+  with CodegenFallback
+  with QueryErrorsBase {
 
   import SharedFactory._
 
@@ -396,7 +398,7 @@ case class JsonTuple(children: Seq[Expression])
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
           "actualNum" -> children.length.toString))
     } else if (children.forall(child => StringType.acceptsType(child.dataType))) {
@@ -404,7 +406,7 @@ case class JsonTuple(children: Seq[Expression])
     } else {
       DataTypeMismatch(
         errorSubClass = "NON_STRING_TYPE",
-        messageParameters = Map("funcName" -> prettyName))
+        messageParameters = Map("funcName" -> toSQLId(prettyName)))
     }
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
index 28739fb47a2b..f69ece52d858 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
@@ -1794,7 +1794,7 @@ case class WidthBucket(
         TypeCheckSuccess
       case _ =>
         val types = Seq(value.dataType, minValue.dataType, maxValue.dataType)
-        TypeUtils.checkForSameTypeInputExpr(types, s"function $prettyName")
+        TypeUtils.checkForSameTypeInputExpr(types, prettyName)
     }
     case f => f
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
index 8d171c2c6631..1e6cc356173e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
@@ -60,7 +60,7 @@ case class Coalesce(children: Seq[Expression])
       TypeCheckResult.TypeCheckFailure(
         s"input to function $prettyName requires at least one argument")
     } else {
-      TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), s"function $prettyName")
+      TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), prettyName)
     }
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index 21f65cb3402e..899ece6f5297 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -404,7 +404,7 @@ case class InSubquery(values: Seq[Expression], query: ListQuery)
           |Right side:
           |[${query.childOutputs.map(_.dataType.catalogString).mkString(", ")}].""".stripMargin)
     } else {
-      TypeUtils.checkForOrderingExpr(value.dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(value.dataType, prettyName)
     }
   }
 
@@ -453,7 +453,7 @@ case class In(value: Expression, list: Seq[Expression]) extends Predicate {
       TypeCheckResult.TypeCheckFailure(s"Arguments must be same type but were: " +
         s"${value.dataType.catalogString} != ${mismatchOpt.get.dataType.catalogString}")
     } else {
-      TypeUtils.checkForOrderingExpr(value.dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(value.dataType, prettyName)
     }
   }
 
@@ -934,7 +934,7 @@ abstract class BinaryComparison extends BinaryOperator with Predicate {
 
   override def checkInputDataTypes(): TypeCheckResult = super.checkInputDataTypes() match {
     case TypeCheckResult.TypeCheckSuccess =>
-      TypeUtils.checkForOrderingExpr(left.dataType, this.getClass.getSimpleName)
+      TypeUtils.checkForOrderingExpr(left.dataType, symbol)
     case failure => failure
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
index 6927c4cfa3c9..8ae4bb9c29c0 100755
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
@@ -278,7 +278,7 @@ case class Elt(
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> "elt",
+          "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
           "actualNum" -> children.length.toString
         )
@@ -305,7 +305,7 @@ case class Elt(
           )
         )
       }
-      TypeUtils.checkForSameTypeInputExpr(inputTypes, s"function $prettyName")
+      TypeUtils.checkForSameTypeInputExpr(inputTypes, prettyName)
     }
   }
 
@@ -782,7 +782,7 @@ case class Overlay(input: Expression, replace: Expression, pos: Expression, len:
     val inputTypeCheck = super.checkInputDataTypes()
     if (inputTypeCheck.isSuccess) {
       TypeUtils.checkForSameTypeInputExpr(
-        input.dataType :: replace.dataType :: Nil, s"function $prettyName")
+        input.dataType :: replace.dataType :: Nil, prettyName)
     } else {
       inputTypeCheck
     }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala
index 0bb5d29c5c47..de1460eb2ea3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala
@@ -35,7 +35,7 @@ object TypeUtils extends QueryErrorsBase {
       DataTypeMismatch(
         errorSubClass = "INVALID_ORDERING_TYPE",
         Map(
-          "functionName" -> caller,
+          "functionName" -> toSQLId(caller),
           "dataType" -> toSQLType(dt)
         )
       )
@@ -49,7 +49,7 @@ object TypeUtils extends QueryErrorsBase {
       DataTypeMismatch(
         errorSubClass = "DATA_DIFF_TYPES",
         messageParameters = Map(
-          "functionName" -> caller,
+          "functionName" -> toSQLId(caller),
           "dataType" -> types.map(toSQLType).mkString("(", " or ", ")")
         )
       )
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
index c44a0852b85c..ecd5b9e22fb2 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
@@ -725,7 +725,7 @@ class AnalysisErrorSuite extends AnalysisTest {
       inputPlan = plan2,
       expectedErrorClass = "DATATYPE_MISMATCH.INVALID_ORDERING_TYPE",
       expectedMessageParameters = Map(
-        "functionName" -> "EqualTo",
+        "functionName" -> "`=`",
        "dataType" -> "\"MAP\"",
        "sqlExpr" -> "\"(b = d)\""
       ),
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
index 0d66ad4b0684..e3829311e2dc 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
@@ -298,7 +298,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = EqualTo($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField = mapField)\"",
-        "functionName" -> "EqualTo",
+        "functionName" -> "`=`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -306,7 +306,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = EqualTo($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField = mapField)\"",
-        "functionName" -> "EqualTo",
+        "functionName" -> "`=`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -314,7 +314,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = EqualNullSafe($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField <=> mapField)\"",
-        "functionName" -> "EqualNullSafe",
+        "functionName" -> "`<=>`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -322,7 +322,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = LessThan($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField < mapField)\"",
-        "functionName" -> "LessThan",
+        "functionName" -> "`<`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -330,7 +330,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = LessThanOrEqual($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField <= mapField)\"",
-        "functionName" -> "LessThanOrEqual",
+        "functionName" -> "`<=`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -338,7 +338,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = GreaterThan($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField > mapField)\"",
-        "functionName" -> "GreaterThan",
+        "functionName" -> "`>`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -346,7 +346,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = GreaterThanOrEqual($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField >= mapField)\"",
-        "functionName" -> "GreaterThanOrEqual",
+        "functionName" -> "`>=`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -385,7 +385,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = Min($"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"min(mapField)\"",
-        "functionName" -> "function min",
+        "functionName" -> "`min`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -393,7 +393,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = Max($"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"max(mapField)\"",
-        "functionName" -> "function max",
+        "functionName" -> "`max`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -427,7 +427,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = CreateArray(Seq($"intField", $"booleanField")),
       messageParameters = Map(
         "sqlExpr" -> "\"array(intField, booleanField)\"",
-        "functionName" -> "function array",
+        "functionName" -> "`array`",
         "dataType" -> "(\"INT\" or \"BOOLEAN\")"
       )
     )
@@ -435,7 +435,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = Coalesce(Seq($"intField", $"booleanField")),
       messageParameters = Map(
         "sqlExpr" -> "\"coalesce(intField, booleanField)\"",
-        "functionName" -> "function coalesce",
+        "functionName" -> "`coalesce`",
         "dataType" -> "(\"INT\" or \"BOOLEAN\")"
       )
     )
@@ -601,7 +601,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = expr1,
       messageParameters = Map(
         "sqlExpr" -> toSQLExpr(expr1),
-        "functionName" -> expr1.prettyName,
+        "functionName" -> toSQLId(expr1.prettyName),
         "expectedNum" -> "> 1",
         "actualNum" -> "1")
     )
@@ -621,7 +621,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = expr3,
       messageParameters = Map(
         "sqlExpr" -> toSQLExpr(expr3),
-        "functionName" -> s"function ${expr3.prettyName}",
+        "functionName" -> s"`${expr3.prettyName}`",
         "dataType" -> "\"MAP\""
       )
     )
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
index 5e5d0f7445e3..73cc9aca5682 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
@@ -242,7 +242,7 @@ class PredicateSuite extends SparkFunSuite with ExpressionEvalHelper {
       case TypeCheckResult.DataTypeMismatch(errorSubClass, messageParameters) =>
         assert(errorSubClass == "INVALID_ORDERING_TYPE")
         assert(messageParameters === Map(
-          "functionName" -> "function in", "dataType" -> "\"MAP\""))
+          "functionName" -> "`in`", "dataType" -> "\"MAP\""))
     }
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
index fce94bf02a0b..94ae774070c8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
@@ -1594,7 +1594,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> "elt",
+          "functionName" -> "`elt`",
           "expectedNum" -> "> 1",
           "actualNum" -> "1"
         )
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 2078d3d8eb68..18ba4fb0ab7d 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -3609,7 +3609,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "function array",
+    "functionName" : "`array`",
     "sqlExpr" : "\"array(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
   },
   "queryContext" : [ {
@@ -3648,7 +3648,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "function coalesce",
+    "functionName" : "`coalesce`",
     "sqlExpr" : "\"coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
index a550dbbec882..a9b577dd4c37 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
@@ -73,7 +73,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.MAP_CONTAINS_KEY_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "\"MAP\"",
-    "functionName" : "map_contains_key",
+    "functionName" : "`map_contains_key`",
     "leftType" : "\"MAP\"",
     "rightType" : "\"INT\"",
     "sqlExpr" : "\"map_contains_key(map(1, a, 2, b), 1)\""
@@ -98,7 +98,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.MAP_CONTAINS_KEY_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "\"MAP\"",
-    "functionName" : "map_contains_key",
+    "functionName" : "`map_contains_key`",
     "leftType" : "\"MAP\"",
     "rightType" : "\"STRING\"",
     "sqlExpr" : "\"map_contains_key(map(1, a, 2, b), 1)\""
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index 6eb5fb4ce844..bdb9ba81ff31 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -3422,7 +3422,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "function array",
+    "functionName" : "`array`",
     "sqlExpr" : "\"array(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
   },
   "queryContext" : [ {
@@ -3461,7 +3461,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "function coalesce",
+    "functionName" : "`coalesce`",
     "sqlExpr" : "\"coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/map.sql.out b/sql/core/src/test/resources/sql-tests/results/map.sql.out
index a550dbbec882..a9b577dd4c37 100644
--- a/sql/core/src/test/resources/sql-tests/results/map.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/map.sql.out
@@ -73,7 +73,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.MAP_CONTAINS_KEY_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "\"MAP\"",
-    "functionName" : "map_contains_key",
+    "functionName" : "`map_contains_key`",
     "leftType" : "\"MAP\"",
     "rightType" : "\"INT\"",
     "sqlExpr" : "\"map_contains_key(map(1, a, 2, b), 1)\""
@@ -98,7 +98,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.MAP_CONTAINS_KEY_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "\"MAP\"",
-    "functionName" : "map_contains_key",
+    "functionName" : "`map_contains_key`",
     "leftType" : "\"MAP\"",
     "rightType" : "\"STRING\"",
     "sqlExpr" : "\"map_contains_key(map(1, a, 2, b), 1)\""
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
index 0e2b0cf2789e..726356b7896d 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
@@ -95,7 +95,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"MAP\" or \"MAP, ARRAY>\")",
-    "functionName" : "function map_concat",
+    "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(tinyint_map1, array_map1)\""
   },
   "queryContext" : [ {
@@ -120,7 +120,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"MAP\" or \"MAP\")",
-    "functionName" : "function map_concat",
+    "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(boolean_map1, int_map2)\""
   },
   "queryContext" : [ {
@@ -145,7 +145,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"MAP\" or \"MAP, STRUCT>\")",
-    "functionName" : "function map_concat",
+    "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(int_map1, struct_map2)\""
   },
   "queryContext" : [ {
@@ -170,7 +170,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"MAP, STRUCT>\" or \"MAP, ARRAY>\")",
-    "functionName" : "function map_concat",
+    "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(struct_map1, array_map2)\""
   },
   "queryContext" : [ {
@@ -195,7 +195,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"MAP\" or \"MAP, ARRAY>\")",
-    "functionName" : "function map_concat",
+    "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(int_map1, array_map2)\""
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
index 54911d2a6fb6..ff8dd596ebe1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
@@ -918,7 +918,7 @@ class DataFrameAggregateSuite extends QueryTest
       errorClass = "DATATYPE_MISMATCH.INVALID_ORDERING_TYPE",
       sqlState = None,
       parameters = Map(
-        "functionName" -> "function max_by",
+        "functionName" -> "`max_by`",
         "dataType" -> "\"MAP\"",
         "sqlExpr" -> "\"max_by(x, y)\""
       ),
@@ -988,7 +988,7 @@ class DataFrameAggregateSuite extends QueryTest
       errorClass = "DATATYPE_MISMATCH.INVALID_ORDERING_TYPE",
       sqlState = None,
       parameters = Map(
-        "functionName" -> "function min_by",
+        "functionName" -> "`min_by`",
         "dataType" -> "\"MAP\"",
         "sqlExpr" -> "\"min_by(x, y)\""
       ),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
index c52cb85e119d..85877c97ed59 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
@@ -1012,7 +1012,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"map_concat(map1, map2)\"",
         "dataType" -> "(\"MAP, INT>\" or \"MAP\")",
-        "functionName" -> "function map_concat"),
+        "functionName" -> "`map_concat`"),
       context = ExpectedContext(
         fragment = "map_concat(map1, map2)",
         start = 0,
@@ -1028,7 +1028,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"map_concat(map1, map2)\"",
         "dataType" -> "(\"MAP, INT>\" or \"MAP\")",
-        "functionName" -> "function map_concat")
+        "functionName" -> "`map_concat`")
     )
 
     checkError(
@@ -1040,7 +1040,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"map_concat(map1, 12)\"",
         "dataType" -> "[\"MAP, INT>\", \"INT\"]",
-        "functionName" -> "function map_concat"),
+        "functionName" -> "`map_concat`"),
       context = ExpectedContext(
         fragment = "map_concat(map1, 12)",
         start = 0,
@@ -1056,7 +1056,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"map_concat(map1, 12)\"",
         "dataType" -> "[\"MAP, INT>\", \"INT\"]",
-        "functionName" -> "function map_concat")
+        "functionName" -> "`map_concat`")
     )
   }
 
@@ -3651,7 +3651,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"map_zip_with(mmi, mmi, lambdafunction(x, x, y, z))\"",
         "dataType" -> "\"MAP\"",
-        "functionName" -> "function map_zip_with"),
+        "functionName" -> "`map_zip_with`"),
       context = ExpectedContext(
         fragment = "map_zip_with(mmi, mmi, (x, y, z) -> x)",
         start = 0,
@@ -4289,7 +4289,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"greatest()\"",
-        "functionName" -> "greatest",
+        "functionName" -> "`greatest`",
         "expectedNum" -> "> 1",
         "actualNum" -> "0")
     )
@@ -4302,7 +4302,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"greatest()\"",
-        "functionName" -> "greatest",
+        "functionName" -> "`greatest`",
         "expectedNum" -> "> 1",
         "actualNum" -> "0"),
       context = ExpectedContext(
@@ -4319,7 +4319,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"least()\"",
-        "functionName" -> "least",
+        "functionName" -> "`least`",
         "expectedNum" -> "> 1",
         "actualNum" -> "0")
     )
@@ -4332,7 +4332,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       sqlState = None,
      parameters = Map(
         "sqlExpr" -> "\"least()\"",
-        "functionName" -> "least",
+        "functionName" -> "`least`",
         "expectedNum" -> "> 1",
         "actualNum" -> "0"),
       context = ExpectedContext(