diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 6c4aee4f58b7..c11186ebc074 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -218,7 +218,7 @@ object FunctionRegistry {
     expression[PosExplode]("posexplode"),
     expressionGeneratorOuter[PosExplode]("posexplode_outer"),
     expression[Rand]("rand"),
-    expression[Rand]("random"),
+    expression[Rand]("random", true),
     expression[Randn]("randn"),
     expression[Stack]("stack"),
     expression[CaseWhen]("when"),
@@ -235,7 +235,7 @@ object FunctionRegistry {
     expression[BRound]("bround"),
     expression[Cbrt]("cbrt"),
     expression[Ceil]("ceil"),
-    expression[Ceil]("ceiling"),
+    expression[Ceil]("ceiling", true),
     expression[Cos]("cos"),
     expression[Cosh]("cosh"),
     expression[Conv]("conv"),
@@ -252,12 +252,12 @@ object FunctionRegistry {
     expression[Log1p]("log1p"),
     expression[Log2]("log2"),
     expression[Log]("ln"),
-    expression[Remainder]("mod"),
+    expression[Remainder]("mod", true),
     expression[UnaryMinus]("negative"),
     expression[Pi]("pi"),
     expression[Pmod]("pmod"),
     expression[UnaryPositive]("positive"),
-    expression[Pow]("pow"),
+    expression[Pow]("pow", true),
     expression[Pow]("power"),
     expression[ToRadians]("radians"),
     expression[Rint]("rint"),
@@ -265,7 +265,7 @@ object FunctionRegistry {
     expression[ShiftLeft]("shiftleft"),
     expression[ShiftRight]("shiftright"),
     expression[ShiftRightUnsigned]("shiftrightunsigned"),
-    expression[Signum]("sign"),
+    expression[Signum]("sign", true),
     expression[Signum]("signum"),
     expression[Sin]("sin"),
     expression[Sinh]("sinh"),
@@ -323,12 +323,12 @@ object FunctionRegistry {

     // string functions
     expression[Ascii]("ascii"),
-    expression[Chr]("char"),
+    expression[Chr]("char", true),
     expression[Chr]("chr"),
     expression[Base64]("base64"),
     expression[BitLength]("bit_length"),
-    expression[Length]("char_length"),
-    expression[Length]("character_length"),
+    expression[Length]("char_length", true),
+    expression[Length]("character_length", true),
     expression[ConcatWs]("concat_ws"),
     expression[Decode]("decode"),
     expression[Elt]("elt"),
@@ -351,7 +351,7 @@ object FunctionRegistry {
     expression[JsonTuple]("json_tuple"),
     expression[ParseUrl]("parse_url"),
     expression[StringLocate]("position"),
-    expression[FormatString]("printf"),
+    expression[FormatString]("printf", true),
     expression[RegExpExtract]("regexp_extract"),
     expression[RegExpReplace]("regexp_replace"),
     expression[StringRepeat]("repeat"),
@@ -364,21 +364,21 @@ object FunctionRegistry {
     expression[SoundEx]("soundex"),
     expression[StringSpace]("space"),
     expression[StringSplit]("split"),
-    expression[Substring]("substr"),
+    expression[Substring]("substr", true),
     expression[Substring]("substring"),
     expression[Left]("left"),
     expression[Right]("right"),
     expression[SubstringIndex]("substring_index"),
     expression[StringTranslate]("translate"),
     expression[StringTrim]("trim"),
-    expression[Upper]("ucase"),
+    expression[Upper]("ucase", true),
     expression[UnBase64]("unbase64"),
     expression[Unhex]("unhex"),
     expression[Upper]("upper"),
     expression[XPathList]("xpath"),
     expression[XPathBoolean]("xpath_boolean"),
     expression[XPathDouble]("xpath_double"),
-    expression[XPathDouble]("xpath_number"),
+    expression[XPathDouble]("xpath_number", true),
     expression[XPathFloat]("xpath_float"),
     expression[XPathInt]("xpath_int"),
     expression[XPathLong]("xpath_long"),
@@ -393,7 +393,7 @@ object FunctionRegistry {
     expression[DateAdd]("date_add"),
     expression[DateFormatClass]("date_format"),
     expression[DateSub]("date_sub"),
-    expression[DayOfMonth]("day"),
+    expression[DayOfMonth]("day", true),
     expression[DayOfYear]("dayofyear"),
     expression[DayOfMonth]("dayofmonth"),
     expression[FromUnixTime]("from_unixtime"),
@@ -404,7 +404,7 @@ object FunctionRegistry {
     expression[Month]("month"),
     expression[MonthsBetween]("months_between"),
     expression[NextDay]("next_day"),
-    expression[CurrentTimestamp]("now"),
+    expression[CurrentTimestamp]("now", true),
     expression[Quarter]("quarter"),
     expression[Second]("second"),
     expression[ParseToTimestamp]("to_timestamp"),
@@ -445,7 +445,7 @@ object FunctionRegistry {
     expression[MapConcat]("map_concat"),
     expression[Size]("size"),
     expression[Slice]("slice"),
-    expression[Size]("cardinality"),
+    expression[Size]("cardinality", true),
     expression[ArraysZip]("arrays_zip"),
     expression[SortArray]("sort_array"),
     expression[Shuffle]("shuffle"),
@@ -478,7 +478,7 @@ object FunctionRegistry {
     expression[Uuid]("uuid"),
     expression[Murmur3Hash]("hash"),
     expression[XxHash64]("xxhash64"),
-    expression[Sha1]("sha"),
+    expression[Sha1]("sha", true),
     expression[Sha1]("sha1"),
     expression[Sha2]("sha2"),
     expression[SparkPartitionID]("spark_partition_id"),
@@ -488,7 +488,7 @@ object FunctionRegistry {
     expression[MonotonicallyIncreasingID]("monotonically_increasing_id"),
     expression[CurrentDatabase]("current_database"),
     expression[CallMethodViaReflection]("reflect"),
-    expression[CallMethodViaReflection]("java_method"),
+    expression[CallMethodViaReflection]("java_method", true),
     expression[SparkVersion]("version"),
     expression[TypeOf]("typeof"),

@@ -590,7 +590,9 @@ object FunctionRegistry {
       if (varargCtor.isDefined) {
        // If there is an apply method that accepts Seq[Expression], use that one.
        try {
-          varargCtor.get.newInstance(expressions).asInstanceOf[Expression]
+          val exp = varargCtor.get.newInstance(expressions).asInstanceOf[Expression]
+          if (setAlias) exp.setTagValue(FUNC_ALIAS, name)
+          exp
        } catch {
          // the exception is an invocation exception. To get a meaningful message, we need the
          // cause.
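Reviewer note on the plumbing: `expression[T](name, setAlias = true)` makes the registry builder stamp the registered SQL name onto each new expression instance through the `FUNC_ALIAS` tree-node tag (upstream it is a `TreeNodeTag[String]("functionAliasName")` on `FunctionRegistry`), and `prettyName` then prefers that tag over the hard-coded default. A minimal standalone sketch of the pattern, using simplified stand-ins rather than Spark's `TreeNode`/`TreeNodeTag` classes:

```scala
// Simplified model of the FUNC_ALIAS mechanism; these are NOT Spark's classes.
final case class Tag[T](name: String)

trait TaggedNode {
  private val tags = scala.collection.mutable.Map.empty[Tag[_], Any]
  def setTagValue[T](tag: Tag[T], value: T): Unit = tags(tag) = value
  def getTagValue[T](tag: Tag[T]): Option[T] = tags.get(tag).map(_.asInstanceOf[T])
}

object Registry {
  val FuncAlias: Tag[String] = Tag[String]("functionAliasName")

  // Mirrors what the builder does when registered with setAlias = true.
  def build[N <: TaggedNode](node: N, name: String, setAlias: Boolean): N = {
    if (setAlias) node.setTagValue(FuncAlias, name)
    node
  }
}

// Stand-in for Ceil: canonical display name "CEIL", alias-aware prettyName.
final class CeilLike extends TaggedNode {
  def prettyName: String = getTagValue(Registry.FuncAlias).getOrElse("CEIL")
}

object FuncAliasDemo extends App {
  val viaCeil = Registry.build(new CeilLike, "ceil", setAlias = false)
  val viaCeiling = Registry.build(new CeilLike, "ceiling", setAlias = true)
  println(viaCeil.prettyName)    // CEIL
  println(viaCeiling.prettyName) // ceiling
}
```

Because the tag lives on the instance, two occurrences of the same expression class in one query can print different names, depending on which SQL name each call site used.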
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
index 65bb9a8c642b..e6a4c8f1d374 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
 import java.lang.reflect.{Method, Modifier}

 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
 import org.apache.spark.sql.types._
@@ -55,7 +55,7 @@ import org.apache.spark.util.Utils
 case class CallMethodViaReflection(children: Seq[Expression])
   extends Expression with CodegenFallback {

-  override def prettyName: String = "reflect"
+  override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("reflect")

   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.size < 2) {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index 1599321982ce..f29ece2e03b0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
 import java.util.Locale

 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult, TypeCoercion}
 import org.apache.spark.sql.catalyst.expressions.aggregate.DeclarativeAggregate
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
@@ -258,7 +258,8 @@ abstract class Expression extends TreeNode[Expression] {
   * Returns a user-facing string representation of this expression's name.
   * This should usually match the name of the function in SQL.
   */
-  def prettyName: String = nodeName.toLowerCase(Locale.ROOT)
+  def prettyName: String =
+    getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(nodeName.toLowerCase(Locale.ROOT))

  protected def flatArguments: Iterator[Any] = stringArgs.flatMap {
    case t: Iterable[_] => t
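This `Expression.prettyName` change is the heart of the patch: the alias lookup that previously lived only in a couple of expressions now runs for every node. As I read it, `FUNC_ALIAS` was originally introduced for `first_value`/`last_value`, which is why `First` and `Last` carried their own overrides; those become redundant in the next two hunks. A simplified model of the inheritance effect (not Spark's classes):

```scala
import java.util.Locale

// Why the per-class overrides in First and Last can be deleted: the base
// default now performs the same alias lookup for every expression.
trait ExprLike {
  def nodeName: String = getClass.getSimpleName
  def aliasTag: Option[String] = None // stands in for getTagValue(FUNC_ALIAS)
  def prettyName: String = aliasTag.getOrElse(nodeName.toLowerCase(Locale.ROOT))
}

object DefaultPrettyNameDemo extends App {
  case class First(override val aliasTag: Option[String] = None) extends ExprLike
  println(First().prettyName)                    // first
  println(First(Some("first_value")).prettyName) // first_value
}
```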
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/First.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/First.scala
index 210acf33fc43..2c0060c22a86 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/First.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/First.scala
@@ -117,7 +117,5 @@ case class First(child: Expression, ignoreNullsExpr: Expression)

   override lazy val evaluateExpression: AttributeReference = first

-  override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("first")
-
   override def toString: String = s"$prettyName($child)${if (ignoreNulls) " ignore nulls"}"
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Last.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Last.scala
index 2c89a4b973a7..6793ac7632ff 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Last.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Last.scala
@@ -115,7 +115,5 @@ case class Last(child: Expression, ignoreNullsExpr: Expression)

   override lazy val evaluateExpression: AttributeReference = last

-  override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("last")
-
   override def toString: String = s"$prettyName($child)${if (ignoreNulls) " ignore nulls"}"
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 215e88a2cc8a..6a64819aabb4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.catalyst.expressions

 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult, TypeCoercion}
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
 import org.apache.spark.sql.catalyst.util.{IntervalUtils, TypeUtils}
@@ -457,6 +457,18 @@ case class Remainder(left: Expression, right: Expression) extends DivModLike {
   override def symbol: String = "%"
   override def decimalMethod: String = "remainder"

+  override def toString: String = {
+    getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(sqlOperator) match {
+      case operator if operator == sqlOperator => s"($left $sqlOperator $right)"
+      case funcName => s"$funcName($left, $right)"
+    }
+  }
+  override def sql: String = {
+    getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(sqlOperator) match {
+      case operator if operator == sqlOperator => s"(${left.sql} $sqlOperator ${right.sql})"
+      case funcName => s"$funcName(${left.sql}, ${right.sql})"
+    }
+  }
+
   private lazy val mod: (Any, Any) => Any = dataType match {
     // special cases to make float/double primitive types faster
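`Remainder` needs the extra operator-vs-function split because `%` and `mod` resolve to the same expression class: with no alias tag the node keeps printing as an infix operator, while the `mod` alias switches both `toString` and `sql` to function-call syntax. A runnable sketch of the dispatch, with the alias passed explicitly instead of read from a tree-node tag:

```scala
// Standalone mirror of the new Remainder display logic.
object RemainderDisplayDemo extends App {
  val sqlOperator = "%"

  def render(alias: Option[String], left: String, right: String): String =
    alias.getOrElse(sqlOperator) match {
      case op if op == sqlOperator => s"($left $sqlOperator $right)" // registered as "%"
      case funcName => s"$funcName($left, $right)"                   // registered as "mod"
    }

  println(render(None, "7", "2"))        // prints: (7 % 2)
  println(render(Some("mod"), "7", "2")) // prints: mod(7, 2)
}
```

This is exactly the difference visible later in the operators.sql.out hunk, where `(7 % 2):int` becomes `mod(7, 2):int`.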
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 1e83a568995e..d785c9ad088f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -29,6 +29,7 @@ import org.apache.commons.text.StringEscapeUtils
 import org.apache.spark.SparkUpgradeException
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
 import org.apache.spark.sql.catalyst.util.{DateTimeUtils, LegacyDateFormats, TimestampFormatter}
@@ -99,7 +100,8 @@ case class CurrentTimestamp() extends LeafExpression with CodegenFallback {

   override def eval(input: InternalRow): Any = currentTimestamp()

-  override def prettyName: String = "current_timestamp"
+  override def prettyName: String =
+    getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("current_timestamp")
 }

 /**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
index d5b959b91c23..66e6334e3a45 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
@@ -21,7 +21,7 @@ import java.{lang => jl}
 import java.util.Locale

 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
@@ -62,8 +62,8 @@ abstract class UnaryMathExpression(val f: Double => Double, name: String)
   override def inputTypes: Seq[AbstractDataType] = Seq(DoubleType)
   override def dataType: DataType = DoubleType
   override def nullable: Boolean = true
-  override def toString: String = s"$name($child)"
-  override def prettyName: String = name
+  override def toString: String = s"$prettyName($child)"
+  override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(name)

   protected override def nullSafeEval(input: Any): Any = {
     f(input.asInstanceOf[Double])
@@ -115,9 +115,9 @@ abstract class BinaryMathExpression(f: (Double, Double) => Double, name: String)

   override def inputTypes: Seq[DataType] = Seq(DoubleType, DoubleType)

-  override def toString: String = s"$name($left, $right)"
+  override def toString: String = s"$prettyName($left, $right)"

-  override def prettyName: String = name
+  override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(name)

   override def dataType: DataType = DoubleType
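In the math expressions, note that `toString` switches from the constructor's `name` to `prettyName`. Without that, an aliased node would still print its canonical name (e.g. `CEIL`) in plan strings even though `sql` resolved the alias. A standalone sketch of the effect, with invented class names:

```scala
// Simplified model of the UnaryMathExpression change: toString delegates to
// prettyName, so the alias shows up consistently in every rendering.
object MathNameDemo extends App {
  abstract class UnaryMathLike(name: String) {
    var aliasTag: Option[String] = None // stands in for the FUNC_ALIAS tag
    def prettyName: String = aliasTag.getOrElse(name)
    override def toString: String = s"$prettyName(child)" // was: s"$name(child)"
  }
  class CeilLike extends UnaryMathLike("CEIL")

  val plain = new CeilLike
  val aliased = new CeilLike
  aliased.aliasTag = Some("ceiling")
  println(plain)   // CEIL(child)
  println(aliased) // ceiling(child)
}
```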
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
index cc09f601db9c..50a90ae40497 100755
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
@@ -27,7 +27,7 @@ import scala.collection.mutable.ArrayBuffer
 import org.apache.commons.codec.binary.{Base64 => CommonsBase64}

 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
 import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData, TypeUtils}
@@ -1450,7 +1450,7 @@ case class ParseUrl(children: Seq[Expression])
 // scalastyle:on line.size.limit
 case class FormatString(children: Expression*) extends Expression with ImplicitCastInputTypes {

-  require(children.nonEmpty, "format_string() should take at least 1 argument")
+  require(children.nonEmpty, s"$prettyName() should take at least 1 argument")

   override def foldable: Boolean = children.forall(_.foldable)
   override def nullable: Boolean = children(0).nullable
@@ -1517,7 +1517,8 @@ case class FormatString(children: Expression*) extends Expression with ImplicitCastInputTypes
     }""")
   }

-  override def prettyName: String = "format_string"
+  override def prettyName: String = getTagValue(
+    FunctionRegistry.FUNC_ALIAS).getOrElse("format_string")
 }

 /**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/xml/xpath.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/xml/xpath.scala
index 073b45af51ca..55e06cb9e847 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/xml/xpath.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/xml/xpath.scala
@@ -17,7 +17,7 @@
 package org.apache.spark.sql.catalyst.expressions.xml

-import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.TypeCheckFailure
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
@@ -160,7 +160,8 @@ case class XPathFloat(xml: Expression, path: Expression) extends XPathExtract {
   """)
 // scalastyle:on line.size.limit
 case class XPathDouble(xml: Expression, path: Expression) extends XPathExtract {
-  override def prettyName: String = "xpath_double"
+  override def prettyName: String =
+    getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("xpath_double")

   override def dataType: DataType = DoubleType

   override def nullSafeEval(xml: Any, path: Any): Any = {
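The remaining hunks update golden result files. Auto-generated column names are derived from the analyzed expression's `sql`/`prettyName`, so every schema line that previously echoed the canonical function name now echoes the alias the query actually used. A quick way to observe the effect locally, assuming a build that contains this patch (exact generated strings can vary across Spark versions):

```scala
import org.apache.spark.sql.SparkSession

object AliasColumnNameDemo extends App {
  val spark = SparkSession.builder()
    .master("local[1]").appName("alias-demo").getOrCreate()
  // Auto-generated column names follow the analyzed expression's sql form.
  println(spark.sql("SELECT mod(7, 2)").columns.head)      // mod(7, 2)   (was (7 % 2))
  println(spark.sql("SELECT ceiling(-0.10)").columns.head) // ceiling(...) (was CEIL(...))
  spark.stop()
}
```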
diff --git a/sql/core/src/test/resources/sql-tests/results/operators.sql.out b/sql/core/src/test/resources/sql-tests/results/operators.sql.out
index 083410f8807c..cf857cf9f98a 100644
--- a/sql/core/src/test/resources/sql-tests/results/operators.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/operators.sql.out
@@ -293,7 +293,7 @@ struct
 -- !query
 select ceiling(0)
 -- !query schema
-struct
+struct
 -- !query output
 0

@@ -301,7 +301,7 @@ struct
 -- !query
 select ceiling(1)
 -- !query schema
-struct
+struct
 -- !query output
 1

@@ -317,7 +317,7 @@ struct
 -- !query
 select ceiling(1234567890123456)
 -- !query schema
-struct
+struct
 -- !query output
 1234567890123456

@@ -333,7 +333,7 @@ struct
 -- !query
 select ceiling(-0.10)
 -- !query schema
-struct
+struct
 -- !query output
 0

@@ -389,7 +389,7 @@ true
 -- !query
 select mod(7, 2), mod(7, 0), mod(0, 2), mod(7, null), mod(null, 2), mod(null, null)
 -- !query schema
-struct<(7 % 2):int,(7 % 0):int,(0 % 2):int,(7 % CAST(NULL AS INT)):int,(CAST(NULL AS INT) % 2):int,(CAST(NULL AS DOUBLE) % CAST(NULL AS DOUBLE)):double>
+struct<mod(7, 2):int,mod(7, 0):int,mod(0, 2):int,mod(7, CAST(NULL AS INT)):int,mod(CAST(NULL AS INT), 2):int,mod(CAST(NULL AS DOUBLE), CAST(NULL AS DOUBLE)):double>
 -- !query output
 1 NULL 0 NULL NULL NULL

@@ -405,7 +405,7 @@ struct
 -- !query
 select CHAR_LENGTH('abc')
 -- !query schema
-struct<length(abc):int>
+struct<char_length(abc):int>
 -- !query output
 3

@@ -413,7 +413,7 @@ struct
 -- !query
 select CHARACTER_LENGTH('abc')
 -- !query schema
-struct<length(abc):int>
+struct<character_length(abc):int>
 -- !query output
 3
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/insert.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/insert.sql.out
index 1046d0ec86bb..63ad74aac32e 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/insert.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/insert.sql.out
@@ -64,7 +64,7 @@ struct<>
 -- !query
 select col1, col2, char_length(col3) from inserttest
 -- !query schema
-struct<col1:int,col2:int,length(col3):int>
+struct<col1:int,col2:int,char_length(col3):int>
 -- !query output
 30 50 10000
 NULL 3 7
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
index 65b6641983a1..e59b9d5b63a4 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
@@ -4404,7 +4404,7 @@ struct<>
 -- !query
 SELECT a, ceil(a), ceiling(a), floor(a), round(a) FROM ceil_floor_round
 -- !query schema
-struct
+struct
 -- !query output
 -0.000001000000000000 0 0 -1 0
 -5.499999000000000000 -5 -5 -6 -5
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
index 3b26d561d20c..5f89c799498a 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
@@ -627,7 +627,7 @@ struct<>
 -- !query
 SELECT substr(f1, 99995) from toasttest
 -- !query schema
-struct<substring(f1, 99995, 2147483647):string>
+struct<substr(f1, 99995, 2147483647):string>
 -- !query output
 567890
 567890
@@ -638,7 +638,7 @@ struct<>
 -- !query
 SELECT substr(f1, 99995, 10) from toasttest
 -- !query schema
-struct<substring(f1, 99995, 10):string>
+struct<substr(f1, 99995, 10):string>
 -- !query output
 567890
 567890
diff --git a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
index 708dbb404c28..042d332bdb5c 100644
--- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
@@ -111,7 +111,7 @@ struct>
 -- !query
 SELECT substr('Spark SQL', 5)
 -- !query schema
-struct<substring(Spark SQL, 5, 2147483647):string>
+struct<substr(Spark SQL, 5, 2147483647):string>
 -- !query output
 k SQL

@@ -119,7 +119,7 @@ struct>
 -- !query
 SELECT substr('Spark SQL', -3)
 -- !query schema
-struct<substring(Spark SQL, -3, 2147483647):string>
+struct<substr(Spark SQL, -3, 2147483647):string>
 -- !query output
 SQL

@@ -127,7 +127,7 @@ struct>
 -- !query
 SELECT substr('Spark SQL', 5, 1)
 -- !query schema
-struct<substring(Spark SQL, 5, 1):string>
+struct<substr(Spark SQL, 5, 1):string>
 -- !query output
 k
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out
index f841adf89612..e47decbd3392 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out
@@ -285,7 +285,7 @@ struct
 -- !query
 SELECT day( '1996-01-10')
 FROM t
 -- !query schema
-struct<dayofmonth(CAST(1996-01-10 AS DATE)):int>
+struct<day(CAST(1996-01-10 AS DATE)):int>
 -- !query output
 10
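Reviewer note on the golden files: several `struct<...>` schema strings above were mangled in transit; the ones restored here follow mechanically from the code changes (e.g. the `mod(...)` form comes straight from the new `Remainder.sql`), while lines that could not be recovered verbatim are left as-is. These .out files are generated rather than hand-edited; if memory serves, re-running `SPARK_GENERATE_GOLDEN_FILES=1 build/sbt "sql/testOnly *SQLQueryTestSuite"` regenerates them, so the renames fall out automatically once `prettyName` resolves the alias.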
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
index b59170527411..16c58028adfe 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
@@ -116,8 +116,8 @@ class ExplainSuite extends QueryTest with SharedSparkSession {
     // plan should show the rewritten aggregate expression.
     val df = sql("SELECT k, every(v), some(v), any(v) FROM test_agg GROUP BY k")
     checkKeywordsExistsInExplain(df,
-      "Aggregate [k#x], [k#x, min(v#x) AS every(v)#x, max(v#x) AS some(v)#x, " +
-        "max(v#x) AS any(v)#x]")
+      "Aggregate [k#x], [k#x, every(v#x) AS every(v)#x, some(v#x) AS some(v)#x, " +
+        "any(v#x) AS any(v)#x]")
   }
 }
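Beyond the updated golden files, a targeted unit test could pin the user-visible naming directly. A hypothetical sketch, not part of this patch (the suite name is invented, and the expected strings mirror the golden outputs above, so they would need to match the actual build):

```scala
import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.test.SharedSparkSession

// Hypothetical regression suite for alias-aware auto-generated column names.
class FunctionAliasSuite extends QueryTest with SharedSparkSession {
  test("registered alias drives auto-generated column names") {
    // Expected names are assumed from the golden files in this patch.
    assert(sql("SELECT mod(7, 2)").columns.head === "mod(7, 2)")
    assert(sql("SELECT substr('Spark SQL', 5)").columns.head
      === "substr(Spark SQL, 5, 2147483647)")
  }
}
```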