Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions core/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,11 @@
"Unable to convert column <name> of type <type> to JSON."
]
},
"CANNOT_DROP_ALL_FIELDS" : {
"message" : [
"Cannot drop all fields in struct."
]
},
"CAST_WITHOUT_SUGGESTION" : {
"message" : [
"cannot cast <srcType> to <targetType>."
Expand All @@ -155,6 +160,21 @@
"To convert values from <srcType> to <targetType>, you can use the functions <functionNames> instead."
]
},
"CREATE_MAP_KEY_DIFF_TYPES" : {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Could we make DATA_DIFF_TYPES more general and reuse it?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

image

Copy link
Contributor Author

@panbingkun panbingkun Nov 1, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

but
image
It is very hard to reuse it.
Passing values such as "map's keys" or "map's values" through `functionName` seems unreasonable.

"message" : [
"The given keys of function <functionName> should all be the same type, but they are <dataType>."
]
},
"CREATE_MAP_VALUE_DIFF_TYPES" : {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ditto

"message" : [
"The given values of function <functionName> should all be the same type, but they are <dataType>."
]
},
"CREATE_NAMED_STRUCT_WITHOUT_FOLDABLE_STRING" : {
"message" : [
"Only foldable `STRING` expressions are allowed to appear at odd position, but they are <inputExprs>."
]
},
"DATA_DIFF_TYPES" : {
"message" : [
"Input to <functionName> should all be the same type, but it's <dataType>."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.{Resolver, TypeCheckResult, TypeCoercion, UnresolvedAttribute, UnresolvedExtractValue}
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.{FUNC_ALIAS, FunctionBuilder}
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
import org.apache.spark.sql.catalyst.expressions.Cast._
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
Expand Down Expand Up @@ -202,16 +204,30 @@ case class CreateMap(children: Seq[Expression], useStringTypeWhenEmpty: Boolean)

override def checkInputDataTypes(): TypeCheckResult = {
if (children.size % 2 != 0) {
TypeCheckResult.TypeCheckFailure(
s"$prettyName expects a positive even number of arguments.")
DataTypeMismatch(
errorSubClass = "WRONG_NUM_ARGS",
messageParameters = Map(
"functionName" -> toSQLId(prettyName),
"expectedNum" -> "2n (n > 0)",
"actualNum" -> children.length.toString
)
)
} else if (!TypeCoercion.haveSameType(keys.map(_.dataType))) {
TypeCheckResult.TypeCheckFailure(
"The given keys of function map should all be the same type, but they are " +
keys.map(_.dataType.catalogString).mkString("[", ", ", "]"))
DataTypeMismatch(
errorSubClass = "CREATE_MAP_KEY_DIFF_TYPES",
messageParameters = Map(
"functionName" -> toSQLId(prettyName),
"dataType" -> keys.map(key => toSQLType(key.dataType)).mkString("[", ", ", "]")
)
)
} else if (!TypeCoercion.haveSameType(values.map(_.dataType))) {
TypeCheckResult.TypeCheckFailure(
"The given values of function map should all be the same type, but they are " +
values.map(_.dataType.catalogString).mkString("[", ", ", "]"))
DataTypeMismatch(
errorSubClass = "CREATE_MAP_VALUE_DIFF_TYPES",
messageParameters = Map(
"functionName" -> toSQLId(prettyName),
"dataType" -> values.map(value => toSQLType(value.dataType)).mkString("[", ", ", "]")
)
)
} else {
TypeUtils.checkForMapKeyType(dataType.keyType)
}
Expand Down Expand Up @@ -444,17 +460,32 @@ case class CreateNamedStruct(children: Seq[Expression]) extends Expression with

override def checkInputDataTypes(): TypeCheckResult = {
if (children.size % 2 != 0) {
TypeCheckResult.TypeCheckFailure(s"$prettyName expects an even number of arguments.")
DataTypeMismatch(
errorSubClass = "WRONG_NUM_ARGS",
messageParameters = Map(
"functionName" -> toSQLId(prettyName),
"expectedNum" -> "2n (n > 0)",
"actualNum" -> children.length.toString
)
)
} else {
val invalidNames = nameExprs.filterNot(e => e.foldable && e.dataType == StringType)
if (invalidNames.nonEmpty) {
TypeCheckResult.TypeCheckFailure(
s"Only foldable ${StringType.catalogString} expressions are allowed to appear at odd" +
s" position, got: ${invalidNames.mkString(",")}")
DataTypeMismatch(
errorSubClass = "CREATE_NAMED_STRUCT_WITHOUT_FOLDABLE_STRING",
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can NON_FOLDABLE_INPUT be reused?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

image

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It seems that would be hard to make NON_FOLDABLE_INPUT more general to cover the case. Let's introduce more specific error class for the case.

messageParameters = Map(
"inputExprs" -> invalidNames.map(toSQLExpr(_)).mkString("[", ", ", "]")
)
)
} else if (!names.contains(null)) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure("Field name should not be null")
DataTypeMismatch(
errorSubClass = "UNEXPECTED_NULL",
messageParameters = Map(
"exprName" -> nameExprs.map(toSQLExpr).mkString("[", ", ", "]")
)
)
}
}
}
Expand Down Expand Up @@ -668,10 +699,19 @@ case class UpdateFields(structExpr: Expression, fieldOps: Seq[StructFieldsOperat
override def checkInputDataTypes(): TypeCheckResult = {
val dataType = structExpr.dataType
if (!dataType.isInstanceOf[StructType]) {
TypeCheckResult.TypeCheckFailure("struct argument should be struct type, got: " +
dataType.catalogString)
DataTypeMismatch(
errorSubClass = "UNEXPECTED_INPUT_TYPE",
messageParameters = Map(
"paramIndex" -> "1",
"requiredType" -> toSQLType(StructType),
"inputSql" -> toSQLExpr(structExpr),
"inputType" -> toSQLType(structExpr.dataType))
)
} else if (newExprs.isEmpty) {
TypeCheckResult.TypeCheckFailure("cannot drop all fields in struct")
DataTypeMismatch(
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It seems that the original message did not clearly describe this error, the message doesn't look like DataTypeMismatch

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It is some kind of UNEXPECTED_INPUT_TYPE: op (drop field) + its args are not supported because they produce empty struct. I am ok w/ this particular error class.

errorSubClass = "CANNOT_DROP_ALL_FIELDS",
messageParameters = Map.empty
)
} else {
TypeCheckResult.TypeCheckSuccess
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,10 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
$"arrayField".array(StringType),
Symbol("mapField").map(StringType, LongType))

/**
 * Evaluates `expr` through `assertSuccess` (which forces analysis-time type checking)
 * and returns the [[AnalysisException]] that the check is expected to raise.
 */
private def analysisException(expr: Expression): AnalysisException =
  intercept[AnalysisException] {
    assertSuccess(expr)
  }

def assertError(expr: Expression, errorMessage: String): Unit = {
val e = intercept[AnalysisException] {
assertSuccess(expr)
Expand Down Expand Up @@ -483,29 +487,68 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
}

test("check types for CreateNamedStruct") {
// Odd argument count: named_struct requires name/value pairs, so 3 arguments
// must fail with WRONG_NUM_ARGS.
checkError(
exception = analysisException(CreateNamedStruct(Seq("a", "b", 2.0))),
errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
parameters = Map(
"sqlExpr" -> "\"named_struct(a, b, 2.0)\"",
"functionName" -> "`named_struct`",
"expectedNum" -> "2n (n > 0)",
"actualNum" -> "3")
)
// Non-string literal (1) at an odd (field-name) position: names must be
// foldable STRING expressions.
checkError(
exception = analysisException(CreateNamedStruct(Seq(1, "a", "b", 2.0))),
errorClass = "DATATYPE_MISMATCH.CREATE_NAMED_STRUCT_WITHOUT_FOLDABLE_STRING",
parameters = Map(
"sqlExpr" -> "\"named_struct(1, a, b, 2.0)\"",
"inputExprs" -> "[\"1\"]")
)
// A bound (non-foldable) string attribute at a name position is rejected for
// the same reason: the name is STRING-typed but not foldable.
checkError(
exception = analysisException(CreateNamedStruct(Seq($"a".string.at(0), "a", "b", 2.0))),
errorClass = "DATATYPE_MISMATCH.CREATE_NAMED_STRUCT_WITHOUT_FOLDABLE_STRING",
parameters = Map(
"sqlExpr" -> "\"named_struct(boundreference(), a, b, 2.0)\"",
"inputExprs" -> "[\"boundreference()\"]")
)
// A NULL literal as a field name is rejected with UNEXPECTED_NULL.
checkError(
exception = analysisException(CreateNamedStruct(Seq(Literal.create(null, StringType), "a"))),
errorClass = "DATATYPE_MISMATCH.UNEXPECTED_NULL",
parameters = Map(
"sqlExpr" -> "\"named_struct(NULL, a)\"",
"exprName" -> "[\"NULL\"]")
)
}

test("check types for CreateMap") {
// Odd argument count: map() requires key/value pairs, so 3 arguments must
// fail with WRONG_NUM_ARGS.
checkError(
exception = analysisException(CreateMap(Seq("a", "b", 2.0))),
errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
parameters = Map(
"sqlExpr" -> "\"map(a, b, 2.0)\"",
"functionName" -> "`map`",
"expectedNum" -> "2n (n > 0)",
"actualNum" -> "3")
)
// Heterogeneous key types (INT vs BOOLEAN): all keys must share one type,
// otherwise CREATE_MAP_KEY_DIFF_TYPES is raised.
checkError(
exception = analysisException(CreateMap(Seq(Literal(1),
Literal("a"), Literal(true), Literal("b")))),
errorClass = "DATATYPE_MISMATCH.CREATE_MAP_KEY_DIFF_TYPES",
parameters = Map(
"sqlExpr" -> "\"map(1, a, true, b)\"",
"functionName" -> "`map`",
"dataType" -> "[\"INT\", \"BOOLEAN\"]"
)
)
// Heterogeneous value types (INT vs BOOLEAN): all values must share one
// type, otherwise CREATE_MAP_VALUE_DIFF_TYPES is raised.
checkError(
exception = analysisException(CreateMap(Seq(Literal("a"),
Literal(1), Literal("b"), Literal(true)))),
errorClass = "DATATYPE_MISMATCH.CREATE_MAP_VALUE_DIFF_TYPES",
parameters = Map(
"sqlExpr" -> "\"map(a, 1, b, true)\"",
"functionName" -> "`map`",
"dataType" -> "[\"INT\", \"BOOLEAN\"]"
)
)
}

test("check types for ROUND/BROUND") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, UnresolvedExtractValue}
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext
import org.apache.spark.sql.catalyst.util._
Expand Down Expand Up @@ -314,6 +315,40 @@ class ComplexTypeSuite extends SparkFunSuite with ExpressionEvalHelper {
assert(errorSubClass == "INVALID_MAP_KEY_TYPE")
assert(messageParameters === Map("keyType" -> "\"MAP<INT, INT>\""))
}

// expects a positive even number of arguments
val map3 = CreateMap(Seq(Literal(1), Literal(2), Literal(3)))
assert(map3.checkInputDataTypes() ==
DataTypeMismatch(
errorSubClass = "WRONG_NUM_ARGS",
messageParameters = Map(
"functionName" -> "`map`",
"expectedNum" -> "2n (n > 0)",
"actualNum" -> "3")
)
)

// The given keys of function map should all be the same type
val map4 = CreateMap(Seq(Literal(1), Literal(2), Literal('a'), Literal(3)))
assert(map4.checkInputDataTypes() ==
DataTypeMismatch(
errorSubClass = "CREATE_MAP_KEY_DIFF_TYPES",
messageParameters = Map(
"functionName" -> "`map`",
"dataType" -> "[\"INT\", \"STRING\"]")
)
)

// The given values of function map should all be the same type
val map5 = CreateMap(Seq(Literal(1), Literal(2), Literal(3), Literal('a')))
assert(map5.checkInputDataTypes() ==
DataTypeMismatch(
errorSubClass = "CREATE_MAP_VALUE_DIFF_TYPES",
messageParameters = Map(
"functionName" -> "`map`",
"dataType" -> "[\"INT\", \"STRING\"]")
)
)
}

test("MapFromArrays") {
Expand Down Expand Up @@ -397,6 +432,18 @@ class ComplexTypeSuite extends SparkFunSuite with ExpressionEvalHelper {
create_row(UTF8String.fromString("x"), 2.0))
checkEvaluation(CreateNamedStruct(Seq("a", Literal.create(null, IntegerType))),
create_row(null))

// expects a positive even number of arguments
val namedStruct1 = CreateNamedStruct(Seq(Literal(1), Literal(2), Literal(3)))
assert(namedStruct1.checkInputDataTypes() ==
DataTypeMismatch(
errorSubClass = "WRONG_NUM_ARGS",
messageParameters = Map(
"functionName" -> "`named_struct`",
"expectedNum" -> "2n (n > 0)",
"actualNum" -> "3")
)
)
}

test("test dsl for complex type") {
Expand Down
2 changes: 1 addition & 1 deletion sql/core/src/main/scala/org/apache/spark/sql/Column.scala
Original file line number Diff line number Diff line change
Expand Up @@ -940,7 +940,7 @@ class Column(val expr: Expression) extends Logging {
*
* val df = sql("SELECT named_struct('a', 1, 'b', 2) struct_col")
* df.select($"struct_col".dropFields("a", "b"))
* // result: org.apache.spark.sql.AnalysisException: cannot resolve 'update_fields(update_fields(`struct_col`))' due to data type mismatch: cannot drop all fields in struct
* // result: org.apache.spark.sql.AnalysisException: [DATATYPE_MISMATCH.CANNOT_DROP_ALL_FIELDS] Cannot resolve "update_fields(struct_col, dropfield(), dropfield())" due to data type mismatch: Cannot drop all fields in struct.;
*
* val df = sql("SELECT CAST(NULL AS struct<a:int,b:int>) struct_col")
* df.select($"struct_col".dropFields("b"))
Expand Down
Loading