Skip to content
Closed
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,9 @@ object JacksonUtils {

case at: ArrayType => verifyType(name, at.elementType)

case mt: MapType => verifyType(name, mt.keyType)
// When generating JSON from a MapType, the keys are rendered as strings
// (via `toString`), so we only need to verify that the values are valid for JSON.
case mt: MapType => verifyType(name, mt.valueType)

case udt: UserDefinedType[_] => verifyType(name, udt.sqlType)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import java.util.Calendar

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.UnresolvedException
import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils, GenericArrayData, PermissiveMode}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
Expand Down Expand Up @@ -610,4 +611,26 @@ class JsonExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
"""{"t":"2015-12-31T16:00:00"}"""
)
}

test("to_json: verify MapType's value type instead of key type") {
// Keys in map are treated as strings when converting to JSON. The type doesn't matter at all.
val mapType1 = MapType(CalendarIntervalType, IntegerType)
val schema1 = StructType(StructField("a", mapType1) :: Nil)
val struct1 = Literal.create(null, schema1)
checkEvaluation(
StructsToJson(Map.empty, struct1, gmtId),
null
)

// The value type must be valid for converting to JSON.
val mapType2 = MapType(IntegerType, CalendarIntervalType)
val schema2 = StructType(StructField("a", mapType2) :: Nil)
val struct2 = Literal.create(null, schema2)
intercept[UnresolvedException[_]] {
checkEvaluation(
StructsToJson(Map.empty, struct2, gmtId),
null
)
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -257,6 +257,18 @@ class JsonFunctionsSuite extends QueryTest with SharedSQLContext {
"A type of keys and values in map() must be string, but got"))
}

test("SPARK-21954: JacksonUtils should verify MapType's value type instead of key type") {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@viirya, how about puting this test around to_json unsupported type here and maybe use Scala function API for consistency?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Modified as suggestion.

// interval type is invalid for converting to JSON. However, the keys of a map are treated
// as strings, so its type doesn't matter.
checkAnswer(
sql("SELECT to_json(struct(map(interval 1 second, 'a')))"),
Row("""{"col1":{"interval 1 seconds":"a"}}""") :: Nil)
val e = intercept[AnalysisException] {
sql("SELECT to_json(struct(map('a', interval 1 second)))")
}
assert(e.getMessage.contains("Unable to convert column col1 of type calendarinterval to JSON"))
}

test("SPARK-19967 Support from_json in SQL") {
val df1 = Seq("""{"a": 1}""").toDS()
checkAnswer(
Expand Down