docs/sql-migration-guide.md (2 changes: 2 additions, 0 deletions)
@@ -217,6 +217,8 @@ license: |
For example `SELECT timestamp 'tomorrow';`.

- Since Spark 3.0, the `size` function returns `NULL` for the `NULL` input. In Spark version 2.4 and earlier, this function gives `-1` for the same input. To restore the behavior before Spark 3.0, you can set `spark.sql.legacy.sizeOfNull` to `true`.

+- Since Spark 3.0, when the `array` function is called without any parameters, it returns an empty array of `NullType` (i.e. its type is `array<null>`). In Spark version 2.4 and earlier, the element type of the returned empty array is `StringType`.
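The documented behavior can be checked with a short sketch along these lines (illustrative only; it assumes a spark-shell style session where `spark` is already available):

```scala
import org.apache.spark.sql.functions.array
import org.apache.spark.sql.types.{ArrayType, NullType}

// Optionally restore the pre-3.0 behavior of `size` on NULL input, as described above.
spark.conf.set("spark.sql.legacy.sizeOfNull", "true")

// Since Spark 3.0, a parameterless array() is typed as array<null>,
// i.e. an empty array whose element type is NullType.
val schema = spark.range(1).select(array().as("x")).schema
assert(schema("x").dataType == ArrayType(NullType, containsNull = false))
```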

## Upgrading from Spark SQL 2.4 to 2.4.1

@@ -47,7 +47,7 @@ case class CreateArray(children: Seq[Expression]) extends Expression {
override def dataType: ArrayType = {
ArrayType(
TypeCoercion.findCommonTypeDifferentOnlyInNullFlags(children.map(_.dataType))
-        .getOrElse(StringType),
+        .getOrElse(NullType),
containsNull = children.exists(_.nullable))
}
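In practice this change matters when the `children` sequence is empty: `findCommonTypeDifferentOnlyInNullFlags` then has nothing to merge and the `getOrElse` fallback supplies the element type. A minimal stand-alone sketch of that fallback (`commonType` below is a hypothetical simplification, not Spark's actual `TypeCoercion` helper):

```scala
import org.apache.spark.sql.types.{ArrayType, DataType, NullType}

// Hypothetical simplification: the real helper unifies types that differ only in
// their null flags; the relevant point here is just that an empty input yields
// None, so the getOrElse fallback decides the element type.
def commonType(types: Seq[DataType]): Option[DataType] = types.headOption

// With no children the fallback is now NullType instead of StringType,
// so array() with no arguments is typed as array<null>.
val emptyArrayType = ArrayType(commonType(Nil).getOrElse(NullType), containsNull = false)
assert(emptyArrayType == ArrayType(NullType, containsNull = false))
```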

@@ -3400,12 +3400,9 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
).foreach(assertValuesDoNotChangeAfterCoalesceOrUnion(_))
}

test("SPARK-21281 use string types by default if array and map have no argument") {
test("SPARK-21281 use string types by default if map have no argument") {
val ds = spark.range(1)
var expectedSchema = new StructType()
.add("x", ArrayType(StringType, containsNull = false), nullable = false)
assert(ds.select(array().as("x")).schema == expectedSchema)
expectedSchema = new StructType()
.add("x", MapType(StringType, StringType, valueContainsNull = false), nullable = false)
assert(ds.select(map().as("x")).schema == expectedSchema)
}
@@ -3463,6 +3460,13 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
checkAnswer(df.select("x").filter("exists(i, x -> x % d == 0)"),
Seq(Row(1)))
}

test("SPARK-29462: Use null type by default if array have no argument") {
val ds = spark.range(1)
var expectedSchema = new StructType()
.add("x", ArrayType(NullType, containsNull = false), nullable = false)
assert(ds.select(array().as("x")).schema == expectedSchema)
}
}

object DataFrameFunctionsSuite {