Commit c022b37

Do not allow replacing complex types.
1 parent ecab485 commit c022b37
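
In short: ALTER TABLE ... ALTER COLUMN ... TYPE can no longer replace a struct, map, or array wholesale; nested types are updated through their element/key/value paths instead. A sketch of the resulting behavior, drawn from the tests below (the testcat / USING foo setup comes from the suite's test catalog):

// Rejected after this change: swapping out a whole complex type.
sql("ALTER TABLE testcat.ns1.table_name ALTER COLUMN points TYPE array<long>")
// => AnalysisException: ... update the element by updating points.element

// Still allowed: updating the nested element type directly.
sql("ALTER TABLE testcat.ns1.table_name ALTER COLUMN points.element TYPE long")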

File tree

3 files changed: +135 -2 lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala

Lines changed: 18 additions & 1 deletion

@@ -374,9 +374,26 @@ trait CheckAnalysis extends PredicateHelper {
           }
         case update: UpdateColumnType =>
           val field = findField("update", update.fieldNames)
+          val fieldName = update.fieldNames.quoted
+          update.newDataType match {
+            case _: StructType =>
+              throw new AnalysisException(
+                s"Cannot update ${table.name} field $fieldName type: " +
+                  s"update a struct by adding, deleting, or updating its fields")
+            case _: MapType =>
+              throw new AnalysisException(
+                s"Cannot update ${table.name} field $fieldName type: " +
+                  s"update a map by updating $fieldName.key or $fieldName.value")
+            case _: ArrayType =>
+              throw new AnalysisException(
+                s"Cannot update ${table.name} field $fieldName type: " +
+                  s"update the element by updating $fieldName.element")
+            case _: AtomicType =>
+              // update is okay
+          }
           if (!Cast.canUpCast(field.dataType, update.newDataType)) {
             throw new AnalysisException(
-              s"Cannot update ${table.name} field ${update.fieldNames}: " +
+              s"Cannot update ${table.name} field $fieldName: " +
                 s"${field.dataType.simpleString} cannot be cast to " +
                 s"${update.newDataType.simpleString}")
           }
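
For the AtomicType case that falls through, Cast.canUpCast still gates the change. A minimal sketch of its effect here, assuming the usual safe-widening semantics (illustration, not part of this diff):

import org.apache.spark.sql.catalyst.expressions.Cast
import org.apache.spark.sql.types._

// Widening an atomic type is a safe up-cast and passes the check...
Cast.canUpCast(IntegerType, LongType)   // true
// ...while narrowing is not, and raises the AnalysisException above.
Cast.canUpCast(LongType, IntegerType)   // false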

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala

Lines changed: 1 addition & 1 deletion

@@ -519,7 +519,7 @@ case class AlterTable(
 
   override def children: Seq[LogicalPlan] = Seq(table)
 
-  override lazy val resolved: Boolean = {
+  override lazy val resolved: Boolean = childrenResolved && {
     changes.forall {
       case add: AddColumn =>
         add.fieldNames match {
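
The point of this one-liner: AlterTable must not report itself resolved while its child table relation is still unresolved, otherwise the checks above could run against an unresolved table. childrenResolved is the standard LogicalPlan helper, roughly (paraphrased for illustration, not part of this diff):

// A plan's children must all be resolved before the plan itself can be.
def childrenResolved: Boolean = children.forall(_.resolved)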

sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala

Lines changed: 116 additions & 0 deletions

@@ -498,6 +498,24 @@ class DataSourceV2SQLSuite extends QueryTest with SharedSQLContext with BeforeAn
     }
   }
 
+  test("AlterTable: add complex column") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int) USING foo")
+      sql(s"ALTER TABLE $t ADD COLUMN points array<struct<x: double, y: double>>")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+        .add("id", IntegerType)
+        .add("points", ArrayType(StructType(Seq(
+          StructField("x", DoubleType),
+          StructField("y", DoubleType))))))
+    }
+  }
+
   test("AlterTable: add nested column with comment") {
     val t = "testcat.ns1.table_name"
     withTable(t) {
@@ -563,6 +581,104 @@ class DataSourceV2SQLSuite extends QueryTest with SharedSQLContext with BeforeAn
     }
   }
 
+  test("AlterTable: update column with struct type fails") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, point struct<x: double, y: double>) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ALTER COLUMN point TYPE struct<x: double, y: double, z: double>")
+      }
+
+      assert(exc.getMessage.contains("point"))
+      assert(exc.getMessage.contains("update a struct by adding, deleting, or updating its fields"))
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+        .add("id", IntegerType)
+        .add("point", StructType(Seq(
+          StructField("x", DoubleType),
+          StructField("y", DoubleType)))))
+    }
+  }
+
+  test("AlterTable: update column with array type fails") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points array<int>) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ALTER COLUMN points TYPE array<long>")
+      }
+
+      assert(exc.getMessage.contains("update the element by updating points.element"))
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+        .add("id", IntegerType)
+        .add("points", ArrayType(IntegerType)))
+    }
+  }
+
+  test("AlterTable: update column array element type") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, points array<int>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN points.element TYPE long")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+        .add("id", IntegerType)
+        .add("points", ArrayType(LongType)))
+    }
+  }
+
+  test("AlterTable: update column with map type fails") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, m map<string, int>) USING foo")
+
+      val exc = intercept[AnalysisException] {
+        sql(s"ALTER TABLE $t ALTER COLUMN m TYPE map<string, long>")
+      }
+
+      assert(exc.getMessage.contains("update a map by updating m.key or m.value"))
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+        .add("id", IntegerType)
+        .add("m", MapType(StringType, IntegerType)))
+    }
+  }
+
+  test("AlterTable: update column map value type") {
+    val t = "testcat.ns1.table_name"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id int, m map<string, int>) USING foo")
+      sql(s"ALTER TABLE $t ALTER COLUMN m.value TYPE long")
+
+      val testCatalog = spark.catalog("testcat").asTableCatalog
+      val table = testCatalog.loadTable(Identifier.of(Array("ns1"), "table_name"))
+
+      assert(table.name == "testcat.ns1.table_name")
+      assert(table.schema == new StructType()
+        .add("id", IntegerType)
+        .add("m", MapType(StringType, LongType)))
+    }
+  }
+
   test("AlterTable: update nested type in map key") {
     val t = "testcat.ns1.table_name"
     withTable(t) {