diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
index 374eb070db1c..7fe8bd356ea9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
@@ -116,7 +116,9 @@ abstract class LogicalPlan
   def resolve(schema: StructType, resolver: Resolver): Seq[Attribute] = {
     schema.map { field =>
       resolve(field.name :: Nil, resolver).map {
-        case a: AttributeReference => a
+        case a: AttributeReference =>
+          // Keep the metadata in given schema.
+          a.withMetadata(field.metadata)
         case _ => throw QueryExecutionErrors.resolveCannotHandleNestedSchema(this)
       }.getOrElse {
         throw QueryCompilationErrors.cannotResolveAttributeError(
diff --git a/sql/core/src/test/resources/test-data/char.csv b/sql/core/src/test/resources/test-data/char.csv
new file mode 100644
index 000000000000..d2be68a15fc1
--- /dev/null
+++ b/sql/core/src/test/resources/test-data/char.csv
@@ -0,0 +1,4 @@
+color,name
+pink,Bob
+blue,Mike
+grey,Tom
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
index a91adb787838..3762c00ff1a1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
@@ -80,6 +80,7 @@ abstract class CSVSuite
   private val valueMalformedFile = "test-data/value-malformed.csv"
   private val badAfterGoodFile = "test-data/bad_after_good.csv"
   private val malformedRowFile = "test-data/malformedRow.csv"
+  private val charFile = "test-data/char.csv"
 
   /** Verifies data and schema. */
   private def verifyCars(
@@ -3226,6 +3227,29 @@ abstract class CSVSuite
       }
     }
   }
+
+  test("SPARK-48241: CSV parsing failure with char/varchar type columns") {
+    withTable("charVarcharTable") {
+      spark.sql(
+        s"""
+           |CREATE TABLE charVarcharTable(
+           |  color char(4),
+           |  name varchar(10))
+           |USING csv
+           |OPTIONS (
+           |  header "true",
+           |  path "${testFile(charFile)}"
+           |)
+          """.stripMargin)
+      val expected = Seq(
+        Row("pink", "Bob"),
+        Row("blue", "Mike"),
+        Row("grey", "Tom"))
+      checkAnswer(
+        sql("SELECT * FROM charVarcharTable"),
+        expected)
+    }
+  }
 }
 
 class CSVv1Suite extends CSVSuite {