diff --git a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/avro/HoodieSpark3_2AvroDeserializer.scala b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/avro/HoodieSpark3_2AvroDeserializer.scala
index 0275e2f635d3b..d839c73032cd4 100644
--- a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/avro/HoodieSpark3_2AvroDeserializer.scala
+++ b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/avro/HoodieSpark3_2AvroDeserializer.scala
@@ -18,13 +18,14 @@
 package org.apache.spark.sql.avro
 
 import org.apache.avro.Schema
-import org.apache.hudi.HoodieSparkUtils
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.DataType
 
 class HoodieSpark3_2AvroDeserializer(rootAvroType: Schema, rootCatalystType: DataType)
   extends HoodieAvroDeserializer {
 
-  private val avroDeserializer = new AvroDeserializer(rootAvroType, rootCatalystType, "EXCEPTION")
+  private val avroDeserializer = new AvroDeserializer(rootAvroType, rootCatalystType,
+    SQLConf.get.getConf(SQLConf.AVRO_REBASE_MODE_IN_READ))
 
   def deserialize(data: Any): Option[Any] = avroDeserializer.deserialize(data)
 }