diff --git a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
index 85416b80cfbb7..f22fd936ec6e6 100644
--- a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
+++ b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
@@ -58,10 +58,10 @@ private[sql] class AvroDeserializer(
 
   private lazy val decimalConversions = new DecimalConversion()
 
-  private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInRead(
+  private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInRead(
     datetimeRebaseMode, "Avro")
 
-  private val timestampRebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInRead(
+  private val timestampRebaseFunc = DataSourceUtils.createTimestampRebaseFuncInRead(
     datetimeRebaseMode, "Avro")
 
   private val converter: Any => Option[Any] = rootCatalystType match {
diff --git a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
index 33c6022ff7b6d..af0920bd9973a 100644
--- a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
+++ b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
@@ -59,10 +59,10 @@ private[sql] class AvroSerializer(
     converter.apply(catalystData)
   }
 
-  private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInWrite(
+  private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInWrite(
     datetimeRebaseMode, "Avro")
 
-  private val timestampRebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInWrite(
+  private val timestampRebaseFunc = DataSourceUtils.createTimestampRebaseFuncInWrite(
     datetimeRebaseMode, "Avro")
 
   private val converter: Any => Any = {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
index b54747a25d5a3..146e5a8727e64 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
@@ -165,7 +165,7 @@ object DataSourceUtils {
       "Gregorian calendar.", null)
   }
 
-  def creteDateRebaseFuncInRead(
+  def createDateRebaseFuncInRead(
       rebaseMode: LegacyBehaviorPolicy.Value,
       format: String): Int => Int = rebaseMode match {
     case LegacyBehaviorPolicy.EXCEPTION => days: Int =>
@@ -177,7 +177,7 @@ object DataSourceUtils {
     case LegacyBehaviorPolicy.CORRECTED => identity[Int]
   }
 
-  def creteDateRebaseFuncInWrite(
+  def createDateRebaseFuncInWrite(
       rebaseMode: LegacyBehaviorPolicy.Value,
       format: String): Int => Int = rebaseMode match {
     case LegacyBehaviorPolicy.EXCEPTION => days: Int =>
@@ -189,7 +189,7 @@ object DataSourceUtils {
     case LegacyBehaviorPolicy.CORRECTED => identity[Int]
   }
 
-  def creteTimestampRebaseFuncInRead(
+  def createTimestampRebaseFuncInRead(
      rebaseMode: LegacyBehaviorPolicy.Value,
      format: String): Long => Long = rebaseMode match {
     case LegacyBehaviorPolicy.EXCEPTION => micros: Long =>
@@ -201,7 +201,7 @@ object DataSourceUtils {
     case LegacyBehaviorPolicy.CORRECTED => identity[Long]
   }
 
-  def creteTimestampRebaseFuncInWrite(
+  def createTimestampRebaseFuncInWrite(
      rebaseMode: LegacyBehaviorPolicy.Value,
      format: String): Long => Long = rebaseMode match {
     case LegacyBehaviorPolicy.EXCEPTION => micros: Long =>
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
index f65aef95b6c38..fbc8ba2d76ad1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
@@ -188,13 +188,13 @@ private[parquet] class ParquetRowConverter(
    */
   def currentRecord: InternalRow = currentRow
 
-  private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInRead(
+  private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInRead(
     datetimeRebaseMode, "Parquet")
 
-  private val timestampRebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInRead(
+  private val timestampRebaseFunc = DataSourceUtils.createTimestampRebaseFuncInRead(
     datetimeRebaseMode, "Parquet")
 
-  private val int96RebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInRead(
+  private val int96RebaseFunc = DataSourceUtils.createTimestampRebaseFuncInRead(
     int96RebaseMode, "Parquet INT96")
 
   // Converters for each field.
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala
index 26074719364a4..892546a4ea19c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala
@@ -82,16 +82,16 @@ class ParquetWriteSupport extends WriteSupport[InternalRow] with Logging {
   private val datetimeRebaseMode = LegacyBehaviorPolicy.withName(
     SQLConf.get.getConf(SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_WRITE))
 
-  private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInWrite(
+  private val dateRebaseFunc = DataSourceUtils.createDateRebaseFuncInWrite(
     datetimeRebaseMode, "Parquet")
 
-  private val timestampRebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInWrite(
+  private val timestampRebaseFunc = DataSourceUtils.createTimestampRebaseFuncInWrite(
     datetimeRebaseMode, "Parquet")
 
   private val int96RebaseMode = LegacyBehaviorPolicy.withName(
     SQLConf.get.getConf(SQLConf.LEGACY_PARQUET_INT96_REBASE_MODE_IN_WRITE))
 
-  private val int96RebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInWrite(
+  private val int96RebaseFunc = DataSourceUtils.createTimestampRebaseFuncInWrite(
     int96RebaseMode, "Parquet INT96")
 
   override def init(configuration: Configuration): WriteContext = {
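For reference, every helper renamed above shares one factory pattern: the rebase mode is matched once when the reader/writer is constructed, and the returned function (Int => Int for days since epoch, Long => Long for microseconds) is then applied to every value, avoiding per-record branching. The sketch below models that shape in plain Scala; the LegacyBehaviorPolicy stand-in, the cutoff constant, and the LEGACY arithmetic are simplified assumptions for illustration, not Spark's actual rebase logic (the real LEGACY branch delegates to org.apache.spark.sql.catalyst.util.RebaseDateTime).

```scala
// Minimal sketch of the rebase-function factory pattern, under the
// assumptions stated above. Not Spark's implementation.
object RebaseFuncSketch {

  // Stand-in for SQLConf.LegacyBehaviorPolicy.
  object LegacyBehaviorPolicy extends Enumeration {
    val EXCEPTION, LEGACY, CORRECTED = Value
  }

  // Hypothetical cutoff (roughly the epoch day of 1582-10-15); dates at or
  // after it agree in both calendars and never need rebasing.
  private val gregorianCutoverDay = -141427

  // Same shape as DataSourceUtils.createDateRebaseFuncInRead: the mode is
  // resolved once, and the returned Int => Int runs per value.
  def createDateRebaseFuncInRead(
      rebaseMode: LegacyBehaviorPolicy.Value,
      format: String): Int => Int = rebaseMode match {
    case LegacyBehaviorPolicy.EXCEPTION => days: Int =>
      if (days < gregorianCutoverDay) {
        throw new IllegalStateException(
          s"Reading ancient dates from $format is ambiguous; set a rebase mode")
      }
      days
    case LegacyBehaviorPolicy.LEGACY => days: Int =>
      // Placeholder shift; the real code rebases Julian days to Proleptic
      // Gregorian days via RebaseDateTime.
      if (days < gregorianCutoverDay) days - 10 else days
    case LegacyBehaviorPolicy.CORRECTED => identity[Int]
  }

  def main(args: Array[String]): Unit = {
    val rebase = createDateRebaseFuncInRead(LegacyBehaviorPolicy.CORRECTED, "Avro")
    println(rebase(18000)) // CORRECTED is a pass-through: prints 18000
  }
}
```

The timestamp variants in the diff follow the identical structure with Long => Long over microseconds, which is why the rename touches four factories but one pattern.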