@@ -33,6 +33,8 @@ protected HoodieFileReader newParquetFileReader(Configuration conf, Path path) {
conf.setIfUnset(SQLConf.PARQUET_BINARY_AS_STRING().key(), SQLConf.PARQUET_BINARY_AS_STRING().defaultValueString());
conf.setIfUnset(SQLConf.PARQUET_INT96_AS_TIMESTAMP().key(), SQLConf.PARQUET_INT96_AS_TIMESTAMP().defaultValueString());
conf.setIfUnset(SQLConf.CASE_SENSITIVE().key(), SQLConf.CASE_SENSITIVE().defaultValueString());
+// Using string value of this conf to preserve compatibility across Spark versions.
+conf.setIfUnset("spark.sql.legacy.parquet.nanosAsLong", "false");
return new HoodieSparkParquetReader(conf, path);
}
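
The hard-coded string key above is deliberate, as the in-line comment notes: the typed SQLConf entry for this flag only exists in Spark releases that ship it, so referencing the raw string keeps the module compiling against older versions. A minimal sketch of the pattern, assuming only Hadoop's Configuration.setIfUnset (the helper name is hypothetical):

import org.apache.hadoop.conf.Configuration

// Hypothetical helper illustrating the pattern from the diff above. The raw
// string key avoids a compile-time reference to a SQLConf constant that
// older Spark versions do not define.
def setNanosAsLongIfUnset(conf: Configuration): Unit = {
  // setIfUnset keeps any value the user or Spark has already configured.
  conf.setIfUnset("spark.sql.legacy.parquet.nanosAsLong", "false")
}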

@@ -58,6 +58,7 @@ private[hudi] trait SparkVersionsSupport {
def gteqSpark3_2_1: Boolean = getSparkVersion >= "3.2.1"
def gteqSpark3_2_2: Boolean = getSparkVersion >= "3.2.2"
def gteqSpark3_3: Boolean = getSparkVersion >= "3.3"
+def gteqSpark3_3_2: Boolean = getSparkVersion >= "3.3.2"

Contributor: Is this based on compile-time Spark version or runtime Spark version?

Contributor: Should be runtime, I think.

}

object HoodieSparkUtils extends SparkAdapterSupport with SparkVersionsSupport with Logging {
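
On the review question above: the comparisons in this trait are plain lexicographic string comparisons, evaluated against whatever version string the Spark classpath reports when the code executes. A sketch of why "runtime" is the plausible answer, under the assumption (not confirmed by this diff) that getSparkVersion reads org.apache.spark.SPARK_VERSION:

import org.apache.spark.SPARK_VERSION

// Sketch only: SPARK_VERSION is resolved from the Spark jars on the
// classpath at execution time, not at Hudi's compile time.
trait SparkVersionsSupportSketch {
  def getSparkVersion: String = SPARK_VERSION

  // Plain lexicographic comparison: correct for "3.3.2" >= "3.3", though it
  // would misorder a hypothetical "3.10.0" against "3.3".
  def gteqSpark3_3_2: Boolean = getSparkVersion >= "3.3.2"
}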
@@ -75,7 +75,10 @@ case class BaseFileOnlyRelation(override val sqlContext: SQLContext,

override def imbueConfigs(sqlContext: SQLContext): Unit = {
super.imbueConfigs(sqlContext)
-sqlContext.sparkSession.sessionState.conf.setConfString("spark.sql.parquet.enableVectorizedReader", "true")
+// TODO: issue with setting this to true in Spark 3.3.2
+if (!HoodieSparkUtils.gteqSpark3_3_2) {
+  sqlContext.sparkSession.sessionState.conf.setConfString("spark.sql.parquet.enableVectorizedReader", "true")
+}
}

protected override def composeRDD(fileSplits: Seq[HoodieBaseFileSplit],
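
With the guard above, Hudi no longer force-enables the vectorized reader on Spark 3.3.2 and later, so the effective value is whatever the session already carries. A hypothetical check from a user session (spark.conf.get with a default is standard Spark API; "true" mirrors Spark's own default for this key):

// Inspect the effective setting; falls back to "true" when never set.
val vectorized = spark.conf.get("spark.sql.parquet.enableVectorizedReader", "true")
println(s"vectorized parquet reader enabled: $vectorized")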
@@ -95,7 +95,11 @@ class Spark32PlusHoodieParquetFileFormat(private val shouldAppendPartitionValues
hadoopConf.setBoolean(
SQLConf.PARQUET_INT96_AS_TIMESTAMP.key,
sparkSession.sessionState.conf.isParquetINT96AsTimestamp)

+// Using string value of this conf to preserve compatibility across Spark versions.
+hadoopConf.setBoolean(
+  "spark.sql.legacy.parquet.nanosAsLong",
+  sparkSession.sessionState.conf.getConfString("spark.sql.legacy.parquet.nanosAsLong", "false").toBoolean
+)
val internalSchemaStr = hadoopConf.get(SparkInternalSchemaConverter.HOODIE_QUERY_SCHEMA)
// For Spark DataSource v1, there's no Physical Plan projection/schema pruning w/in Spark itself,
// therefore it's safe to do schema projection here
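
For completeness, a sketch of the consuming side of this hand-off (an assumption about where the flag is read; it is not shown in this diff): once copied into the Hadoop configuration, the Parquet read path can fetch the value with a plain boolean lookup, again without a compile-time tie to the Spark version that introduced the conf.

import org.apache.hadoop.conf.Configuration

// Hypothetical reader-side lookup of the propagated flag.
def nanosAsLong(hadoopConf: Configuration): Boolean =
  hadoopConf.getBoolean("spark.sql.legacy.parquet.nanosAsLong", false)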
pom.xml: 2 additions & 0 deletions

@@ -2213,6 +2213,7 @@
<spark3.version>${spark33.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3</sparkbundle.version>
+<scala12.version>2.12.15</scala12.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<hudi.spark.module>hudi-spark3.3.x</hudi.spark.module>
@@ -2337,6 +2338,7 @@
<spark3.version>${spark33.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.3</sparkbundle.version>
+<scala12.version>2.12.15</scala12.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<hudi.spark.module>hudi-spark3.3.x</hudi.spark.module>