From 5ff3061242431ae45362762d6eb628a5e9b39eb6 Mon Sep 17 00:00:00 2001
From: Gary Li
Date: Sun, 10 Jan 2021 10:53:07 +0800
Subject: [PATCH] [MINOR] fix spark 3 build for incremental query on MOR

---
 .../org/apache/hudi/MergeOnReadIncrementalRelation.scala | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/hudi/MergeOnReadIncrementalRelation.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/hudi/MergeOnReadIncrementalRelation.scala
index d7b8cffcf0464..c85b972910c54 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/hudi/MergeOnReadIncrementalRelation.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/hudi/MergeOnReadIncrementalRelation.scala
@@ -17,8 +17,6 @@
 
 package org.apache.hudi
 
-import org.apache.hadoop.fs.{FileStatus, FileSystem, GlobPattern, Path}
-import org.apache.hadoop.mapred.JobConf
 import org.apache.hudi.common.fs.FSUtils
 import org.apache.hudi.common.model.HoodieRecord
 import org.apache.hudi.common.table.view.HoodieTableFileSystemView
@@ -26,8 +24,10 @@ import org.apache.hudi.common.table.{HoodieTableMetaClient, TableSchemaResolver}
 import org.apache.hudi.exception.HoodieException
 import org.apache.hudi.hadoop.utils.HoodieInputFormatUtils.listAffectedFilesForCommits
 import org.apache.hudi.hadoop.utils.HoodieRealtimeRecordReaderUtils.getMaxCompactionMemoryInBytes
+
+import org.apache.hadoop.fs.{FileStatus, GlobPattern, Path}
+import org.apache.hadoop.mapred.JobConf
 import org.apache.log4j.LogManager
-import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.execution.datasources.PartitionedFile
@@ -138,9 +138,6 @@ class MergeOnReadIncrementalRelation(val sqlContext: SQLContext,
       hadoopConf = sqlContext.sparkSession.sessionState.newHadoopConf()
     )
 
-    // Follow the implementation of Spark internal HadoopRDD to handle the broadcast configuration.
-    FileSystem.getLocal(jobConf)
-    SparkHadoopUtil.get.addCredentials(jobConf)
     val rdd = new HoodieMergeOnReadRDD(
       sqlContext.sparkContext,
       jobConf,
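
Note (not part of the patch): the calls removed in the last hunk go through
org.apache.spark.deploy.SparkHadoopUtil, which became private[spark] in
Spark 3, so they no longer compile when Hudi is built against Spark 3. The
relation already seeds its JobConf from
sqlContext.sparkSession.sessionState.newHadoopConf(). Below is a minimal
Scala sketch of that approach, assuming the session's Hadoop configuration
already carries the settings and credentials the removed calls used to add;
JobConfSketch and buildJobConf are hypothetical names, not Hudi code.

  import org.apache.hadoop.mapred.JobConf
  import org.apache.spark.sql.SparkSession

  object JobConfSketch {
    // Hypothetical helper mirroring what the patched relation does.
    def buildJobConf(spark: SparkSession): JobConf = {
      // newHadoopConf() folds the session's Hadoop configuration (including
      // spark.hadoop.* settings) into a fresh Configuration. Assumption:
      // this already covers what SparkHadoopUtil.get.addCredentials(jobConf)
      // added, so no call into Spark's private deploy package is needed.
      val hadoopConf = spark.sessionState.newHadoopConf()
      new JobConf(hadoopConf)
    }
  }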