diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieParquetInputFormatBase.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieParquetInputFormatBase.java
index ed88acacb4d2f..f6ba3fd8141c9 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieParquetInputFormatBase.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieParquetInputFormatBase.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.io.SelfDescribingInputFormatInterface;
 import org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.FileSplit;
@@ -45,7 +46,8 @@
  * either {@link HoodieCopyOnWriteTableInputFormat} or {@link HoodieMergeOnReadTableInputFormat} to be supplied
  * to which it delegates all of its necessary methods.
  */
-public abstract class HoodieParquetInputFormatBase extends MapredParquetInputFormat implements Configurable {
+public abstract class HoodieParquetInputFormatBase extends MapredParquetInputFormat
+    implements Configurable, SelfDescribingInputFormatInterface {
 
   private final HoodieTableInputFormat inputFormatDelegate;
 
@@ -75,13 +77,13 @@ protected final boolean isSplitable(FileSystem fs, Path filename) {
 
   @Override
   protected final FileSplit makeSplit(Path file, long start, long length,
-                                       String[] hosts) {
+                                      String[] hosts) {
     return inputFormatDelegate.makeSplit(file, start, length, hosts);
   }
 
   @Override
   protected final FileSplit makeSplit(Path file, long start, long length,
-                                       String[] hosts, String[] inMemoryHosts) {
+                                      String[] hosts, String[] inMemoryHosts) {
     return inputFormatDelegate.makeSplit(file, start, length, hosts, inMemoryHosts);
   }
 
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieTableInputFormat.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieTableInputFormat.java
index d18cb7895ad00..580b132eaf322 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieTableInputFormat.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieTableInputFormat.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.io.SelfDescribingInputFormatInterface;
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.FileInputFormat;
@@ -36,7 +37,7 @@
  * Copy-on-Write (COW) and Merge-on-Read (MOR) tables
  */
 public abstract class HoodieTableInputFormat extends FileInputFormat
-    implements Configurable {
+    implements Configurable, SelfDescribingInputFormatInterface {
 
   protected Configuration conf;
 
diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableCommand.scala b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableCommand.scala
index 3db9742aaf0cf..ed33a735e294f 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableCommand.scala
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableCommand.scala
@@ -17,15 +17,14 @@
 
 package org.apache.spark.sql.hudi.command
 
-import org.apache.hadoop.fs.Path
 import org.apache.hudi.common.model.{HoodieFileFormat, HoodieTableType}
 import org.apache.hudi.common.table.HoodieTableConfig
 import org.apache.hudi.common.util.ConfigUtils
 import org.apache.hudi.exception.HoodieException
-import org.apache.hudi.hadoop.HoodieParquetInputFormat
-import org.apache.hudi.hadoop.realtime.HoodieParquetRealtimeInputFormat
 import org.apache.hudi.hadoop.utils.HoodieInputFormatUtils
 import org.apache.hudi.{DataSourceWriteOptions, SparkAdapterSupport}
+
+import org.apache.hadoop.fs.Path
 import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
 import org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.needFilterProps
 import org.apache.spark.sql.catalyst.catalog._
@@ -38,7 +37,6 @@ import org.apache.spark.sql.types.StructType
 import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
 import org.apache.spark.{SPARK_VERSION, SparkConf}
 
-import java.io.{PrintWriter, StringWriter}
 import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.util.control.NonFatal
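Note on what the added interface buys: org.apache.hadoop.hive.ql.io.SelfDescribingInputFormatInterface is a method-less marker interface in Hive, signalling that an input format resolves the file schema itself rather than depending on column projection information pushed down by the planner. The sketch below is a minimal, hypothetical stand-in (SelfDescribingDemoInputFormat and its isSelfDescribing helper are not part of Hudi or Hive) illustrating the pattern this patch applies to HoodieParquetInputFormatBase and HoodieTableInputFormat.

import java.io.IOException;

import org.apache.hadoop.hive.ql.io.SelfDescribingInputFormatInterface;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;

// Hypothetical stand-in for the Hudi classes touched by this patch: a mapred
// FileInputFormat that opts into Hive's "self-describing" contract simply by
// implementing the marker interface.
public class SelfDescribingDemoInputFormat
    extends FileInputFormat<NullWritable, ArrayWritable>
    implements SelfDescribingInputFormatInterface {

  @Override
  public RecordReader<NullWritable, ArrayWritable> getRecordReader(
      InputSplit split, JobConf job, Reporter reporter) throws IOException {
    // A real reader (e.g. Hudi's) derives the file schema on its own; left
    // unimplemented here because only the type relationship matters.
    throw new UnsupportedOperationException("sketch only");
  }

  // Illustrates the kind of instanceof check a consumer such as Hive can
  // perform: the marker carries no methods, so its presence is the signal.
  public static boolean isSelfDescribing(InputFormat<?, ?> format) {
    return format instanceof SelfDescribingInputFormatInterface;
  }

  public static void main(String[] args) {
    System.out.println(isSelfDescribing(new SelfDescribingDemoInputFormat())); // true
  }
}

Because the marker has no methods, the patch changes no runtime behavior inside Hudi itself; it only changes how schema-aware consumers classify these input formats.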