diff --git a/hudi-client/src/test/java/org/apache/hudi/common/HoodieMergeOnReadTestUtils.java b/hudi-client/src/test/java/org/apache/hudi/common/HoodieMergeOnReadTestUtils.java
index 3d56ffa445cd8..91e454bef6c49 100644
--- a/hudi-client/src/test/java/org/apache/hudi/common/HoodieMergeOnReadTestUtils.java
+++ b/hudi-client/src/test/java/org/apache/hudi/common/HoodieMergeOnReadTestUtils.java
@@ -46,7 +46,8 @@ public class HoodieMergeOnReadTestUtils {
 
   public static List<GenericRecord> getRecordsUsingInputFormat(List<String> inputPaths, String basePath) throws IOException {
     JobConf jobConf = new JobConf();
-    Schema schema = HoodieAvroUtils.addMetadataFields(Schema.parse(HoodieTestDataGenerator.TRIP_EXAMPLE_SCHEMA));
+    Schema schema = HoodieAvroUtils.addMetadataFields(
+        new Schema.Parser().parse(HoodieTestDataGenerator.TRIP_EXAMPLE_SCHEMA));
     HoodieParquetRealtimeInputFormat inputFormat = new HoodieParquetRealtimeInputFormat();
     setPropsForInputFormat(inputFormat, jobConf, schema, basePath);
     return inputPaths.stream().map(path -> {
diff --git a/hudi-client/src/test/java/org/apache/hudi/func/TestUpdateMapFunction.java b/hudi-client/src/test/java/org/apache/hudi/func/TestUpdateMapFunction.java
index 1af2f209e5962..aa50953d544b3 100644
--- a/hudi-client/src/test/java/org/apache/hudi/func/TestUpdateMapFunction.java
+++ b/hudi-client/src/test/java/org/apache/hudi/func/TestUpdateMapFunction.java
@@ -100,7 +100,7 @@ public void testSchemaEvolutionOnUpdate() throws Exception {
     // Now try an update with an evolved schema
     // Evolved schema does not have guarantee on preserving the original field ordering
     final HoodieWriteConfig config2 = makeHoodieClientConfig("/exampleEvolvedSchema.txt");
-    final Schema schema = Schema.parse(config2.getSchema());
+    final Schema schema = new Schema.Parser().parse(config2.getSchema());
     final WriteStatus insertResult = statuses.get(0);
     String fileId = insertResult.getFileId();
 
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/io/storage/SizeAwareFSDataOutputStream.java b/hudi-common/src/main/java/org/apache/hudi/common/io/storage/SizeAwareFSDataOutputStream.java
index 5f1c0ab93c32f..4da4b81fd24b5 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/io/storage/SizeAwareFSDataOutputStream.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/io/storage/SizeAwareFSDataOutputStream.java
@@ -43,7 +43,7 @@ public class SizeAwareFSDataOutputStream extends FSDataOutputStream {
 
   public SizeAwareFSDataOutputStream(Path path, FSDataOutputStream out, ConsistencyGuard consistencyGuard,
       Runnable closeCallback) throws IOException {
-    super(out);
+    super(out, null);
     this.path = path;
     this.closeCallback = closeCallback;
     this.consistencyGuard = consistencyGuard;
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieAvroDataBlock.java b/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieAvroDataBlock.java
index f53fa3bac04bc..985c394fe1cfc 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieAvroDataBlock.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieAvroDataBlock.java
@@ -62,7 +62,7 @@ public HoodieAvroDataBlock(@Nonnull List<IndexedRecord> records, @Nonnull Map<HeaderMetadataType, String> header,
       @Nonnull Map<HeaderMetadataType, String> footer) {
     super(header, footer, Option.empty(), Option.empty(), null, false);
     this.records = records;
-    this.schema = Schema.parse(super.getLogBlockHeader().get(HeaderMetadataType.SCHEMA));
+    this.schema = new Schema.Parser().parse(super.getLogBlockHeader().get(HeaderMetadataType.SCHEMA));
   }
 
   public HoodieAvroDataBlock(@Nonnull List<IndexedRecord> records, @Nonnull Map<HeaderMetadataType, String> header) {
@@ -97,7 +97,7 @@ public byte[] getContentBytes() throws IOException {
       createRecordsFromContentBytes();
     }
 
-    Schema schema = Schema.parse(super.getLogBlockHeader().get(HeaderMetadataType.SCHEMA));
+    Schema schema = new Schema.Parser().parse(super.getLogBlockHeader().get(HeaderMetadataType.SCHEMA));
     GenericDatumWriter<IndexedRecord> writer = new GenericDatumWriter<>(schema);
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     DataOutputStream output = new DataOutputStream(baos);
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/LogReaderUtils.java b/hudi-common/src/main/java/org/apache/hudi/common/util/LogReaderUtils.java
index 0ec3a4bb857c3..b572b931fa724 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/LogReaderUtils.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/LogReaderUtils.java
@@ -51,7 +51,7 @@ private static Schema readSchemaFromLogFileInReverse(FileSystem fs, HoodieActive
         HoodieAvroDataBlock lastBlock = (HoodieAvroDataBlock) block;
         if (completedTimeline
             .containsOrBeforeTimelineStarts(lastBlock.getLogBlockHeader().get(HeaderMetadataType.INSTANT_TIME))) {
-          writerSchema = Schema.parse(lastBlock.getLogBlockHeader().get(HeaderMetadataType.SCHEMA));
+          writerSchema = new Schema.Parser().parse(lastBlock.getLogBlockHeader().get(HeaderMetadataType.SCHEMA));
           break;
         }
       }
diff --git a/hudi-spark/src/main/scala/org/apache/hudi/AvroConversionUtils.scala b/hudi-spark/src/main/scala/org/apache/hudi/AvroConversionUtils.scala
index e857b2569b117..a3ce3c36e6939 100644
--- a/hudi-spark/src/main/scala/org/apache/hudi/AvroConversionUtils.scala
+++ b/hudi-spark/src/main/scala/org/apache/hudi/AvroConversionUtils.scala
@@ -48,12 +48,12 @@ object AvroConversionUtils {
       ss.createDataFrame(rdd.mapPartitions { records =>
         if (records.isEmpty) Iterator.empty
         else {
-          val schema = Schema.parse(schemaStr)
+          val schema = new Schema.Parser().parse(schemaStr)
           val dataType = convertAvroSchemaToStructType(schema)
           val convertor = AvroConversionHelper.createConverterToRow(schema, dataType)
           records.map { x => convertor(x).asInstanceOf[Row] }
         }
-      }, convertAvroSchemaToStructType(Schema.parse(schemaStr))).asInstanceOf[Dataset[Row]]
+      }, convertAvroSchemaToStructType(new Schema.Parser().parse(schemaStr))).asInstanceOf[Dataset[Row]]
     }
   }
 
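Note on the change pattern: every Avro hunk above applies the same mechanical substitution, since the static Schema.parse(String) is deprecated in favor of the Schema.Parser API. A minimal before/after sketch follows; the schema string in it is a hypothetical placeholder, not taken from this patch.

    import org.apache.avro.Schema;

    public class SchemaParserExample {
      // Hypothetical schema for illustration only; the Hudi call sites pass
      // their own schema strings (e.g. HoodieTestDataGenerator.TRIP_EXAMPLE_SCHEMA).
      private static final String SCHEMA_STR =
          "{\"type\":\"record\",\"name\":\"trip\",\"fields\":"
              + "[{\"name\":\"rider\",\"type\":\"string\"}]}";

      public static void main(String[] args) {
        // Deprecated form removed by this patch:
        //   Schema schema = Schema.parse(SCHEMA_STR);

        // Replacement used at every call site. A fresh Parser is created each
        // time because a Parser instance remembers the named types it has
        // already seen and rejects a schema that redefines the same record name.
        Schema schema = new Schema.Parser().parse(SCHEMA_STR);
        System.out.println(schema.getFullName()); // prints "trip"
      }
    }

The SizeAwareFSDataOutputStream hunk is a separate deprecation fix: the one-argument FSDataOutputStream(OutputStream) constructor is deprecated in Hadoop, so the patch calls the two-argument constructor, passing null for the FileSystem.Statistics parameter.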