[HUDI-4808] Fix HoodieSimpleBucketIndex not consider bucket num in lo… #6630
Changes to the bucket index bootstrap (`loadPartitionBucketIdFileIdMapping`):

```diff
@@ -25,7 +25,6 @@
 import org.apache.hudi.exception.HoodieIOException;
 import org.apache.hudi.index.HoodieIndexUtils;
 import org.apache.hudi.table.HoodieTable;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
@@ -52,10 +51,11 @@ private Map<Integer, HoodieRecordLocation> loadPartitionBucketIdFileIdMapping(
   Map<Integer, HoodieRecordLocation> bucketIdToFileIdMapping = new HashMap<>();
   hoodieTable.getMetaClient().reloadActiveTimeline();
   HoodieIndexUtils
-      .getLatestBaseFilesForPartition(partition, hoodieTable)
-      .forEach(file -> {
-        String fileId = file.getFileId();
-        String commitTime = file.getCommitTime();
+      .getLatestFileSlicesForPartition(partition, hoodieTable)
+      .forEach(fileSlice -> {
+        String fileId = fileSlice.getFileId();
+        String commitTime = fileSlice.getBaseInstantTime();
```
Review comment on lines +56 to +57:

> **Contributor (Author):** Hi @danny0405 Thanks for your suggestion, I have corrected it, please review again : )
```diff
         int bucketId = BucketIdentifier.bucketIdFromFileId(fileId);
         if (!bucketIdToFileIdMapping.containsKey(bucketId)) {
           bucketIdToFileIdMapping.put(bucketId, new HoodieRecordLocation(commitTime, fileId));
```
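The switch from `getLatestBaseFilesForPartition` to `getLatestFileSlicesForPartition` means a bucket whose latest file slice is log-only (no base file written yet) still lands in `bucketIdToFileIdMapping`. The bucket number itself is recovered from the file id by `BucketIdentifier.bucketIdFromFileId`. A standalone sketch of that lookup, assuming the bucket-index convention of an 8-digit zero-padded bucket prefix on the file id (illustrative only, not Hudi's implementation):

```java
public final class BucketIdSketch {

  // Parse the assumed 8-digit bucket prefix, e.g. "00000003-8f74-..." -> 3.
  static int bucketIdFromFileId(String fileId) {
    return Integer.parseInt(fileId.substring(0, 8));
  }

  public static void main(String[] args) {
    // The file id (and therefore the bucket prefix) is shared by every file
    // in a slice, so the lookup works the same for base and log files.
    System.out.println(bucketIdFromFileId("00000003-8f74-4b0a-9be2-1a1b2c3d4e5f")); // 3
  }
}
```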
Changes to the test table helper (`withLogAppends` / `appendRecordsToLogFile`):

```diff
@@ -32,7 +32,6 @@
 import org.apache.hudi.common.model.HoodieFileFormat;
 import org.apache.hudi.common.model.HoodieLogFile;
 import org.apache.hudi.common.model.HoodieRecord;
-import org.apache.hudi.common.model.HoodieRecordLocation;
 import org.apache.hudi.common.model.HoodieRecordPayload;
 import org.apache.hudi.common.table.HoodieTableConfig;
 import org.apache.hudi.common.table.HoodieTableMetaClient;
@@ -45,9 +44,9 @@
 import org.apache.hudi.common.util.collection.Pair;
 import org.apache.hudi.config.HoodieStorageConfig;
 import org.apache.hudi.io.storage.HoodieAvroParquetWriter;
-import org.apache.hudi.io.storage.HoodieParquetConfig;
 import org.apache.hudi.io.storage.HoodieOrcConfig;
 import org.apache.hudi.io.storage.HoodieOrcWriter;
+import org.apache.hudi.io.storage.HoodieParquetConfig;
 import org.apache.hudi.metadata.HoodieTableMetadataWriter;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
@@ -152,27 +151,21 @@ public Path withInserts(String partition, String fileId, List<HoodieRecord> reco
     return baseFilePath;
   }

-  public Map<String, List<HoodieLogFile>> withLogAppends(List<HoodieRecord> records) throws Exception {
+  public Map<String, List<HoodieLogFile>> withLogAppends(String partition, String fileId, List<HoodieRecord> records) throws Exception {
     Map<String, List<HoodieLogFile>> partitionToLogfilesMap = new HashMap<>();
-    for (List<HoodieRecord> groupedRecords : records.stream()
```
Review comments on `withLogAppends`:

> **Contributor:** Is there any special reason we rename the method from …
>
> **Contributor (Author):** Seems …
```diff
-        .collect(Collectors.groupingBy(HoodieRecord::getCurrentLocation)).values()) {
-      final Pair<String, HoodieLogFile> appendedLogFile = appendRecordsToLogFile(groupedRecords);
-      partitionToLogfilesMap.computeIfAbsent(
-          appendedLogFile.getKey(), k -> new ArrayList<>()).add(appendedLogFile.getValue());
-    }
+    final Pair<String, HoodieLogFile> appendedLogFile = appendRecordsToLogFile(partition, fileId, records);
+    partitionToLogfilesMap.computeIfAbsent(appendedLogFile.getKey(), k -> new ArrayList<>()).add(appendedLogFile.getValue());
     return partitionToLogfilesMap;
   }

-  private Pair<String, HoodieLogFile> appendRecordsToLogFile(List<HoodieRecord> groupedRecords) throws Exception {
-    String partitionPath = groupedRecords.get(0).getPartitionPath();
-    HoodieRecordLocation location = groupedRecords.get(0).getCurrentLocation();
+  private Pair<String, HoodieLogFile> appendRecordsToLogFile(String partitionPath, String fileId, List<HoodieRecord> records) throws Exception {
     try (HoodieLogFormat.Writer logWriter = HoodieLogFormat.newWriterBuilder().onParentPath(new Path(basePath, partitionPath))
-        .withFileExtension(HoodieLogFile.DELTA_EXTENSION).withFileId(location.getFileId())
-        .overBaseCommit(location.getInstantTime()).withFs(fs).build()) {
+        .withFileExtension(HoodieLogFile.DELTA_EXTENSION).withFileId(fileId)
+        .overBaseCommit(currentInstantTime).withFs(fs).build()) {
       Map<HoodieLogBlock.HeaderMetadataType, String> header = new HashMap<>();
-      header.put(HoodieLogBlock.HeaderMetadataType.INSTANT_TIME, location.getInstantTime());
+      header.put(HoodieLogBlock.HeaderMetadataType.INSTANT_TIME, currentInstantTime);
       header.put(HoodieLogBlock.HeaderMetadataType.SCHEMA, schema.toString());
-      logWriter.appendBlock(new HoodieAvroDataBlock(groupedRecords.stream().map(r -> {
+      logWriter.appendBlock(new HoodieAvroDataBlock(records.stream().map(r -> {
         try {
           GenericRecord val = (GenericRecord) ((HoodieRecordPayload) r.getData()).getInsertValue(schema).get();
           HoodieAvroUtils.addHoodieKeyToRecord(val, r.getRecordKey(), r.getPartitionPath(), "");
```
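The helper no longer derives the target file group by grouping records on `getCurrentLocation()`; the caller now passes `partition` and `fileId` directly, which lets a test append log files for a file group that has no base file or tagged records yet. One pitfall of the old grouping approach, shown as a standalone sketch with toy types (this is an illustration of `Collectors.groupingBy` semantics, not code from the PR): the collector rejects elements whose classifier returns null, which is exactly what an untagged record's location would be.

```java
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class GroupingSketch {
  // Toy stand-in for HoodieRecord; `location` plays the role of getCurrentLocation().
  record Rec(String key, String location) {}

  public static void main(String[] args) {
    List<Rec> records = List.of(new Rec("k1", "file-A"), new Rec("k2", null));
    try {
      Map<String, List<Rec>> byLocation =
          records.stream().collect(Collectors.groupingBy(Rec::location));
      System.out.println(byLocation);
    } catch (NullPointerException e) {
      // groupingBy throws "element cannot be mapped to a null key" -- an
      // untagged record breaks the old flow, while the new signature
      // sidesteps it by taking the partition and file id from the caller.
      System.out.println("groupingBy failed: " + e.getMessage());
    }
  }
}
```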
Review thread:

> **Contributor:** Seems reasonable fix, can we add a test case for it.
>
> **Contributor:** +1
>
> **Contributor (Author):** Ok, I will add a test for this pr.
>
> **Contributor (Author):** Hi @danny0405 , I have added a test for log type, please review again.
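The added log-type test presumably exercises the scenario this PR fixes: a bucket whose latest file slice consists only of log files. The essence of that scenario can be modeled with toy types, reusing the assumed 8-digit bucket prefix from the sketch above (illustrative only, not the PR's actual test):

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class LogOnlySliceSketch {
  // Toy stand-ins for FileSlice and HoodieRecordLocation.
  record FileSlice(String fileId, String baseInstantTime) {}
  record RecordLocation(String instantTime, String fileId) {}

  public static void main(String[] args) {
    // Bucket 2's latest slice is log-only: no base file has been written yet,
    // so a base-file-only scan would never surface it.
    List<FileSlice> latestSlices = List.of(
        new FileSlice("00000001-aaaa", "001"),
        new FileSlice("00000002-bbbb", "002")); // log-only slice

    // Mirror of the fixed loop: iterate file slices, not base files.
    Map<Integer, RecordLocation> mapping = new HashMap<>();
    for (FileSlice slice : latestSlices) {
      int bucketId = Integer.parseInt(slice.fileId().substring(0, 8));
      mapping.putIfAbsent(bucketId,
          new RecordLocation(slice.baseInstantTime(), slice.fileId()));
    }
    System.out.println(mapping.containsKey(2)); // true with the file-slice scan
  }
}
```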