Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,13 @@
package org.apache.iceberg.actions;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.iceberg.CombinedScanTask;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.ContentFile;
import org.apache.iceberg.FileScanTask;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.RewriteFiles;
Expand All @@ -37,6 +38,7 @@
import org.apache.iceberg.io.CloseableIterable;
import org.apache.iceberg.io.CloseableIterator;
import org.apache.iceberg.io.FileIO;
import org.apache.iceberg.io.RewriteResult;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
import org.apache.iceberg.relocated.com.google.common.collect.ListMultimap;
Expand Down Expand Up @@ -196,7 +198,7 @@ public BaseRewriteDataFilesAction<ThisT> filter(Expression expr) {

@Override
public RewriteDataFilesActionResult execute() {
CloseableIterable<FileScanTask> fileScanTasks = null;
CloseableIterable<FileScanTask> fileScanTasks = CloseableIterable.empty();
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm not sure whether RewriteDataFilesActionResult should be renamed to RewriteFilesActionResult, because the rewrite action removes all the deletions from the file set — it actually rewrites the delete files as well.

try {
fileScanTasks = table.newScan()
.caseSensitive(caseSensitive)
Expand All @@ -215,13 +217,14 @@ public RewriteDataFilesActionResult execute() {

Map<StructLikeWrapper, Collection<FileScanTask>> groupedTasks = groupTasksByPartition(fileScanTasks.iterator());
Map<StructLikeWrapper, Collection<FileScanTask>> filteredGroupedTasks = groupedTasks.entrySet().stream()
.filter(kv -> kv.getValue().size() > 1)
.filter(partitionTasks -> doPartitionNeedRewrite(partitionTasks.getValue()))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

// Nothing to rewrite if there's only one DataFile in each partition.
// Nothing to rewrite if there's only one file in each partition.
if (filteredGroupedTasks.isEmpty()) {
return RewriteDataFilesActionResult.empty();
}

// Split and combine tasks under each partition
List<CombinedScanTask> combinedScanTasks = filteredGroupedTasks.values().stream()
.map(scanTasks -> {
Expand All @@ -230,22 +233,26 @@ public RewriteDataFilesActionResult execute() {
return TableScanUtil.planTasks(splitTasks, targetSizeInBytes, splitLookback, splitOpenFileCost);
})
.flatMap(Streams::stream)
.filter(task -> task.files().size() > 1 || isPartialFileScan(task))
.filter(this::doTaskNeedRewrite)
.collect(Collectors.toList());

if (combinedScanTasks.isEmpty()) {
return RewriteDataFilesActionResult.empty();
}

List<DataFile> addedDataFiles = rewriteDataForTasks(combinedScanTasks);
List<DataFile> currentDataFiles = combinedScanTasks.stream()
.flatMap(tasks -> tasks.files().stream().map(FileScanTask::file))
.collect(Collectors.toList());
replaceDataFiles(currentDataFiles, addedDataFiles);
// Execute the real rewrite tasks in parallelism.
RewriteResult rewriteResult = rewriteDataForTasks(combinedScanTasks);

return new RewriteDataFilesActionResult(currentDataFiles, addedDataFiles);
}
// Commit the RewriteFiles transaction to iceberg table.
replaceDataFiles(rewriteResult);

return new RewriteDataFilesActionResult(
Lists.newArrayList(rewriteResult.dataFilesToDelete()),
Lists.newArrayList(rewriteResult.deleteFilesToDelete()),
Lists.newArrayList(rewriteResult.dataFilesToAdd()),
Lists.newArrayList(rewriteResult.deleteFilesToAdd())
);
}

private Map<StructLikeWrapper, Collection<FileScanTask>> groupTasksByPartition(
CloseableIterator<FileScanTask> tasksIter) {
Expand All @@ -262,31 +269,64 @@ private Map<StructLikeWrapper, Collection<FileScanTask>> groupTasksByPartition(
return tasksGroupedByPartition.asMap();
}

private void replaceDataFiles(Iterable<DataFile> deletedDataFiles, Iterable<DataFile> addedDataFiles) {
private void replaceDataFiles(RewriteResult result) {
try {
RewriteFiles rewriteFiles = table.newRewrite();
rewriteFiles.rewriteFiles(Sets.newHashSet(deletedDataFiles), Sets.newHashSet(addedDataFiles));

rewriteFiles.rewriteFiles(
Sets.newHashSet(result.dataFilesToDelete()),
Sets.newHashSet(result.deleteFilesToDelete()),
Sets.newHashSet(result.dataFilesToAdd()),
Sets.newHashSet(result.deleteFilesToAdd())
);

commit(rewriteFiles);
} catch (Exception e) {
Tasks.foreach(Iterables.transform(addedDataFiles, f -> f.path().toString()))
// Remove all the newly produced files if possible.
Iterable<ContentFile<?>> addedFiles = Iterables.concat(
Arrays.asList(result.dataFilesToAdd()),
Arrays.asList(result.deleteFilesToAdd())
);

Tasks.foreach(Iterables.transform(addedFiles, f -> f.path().toString()))
.noRetry()
.suppressFailureWhenFinished()
.onFailure((location, exc) -> LOG.warn("Failed to delete: {}", location, exc))
.run(fileIO::deleteFile);

throw e;
}
}

private boolean isPartialFileScan(CombinedScanTask task) {
private boolean doPartitionNeedRewrite(Collection<FileScanTask> partitionTasks) {
int files = 0;
for (FileScanTask scanTask : partitionTasks) {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I believe this would break splitting large files, since a file scan task with only a single file will never be marked for rewrite.

files += 1; // One for data file.
files += scanTask.deletes().size();
}
return files > 1;
}

/**
 * Decides whether a planned {@link CombinedScanTask} requires an actual rewrite.
 * <p>
 * A task is rewritten when it combines multiple file scan tasks, when its single data
 * file has delete files to apply, or when it covers only part of a single data file
 * (i.e. the file is being split).
 *
 * @param task a non-null, non-empty combined scan task
 * @return true if this task should be executed as a rewrite
 * @throws IllegalArgumentException if the task is null or contains no files
 */
private boolean doTaskNeedRewrite(CombinedScanTask task) {
  Preconditions.checkArgument(task != null && task.files().size() > 0,
      "Files in CombinedScanTask cannot be null or empty");
  if (task.files().size() == 1) {
    FileScanTask scanTask = task.files().iterator().next();
    if (scanTask.deletes().size() > 0) {
      // There is 1 data file and several delete files; rewrite them into one data file.
      return true;
    } else {
      // There is only 1 data file and no deletes. If the scan covers the complete file,
      // there is nothing to compact and no rewrite is needed.
      return scanTask.file().fileSizeInBytes() != scanTask.length();
    }
  } else {
    // Multiple FileScanTasks must be combined into fewer files.
    return true;
  }
}

protected abstract FileIO fileIO();

protected abstract List<DataFile> rewriteDataForTasks(List<CombinedScanTask> combinedScanTask);
protected abstract RewriteResult rewriteDataForTasks(List<CombinedScanTask> combinedScanTask);
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

import java.util.List;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DeleteFile;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;

public class RewriteDataFilesActionResult {
Expand All @@ -29,11 +30,21 @@ public class RewriteDataFilesActionResult {
new RewriteDataFilesActionResult(ImmutableList.of(), ImmutableList.of());

private List<DataFile> deletedDataFiles;
private List<DeleteFile> deletedDeleteFiles;

private List<DataFile> addedDataFiles;
private List<DeleteFile> addedDeleteFiles;

public RewriteDataFilesActionResult(List<DataFile> deletedDataFiles, List<DataFile> addedDataFiles) {
this(deletedDataFiles, ImmutableList.of(), addedDataFiles, ImmutableList.of());
}

public RewriteDataFilesActionResult(List<DataFile> deletedDataFiles, List<DeleteFile> deletedDeleteFiles,
List<DataFile> addedDataFiles, List<DeleteFile> addedDeleteFiles) {
this.deletedDataFiles = deletedDataFiles;
this.deletedDeleteFiles = deletedDeleteFiles;
this.addedDataFiles = addedDataFiles;
this.addedDeleteFiles = addedDeleteFiles;
}

static RewriteDataFilesActionResult empty() {
Expand All @@ -44,7 +55,15 @@ public List<DataFile> deletedDataFiles() {
return deletedDataFiles;
}

public List<DeleteFile> deletedDeleteFiles() {
return deletedDeleteFiles;
}

public List<DataFile> addedDataFiles() {
return addedDataFiles;
}

public List<DeleteFile> addedDeleteFiles() {
return addedDeleteFiles;
}
}
139 changes: 139 additions & 0 deletions core/src/main/java/org/apache/iceberg/io/RewriteResult.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,139 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg.io;

import java.io.Serializable;
import java.util.Collections;
import java.util.Set;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DeleteFile;
import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;

/**
 * The set of content files removed and produced by a rewrite action.
 * <p>
 * Instances are immutable once built; each file category is materialized from a set,
 * so a file appears at most once per category. Create instances via {@link #builder()}.
 */
public class RewriteResult implements Serializable {
  // Files to delete from the table in the rewrite commit.
  private final DataFile[] dataFilesToDelete;
  private final DeleteFile[] deleteFilesToDelete;

  // Files to add to the table in the rewrite commit.
  private final DataFile[] dataFilesToAdd;
  private final DeleteFile[] deleteFilesToAdd;

  private RewriteResult(Set<DataFile> dataFilesToDelete,
                        Set<DeleteFile> deleteFilesToDelete,
                        Set<DataFile> dataFilesToAdd,
                        Set<DeleteFile> deleteFilesToAdd) {
    this.dataFilesToDelete = dataFilesToDelete.toArray(new DataFile[0]);
    this.deleteFilesToDelete = deleteFilesToDelete.toArray(new DeleteFile[0]);
    this.dataFilesToAdd = dataFilesToAdd.toArray(new DataFile[0]);
    this.deleteFilesToAdd = deleteFilesToAdd.toArray(new DeleteFile[0]);
  }

  public DataFile[] dataFilesToDelete() {
    return dataFilesToDelete;
  }

  public DeleteFile[] deleteFilesToDelete() {
    return deleteFilesToDelete;
  }

  public DataFile[] dataFilesToAdd() {
    return dataFilesToAdd;
  }

  public DeleteFile[] deleteFilesToAdd() {
    return deleteFilesToAdd;
  }

  public static Builder builder() {
    return new Builder();
  }

  /** Accumulates files into deduplicated sets and builds an immutable {@link RewriteResult}. */
  public static class Builder {
    private final Set<DataFile> dataFilesToDelete = Sets.newHashSet();
    private final Set<DeleteFile> deleteFilesToDelete = Sets.newHashSet();
    private final Set<DataFile> dataFilesToAdd = Sets.newHashSet();
    private final Set<DeleteFile> deleteFilesToAdd = Sets.newHashSet();

    public Builder addDataFilesToDelete(DataFile... dataFiles) {
      Collections.addAll(dataFilesToDelete, dataFiles);
      return this;
    }

    public Builder addDataFilesToDelete(Iterable<DataFile> dataFiles) {
      Iterables.addAll(dataFilesToDelete, dataFiles);
      return this;
    }

    public Builder addDeleteFilesToDelete(DeleteFile... deleteFiles) {
      Collections.addAll(deleteFilesToDelete, deleteFiles);
      return this;
    }

    public Builder addDeleteFilesToDelete(Iterable<DeleteFile> deleteFiles) {
      Iterables.addAll(deleteFilesToDelete, deleteFiles);
      return this;
    }

    public Builder addDataFilesToAdd(DataFile... dataFiles) {
      Collections.addAll(dataFilesToAdd, dataFiles);
      return this;
    }

    public Builder addDataFilesToAdd(Iterable<DataFile> dataFiles) {
      Iterables.addAll(dataFilesToAdd, dataFiles);
      return this;
    }

    // Varargs overload first to match the ordering of the other add-method pairs.
    public Builder addDeleteFilesToAdd(DeleteFile... deleteFiles) {
      Collections.addAll(deleteFilesToAdd, deleteFiles);
      return this;
    }

    public Builder addDeleteFilesToAdd(Iterable<DeleteFile> deleteFiles) {
      Iterables.addAll(deleteFilesToAdd, deleteFiles);
      return this;
    }

    /** Folds every file category of each given result into this builder. */
    public Builder merge(Iterable<RewriteResult> results) {
      for (RewriteResult result : results) {
        accumulate(result);
      }
      return this;
    }

    /** Folds every file category of each given result into this builder. */
    public Builder merge(RewriteResult... results) {
      for (RewriteResult result : results) {
        accumulate(result);
      }
      return this;
    }

    // Shared by both merge overloads to avoid duplicating the four additions.
    private void accumulate(RewriteResult result) {
      Collections.addAll(dataFilesToDelete, result.dataFilesToDelete);
      Collections.addAll(deleteFilesToDelete, result.deleteFilesToDelete);
      Collections.addAll(dataFilesToAdd, result.dataFilesToAdd);
      Collections.addAll(deleteFilesToAdd, result.deleteFilesToAdd);
    }

    public RewriteResult build() {
      return new RewriteResult(dataFilesToDelete, deleteFilesToDelete, dataFilesToAdd, deleteFilesToAdd);
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,11 @@
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.iceberg.CombinedScanTask;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.Table;
import org.apache.iceberg.actions.BaseRewriteDataFilesAction;
import org.apache.iceberg.flink.source.RowDataRewriter;
import org.apache.iceberg.io.FileIO;
import org.apache.iceberg.io.RewriteResult;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;

public class RewriteDataFilesAction extends BaseRewriteDataFilesAction<RewriteDataFilesAction> {
Expand All @@ -47,9 +47,10 @@ protected FileIO fileIO() {
}

@Override
protected List<DataFile> rewriteDataForTasks(List<CombinedScanTask> combinedScanTasks) {
protected RewriteResult rewriteDataForTasks(List<CombinedScanTask> combinedScanTasks) {
int size = combinedScanTasks.size();
int parallelism = Math.min(size, maxParallelism);

DataStream<CombinedScanTask> dataStream = env.fromCollection(combinedScanTasks);
RowDataRewriter rowDataRewriter = new RowDataRewriter(table(), caseSensitive(), fileIO(), encryptionManager());
try {
Expand Down
Loading