diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java
index 349a15da3a56..134afef943e1 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java
@@ -29,6 +29,8 @@
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.stream.Stream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.hdds.HddsUtils;
 import org.apache.hadoop.ozone.OzoneConsts;
@@ -52,6 +54,9 @@
 public class TarContainerPacker
     implements ContainerPacker {
 
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TarContainerPacker.class);
+
   static final String CHUNKS_DIR_NAME = OzoneConsts.STORAGE_DIR_CHUNKS;
 
   static final String DB_DIR_NAME = "db";
@@ -152,6 +157,21 @@ public void pack(Container container,
 
     try (OutputStream compressed = compress(output);
         ArchiveOutputStream archiveOutput = tar(compressed)) {
+      // Refuse to pack a container whose on-disk layout is incomplete.
+      // NOTE(review): a silent return yields an empty archive the receiver
+      // must detect; consider throwing IOException here instead.
+      if (!containerData.getDbFile().exists()) {
+        LOG.warn("DB dir {} does not exist, skipping pack",
+            containerData.getDbFile());
+        return;
+      }
+      if (!new File(containerData.getChunksPath()).exists()) {
+        LOG.warn("Chunks dir {} does not exist, skipping pack",
+            containerData.getChunksPath());
+        return;
+      }
+      if (!container.getContainerFile().exists()) {
+        LOG.warn("Container file {} does not exist, skipping pack",
+            container.getContainerFile());
+        return;
+      }
 
       includePath(containerData.getDbFile().toPath(), DB_DIR_NAME,
           archiveOutput);
@@ -220,6 +240,12 @@ private void includePath(Path dir, String subdir,
   static void includeFile(File file, String entryName,
       ArchiveOutputStream archiveOutput) throws IOException {
     ArchiveEntry entry = archiveOutput.createArchiveEntry(file, entryName);
     archiveOutput.putArchiveEntry(entry);
+    // Keep the entry for empty files so they are recreated on unpack;
+    // only the pointless data copy is skipped.
+    if (entry.getSize() == 0) {
+      archiveOutput.closeArchiveEntry();
+      return;
+    }
     try (InputStream input = new FileInputStream(file)) {
       IOUtils.copy(input, archiveOutput);
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/DownloadAndImportReplicator.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/DownloadAndImportReplicator.java
index 0f87cf8efab4..f253114fadac 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/DownloadAndImportReplicator.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/DownloadAndImportReplicator.java
@@ -122,7 +122,15 @@ public void replicate(ReplicationTask task) {
           containerID, bytes);
       task.setTransferredBytes(bytes);
 
-      importContainer(containerID, path);
-      LOG.info("Container {} is replicated successfully", containerID);
-      task.setStatus(Status.DONE);
+      if (bytes <= 0) {
+        // An empty download cannot be a valid container archive; fail the
+        // task instead of importing it and wrongly reporting success.
+        LOG.warn("Container {} download is empty (size {}); marking FAILED",
+            containerID, bytes);
+        task.setStatus(Status.FAILED);
+      } else {
+        importContainer(containerID, path);
+        LOG.info("Container {} is replicated successfully", containerID);
+        task.setStatus(Status.DONE);
+      }
     } catch (Exception e) {