diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java index 349a15da3a56..cb34ebd23ec9 100644 --- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java @@ -43,6 +43,8 @@ import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorStreamFactory; import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static java.util.stream.Collectors.toList; @@ -51,6 +53,8 @@ */ public class TarContainerPacker implements ContainerPacker { + private static final Logger LOG = + LoggerFactory.getLogger(TarContainerPacker.class); static final String CHUNKS_DIR_NAME = OzoneConsts.STORAGE_DIR_CHUNKS; @@ -152,6 +156,20 @@ public void pack(Container container, try (OutputStream compressed = compress(output); ArchiveOutputStream archiveOutput = tar(compressed)) { + if (!containerData.getDbFile().exists()) { + LOG.warn("DB file {} does not exist", + containerData.getDbFile().toPath().toString()); + return; + } else if (!new File(containerData.getChunksPath()).exists()) { + LOG.warn("Chunks dir {} does not exist", + containerData.getChunksPath()); + return; + } else if (!container.getContainerFile().exists()) { + LOG.warn("Container file {} does not exist", + container.getContainerFile().toPath().toString()); + return; + } + includePath(containerData.getDbFile().toPath(), DB_DIR_NAME, archiveOutput); diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/DownloadAndImportReplicator.java
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/DownloadAndImportReplicator.java index 0f87cf8efab4..3b051e53b85c 100644 --- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/DownloadAndImportReplicator.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/DownloadAndImportReplicator.java @@ -121,10 +121,17 @@ public void replicate(ReplicationTask task) { LOG.info("Container {} is downloaded with size {}, starting to import.", containerID, bytes); task.setTransferredBytes(bytes); - - importContainer(containerID, path); - LOG.info("Container {} is replicated successfully", containerID); - task.setStatus(Status.DONE); + // an archive with no entries still has ~45 bytes of tar/gzip framing + if (bytes <= 45) { + task.setStatus(Status.FAILED); + LOG.warn("Container {} downloaded size {} is no larger than " + + "45 bytes, which indicates an empty archive", + containerID, bytes); + } else { + importContainer(containerID, path); + LOG.info("Container {} is replicated successfully", containerID); + task.setStatus(Status.DONE); + } } catch (Exception e) { LOG.error("Container {} replication was unsuccessful.", containerID, e); task.setStatus(Status.FAILED); diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java index 7d9f3cdb1832..61d42f4eef95 100644 --- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java +++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java @@ -61,6 +61,7 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.nio.file.Files; import java.util.HashMap; import java.util.Map; import
java.util.List; @@ -238,6 +239,56 @@ public void testContainerImportExport() throws Exception { } } + @Test + public void testContainerMissingFileImportExport() throws Exception { + long containerId = keyValueContainer.getContainerData().getContainerID(); + createContainer(); + long numberOfKeysToWrite = 12; + closeContainer(); + populate(numberOfKeysToWrite); + + //destination path + File folderToExport = folder.newFile("exported.tar.gz"); + TarContainerPacker packer = new TarContainerPacker(); + + //if missing chunksfile + File chunkfile = new File(keyValueContainer. + getContainerData().getChunksPath()); + Files.delete(chunkfile.toPath()); + Assert.assertFalse(chunkfile.exists()); + //export the container + try (FileOutputStream fos = new FileOutputStream(folderToExport)) { + keyValueContainer + .exportContainerData(fos, packer); + } + + //delete the original one + keyValueContainer.delete(); + + //create a new one + KeyValueContainerData containerData = + new KeyValueContainerData(containerId, + keyValueContainerData.getLayOutVersion(), + keyValueContainerData.getMaxSize(), UUID.randomUUID().toString(), + datanodeId.toString()); + KeyValueContainer container = new KeyValueContainer(containerData, CONF); + + HddsVolume containerVolume = volumeChoosingPolicy.chooseVolume(volumeSet + .getVolumesList(), 1); + String hddsVolumeDir = containerVolume.getHddsRootDir().toString(); + + container.populatePathFields(scmId, containerVolume, hddsVolumeDir); + long bytes = Files.size(folderToExport.toPath()); + Assert.assertTrue(bytes <= 45); + + try (FileInputStream fis = new FileInputStream(folderToExport)) { + container.importContainerData(fis, packer); + Assert.fail("Import of an empty archive should have failed"); + } catch (Exception ex) { + assertTrue(ex instanceof NullPointerException); + } + } + /** * Create the container on disk. */