diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
index 81a6935098af..11f54be8b93b 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
@@ -280,22 +280,22 @@ public static long getContainerID(File containerBaseDir) {
     return Long.parseLong(containerBaseDir.getName());
   }
 
-  public static String getContainerTarGzName(long containerId) {
-    return "container-" + containerId + ".tar.gz";
+  public static String getContainerTarName(long containerId) {
+    return "container-" + containerId + ".tar";
   }
 
-  public static long retrieveContainerIdFromTarGzName(String tarGzName)
+  public static long retrieveContainerIdFromTarName(String tarName)
       throws IOException {
-    assert tarGzName != null;
-    Pattern pattern = Pattern.compile("container-(\\d+).tar.gz");
+    assert tarName != null;
+    Pattern pattern = Pattern.compile("container-(\\d+)\\.tar");
     // Now create matcher object.
-    Matcher m = pattern.matcher(tarGzName);
+    Matcher m = pattern.matcher(tarName);
     if (m.find()) {
       return Long.parseLong(m.group(1));
     } else {
-      throw new IOException("Illegal container tar gz file " +
-          tarGzName);
+      throw new IOException("Illegal container tar file " +
+          tarName);
     }
   }
 
 }
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java
index 3d8c445a84fe..2f9c930191f7 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java
@@ -58,7 +58,7 @@
 import static org.apache.hadoop.ozone.OzoneConsts.SCHEMA_V3;
 
 /**
- * Compress/uncompress KeyValueContainer data to a tar.gz archive.
+ * Compress/uncompress KeyValueContainer data to a tar archive.
  */
 public class TarContainerPacker
     implements ContainerPacker<KeyValueContainerData> {
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java
index 88da4d994f07..fa4140040c8f 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java
@@ -109,7 +109,7 @@ public CompletableFuture<Path> download(long containerId) {
     CompletableFuture<Path> response = new CompletableFuture<>();
 
     Path destinationPath = getWorkingDirectory()
-        .resolve(ContainerUtils.getContainerTarGzName(containerId));
+        .resolve(ContainerUtils.getContainerTarName(containerId));
 
     client.download(request,
         new StreamDownloader(containerId, response, destinationPath));
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
index 51b929eb2b45..1bfdf8ae947f 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
@@ -59,12 +59,12 @@ public void redactsDataBuffers() {
   }
 
   @Test
-  public void testTarGzName() throws IOException {
+  public void testTarName() throws IOException {
     long containerId = 100;
-    String tarGzName = "container-100.tar.gz";
-    assertEquals(tarGzName, ContainerUtils.getContainerTarGzName(containerId));
+    String tarName = "container-100.tar";
+    assertEquals(tarName, ContainerUtils.getContainerTarName(containerId));
     assertEquals(containerId,
-        ContainerUtils.retrieveContainerIdFromTarGzName(tarGzName));
+        ContainerUtils.retrieveContainerIdFromTarName(tarName));
   }
 
 }
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
index 62a432e514a6..734b8d319384 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
@@ -193,7 +193,7 @@ public void testEmptyContainerImportExport() throws Exception {
     checkContainerFilesPresent(data, 0);
 
     //destination path
-    File exportTar = folder.newFile("exported.tar.gz");
+    File exportTar = folder.newFile("exported.tar");
     TarContainerPacker packer = new TarContainerPacker();
     //export the container
     try (FileOutputStream fos = new FileOutputStream(exportTar)) {
@@ -220,7 +220,7 @@ public void testContainerImportExport() throws Exception {
     populate(numberOfKeysToWrite);
 
     //destination path
-    File folderToExport = folder.newFile("exported.tar.gz");
+    File folderToExport = folder.newFile("exported.tar");
     for (Map.Entry entry :
         CopyContainerCompression.getCompressionMapping().entrySet()) {
       TarContainerPacker packer = new TarContainerPacker(entry.getValue());
@@ -368,7 +368,7 @@ public void concurrentExport() throws Exception {
     List<Thread> threads = IntStream.range(0, 20)
         .mapToObj(i -> new Thread(() -> {
           try {
-            File file = folder.newFile("concurrent" + i + ".tar.gz");
+            File file = folder.newFile("concurrent" + i + ".tar");
             try (OutputStream out = new FileOutputStream(file)) {
               keyValueContainer.exportContainerData(out, packer);
             }
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
index 9e16fb88bc7e..28b77bea5dd8 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
@@ -182,7 +182,7 @@ public void pack() throws IOException, CompressorException {
     //sample container descriptor file
     writeDescriptor(sourceContainer);
 
-    Path targetFile = TEMP_DIR.resolve("container.tar.gz");
+    Path targetFile = TEMP_DIR.resolve("container.tar");
 
     //WHEN: pack it
     try (FileOutputStream output = new FileOutputStream(targetFile.toFile())) {
@@ -372,7 +372,7 @@ private File writeSingleFile(Path parentPath, String fileName,
 
   private File packContainerWithSingleFile(File file, String entryName)
       throws Exception {
-    File targetFile = TEMP_DIR.resolve("container.tar.gz").toFile();
+    File targetFile = TEMP_DIR.resolve("container.tar").toFile();
     try (FileOutputStream output = new FileOutputStream(targetFile);
         OutputStream compressed = packer.compress(output);
         ArchiveOutputStream archive = new TarArchiveOutputStream(compressed)) {
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToScmHA.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToScmHA.java
index 1e01d0808043..b73c04d39926 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToScmHA.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToScmHA.java
@@ -674,7 +674,7 @@ public void importContainer(long containerID, File source) throws Exception {
         new TarContainerPacker(), dsm.getContainer().getVolumeSet());
 
     File tempFile = tempFolder.newFile(
-        ContainerUtils.getContainerTarGzName(containerID));
+        ContainerUtils.getContainerTarName(containerID));
     Files.copy(source.toPath(), tempFile.toPath(),
         StandardCopyOption.REPLACE_EXISTING);
     replicator.importContainer(containerID, tempFile.toPath(), null);
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/DBCheckpointServlet.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/DBCheckpointServlet.java
index 03f2fdca390b..a8f99149e575 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/DBCheckpointServlet.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/DBCheckpointServlet.java
@@ -21,31 +21,19 @@
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.file.Files;
 import java.nio.file.Path;
 import java.time.Duration;
 import java.time.Instant;
 import java.util.Collection;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
 
 import org.apache.hadoop.hdds.server.OzoneAdmins;
 import org.apache.hadoop.hdds.utils.db.DBCheckpoint;
 import org.apache.hadoop.hdds.utils.db.DBStore;
-import org.apache.commons.compress.archivers.ArchiveEntry;
-import org.apache.commons.compress.archivers.ArchiveOutputStream;
-import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.compress.compressors.CompressorException;
-import org.apache.commons.compress.compressors.CompressorOutputStream;
-import org.apache.commons.compress.compressors.CompressorStreamFactory;
-import org.apache.commons.compress.utils.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 
+import static org.apache.hadoop.hdds.utils.HddsServerUtil.writeDBCheckpointToStream;
 import static org.apache.hadoop.ozone.OzoneConsts.OZONE_DB_CHECKPOINT_REQUEST_FLUSH;
 
 import org.apache.hadoop.security.UserGroupInformation;
@@ -53,7 +41,7 @@
 import org.slf4j.LoggerFactory;
 
 /**
- * Provides the current checkpoint Snapshot of the OM/SCM DB. (tar.gz)
+ * Provides the current checkpoint Snapshot of the OM/SCM DB. (tar)
  */
 public class DBCheckpointServlet extends HttpServlet {
 
@@ -168,10 +156,10 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) {
       if (file == null) {
         return;
       }
-      response.setContentType("application/x-tgz");
+      response.setContentType("application/x-tar");
       response.setHeader("Content-Disposition",
           "attachment; filename=\"" +
-              file.toString() + ".tgz\"");
+              file + ".tar\"");
 
       Instant start = Instant.now();
       writeDBCheckpointToStream(checkpoint,
@@ -200,53 +188,4 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) {
     }
   }
 
-  /**
-   * Write DB Checkpoint to an output stream as a compressed file (tgz).
-   *
-   * @param checkpoint checkpoint file
-   * @param destination desination output stream.
-   * @throws IOException
-   */
-  public static void writeDBCheckpointToStream(DBCheckpoint checkpoint,
-      OutputStream destination)
-      throws IOException {
-
-    try (CompressorOutputStream gzippedOut = new CompressorStreamFactory()
-        .createCompressorOutputStream(CompressorStreamFactory.GZIP,
-            destination)) {
-
-      try (ArchiveOutputStream archiveOutputStream =
-          new TarArchiveOutputStream(gzippedOut)) {
-
-        Path checkpointPath = checkpoint.getCheckpointLocation();
-        try (Stream<Path> files = Files.list(checkpointPath)) {
-          for (Path path : files.collect(Collectors.toList())) {
-            if (path != null) {
-              Path fileName = path.getFileName();
-              if (fileName != null) {
-                includeFile(path.toFile(), fileName.toString(),
-                    archiveOutputStream);
-              }
-            }
-          }
-        }
-      }
-    } catch (CompressorException e) {
-      throw new IOException(
-          "Can't compress the checkpoint: " +
-              checkpoint.getCheckpointLocation(), e);
-    }
-  }
-
-  private static void includeFile(File file, String entryName,
-      ArchiveOutputStream archiveOutputStream)
-      throws IOException {
-    ArchiveEntry archiveEntry =
-        archiveOutputStream.createArchiveEntry(file, entryName);
-    archiveOutputStream.putArchiveEntry(archiveEntry);
-    try (FileInputStream fis = new FileInputStream(file)) {
-      IOUtils.copy(fis, archiveOutputStream);
-    }
-    archiveOutputStream.closeArchiveEntry();
-  }
 }
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
index 80970ff1bf65..ca119192849e 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
@@ -37,9 +37,6 @@
 import org.apache.commons.compress.archivers.ArchiveEntry;
 import org.apache.commons.compress.archivers.ArchiveOutputStream;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.compress.compressors.CompressorException;
-import org.apache.commons.compress.compressors.CompressorOutputStream;
-import org.apache.commons.compress.compressors.CompressorStreamFactory;
 import org.apache.commons.compress.utils.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.DFSConfigKeysLegacy;
@@ -526,7 +523,7 @@ public static MetricsSystem initializeMetrics(
   }
 
   /**
-   * Write DB Checkpoint to an output stream as a compressed file (tgz).
+   * Write DB Checkpoint to an output stream as a tar file.
    *
    * @param checkpoint checkpoint file
   * @param destination destination output stream.
@@ -535,11 +532,8 @@ public static MetricsSystem initializeMetrics(
   public static void writeDBCheckpointToStream(DBCheckpoint checkpoint,
       OutputStream destination)
       throws IOException {
-    try (CompressorOutputStream gzippedOut = new CompressorStreamFactory()
-        .createCompressorOutputStream(CompressorStreamFactory.GZIP,
-            destination);
-        ArchiveOutputStream archiveOutputStream =
-            new TarArchiveOutputStream(gzippedOut);
+    try (ArchiveOutputStream archiveOutputStream =
+        new TarArchiveOutputStream(destination);
         Stream<Path> files = Files.list(checkpoint.getCheckpointLocation())) {
 
       for (Path path : files.collect(Collectors.toList())) {
@@ -551,15 +545,11 @@ public static void writeDBCheckpointToStream(DBCheckpoint checkpoint,
           }
         }
       }
-    } catch (CompressorException e) {
-      throw new IOException(
-          "Can't compress the checkpoint: " +
-              checkpoint.getCheckpointLocation(), e);
     }
   }
 
   private static void includeFile(File file, String entryName,
-                                  ArchiveOutputStream archiveOutputStream)
+      ArchiveOutputStream archiveOutputStream)
       throws IOException {
     ArchiveEntry archiveEntry =
         archiveOutputStream.createArchiveEntry(file, entryName);
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMSnapshotProvider.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMSnapshotProvider.java
index ac6716c7d45c..7df404678fe6 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMSnapshotProvider.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMSnapshotProvider.java
@@ -104,7 +104,7 @@ public DBCheckpoint getSCMDBSnapshot(String leaderSCMNodeID)
     String snapshotFilePath =
         Paths.get(scmSnapshotDir.getAbsolutePath(), snapshotFileName).toFile()
             .getAbsolutePath();
-    File targetFile = new File(snapshotFilePath + ".tar.gz");
+    File targetFile = new File(snapshotFilePath + ".tar");
 
 
     // the downloadClient instance will be created as and when install snapshot
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestSCMDbCheckpointServlet.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestSCMDbCheckpointServlet.java
index 8a91388def4d..f473b62e74c2 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestSCMDbCheckpointServlet.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestSCMDbCheckpointServlet.java
@@ -131,7 +131,7 @@ public void testDoGet() throws ServletException, IOException {
         Matchers.anyString());
 
     tempFile = File.createTempFile("testDoGet_" + System
-        .currentTimeMillis(), ".tar.gz");
+        .currentTimeMillis(), ".tar");
     FileOutputStream fileOutputStream =
         new FileOutputStream(tempFile);
     when(responseMock.getOutputStream()).thenReturn(
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java
index 90743f2e17fe..403c99f58672 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java
@@ -24,7 +24,6 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
@@ -38,7 +37,6 @@
 
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.utils.db.DBCheckpoint;
-import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -46,12 +44,12 @@
 import org.apache.commons.io.FileUtils;
 
 import static org.apache.hadoop.hdds.recon.ReconConfig.ConfigStrings.OZONE_RECON_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hdds.utils.HddsServerUtil.writeDBCheckpointToStream;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ACL_ENABLED;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS_WILDCARD;
 import static org.apache.hadoop.ozone.OzoneConsts.OZONE_DB_CHECKPOINT_REQUEST_FLUSH;
 import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_HTTP_AUTH_TYPE;
-import static org.apache.hadoop.ozone.om.OMDBCheckpointServlet.writeDBCheckpointToStream;
 
 import org.junit.After;
 import org.junit.Assert;
@@ -101,7 +99,7 @@ public void init() throws Exception {
     conf = new OzoneConfiguration();
 
     tempFile = File.createTempFile("testDoGet_" + System
-        .currentTimeMillis(), ".tar.gz");
+        .currentTimeMillis(), ".tar");
 
     FileOutputStream fileOutputStream =
         new FileOutputStream(tempFile);
@@ -181,7 +179,7 @@ public void testDoGet() throws Exception {
         om.getOmAdminGroups(),
         om.isSpnegoEnabled());
 
-    doNothing().when(responseMock).setContentType("application/x-tgz");
+    doNothing().when(responseMock).setContentType("application/x-tar");
     doNothing().when(responseMock).setHeader(Matchers.anyString(),
         Matchers.anyString());
 
@@ -261,34 +259,28 @@ public void testSpnegoEnabled() throws Exception {
 
   @Test
   public void testWriteCheckpointToOutputStream() throws Exception {
-    FileInputStream fis = null;
-    FileOutputStream fos = null;
-
-    try {
-      String testDirName = folder.newFolder().getAbsolutePath();
-      File file = new File(testDirName + "/temp1.txt");
-      OutputStreamWriter writer = new OutputStreamWriter(
-          new FileOutputStream(file), StandardCharsets.UTF_8);
-      writer.write("Test data 1");
-      writer.close();
-
-      file = new File(testDirName + "/temp2.txt");
-      writer = new OutputStreamWriter(
-          new FileOutputStream(file), StandardCharsets.UTF_8);
-      writer.write("Test data 2");
-      writer.close();
-
-      File outputFile =
-          new File(Paths.get(testDirName, "output_file.tgz").toString());
-      TestDBCheckpoint dbCheckpoint = new TestDBCheckpoint(
-          Paths.get(testDirName));
-      writeDBCheckpointToStream(dbCheckpoint,
-          new FileOutputStream(outputFile));
-      assertNotNull(outputFile);
-    } finally {
-      IOUtils.closeStream(fis);
-      IOUtils.closeStream(fos);
-    }
+    String testDirName = folder.newFolder().getAbsolutePath();
+    File checkpoint = new File(testDirName, "checkpoint");
+    checkpoint.mkdir();
+    File file = new File(checkpoint, "temp1.txt");
+    OutputStreamWriter writer = new OutputStreamWriter(
+        new FileOutputStream(file), StandardCharsets.UTF_8);
+    writer.write("Test data 1");
+    writer.close();
+
+    file = new File(checkpoint, "temp2.txt");
+    writer = new OutputStreamWriter(
+        new FileOutputStream(file), StandardCharsets.UTF_8);
+    writer.write("Test data 2");
+    writer.close();
+
+    File outputFile =
+        new File(Paths.get(testDirName, "output_file.tar").toString());
+    TestDBCheckpoint dbCheckpoint = new TestDBCheckpoint(
+        checkpoint.toPath());
+    writeDBCheckpointToStream(dbCheckpoint,
+        new FileOutputStream(outputFile));
+    assertNotNull(outputFile);
   }
 
 }
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java
index 3ef2661f822d..5c043a1accf8 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java
@@ -114,7 +114,7 @@ public DBCheckpoint getOzoneManagerDBSnapshot(String leaderOMNodeID)
         + "-" + snapshotTime;
     String snapshotFilePath = Paths.get(omSnapshotDir.getAbsolutePath(),
         snapshotFileName).toFile().getAbsolutePath();
-    File targetFile = new File(snapshotFilePath + ".tar.gz");
+    File targetFile = new File(snapshotFilePath + ".tar");
 
     String omCheckpointUrl = peerNodesMap.get(leaderOMNodeID)
         .getOMDBCheckpointEnpointUrl(httpPolicy.isHttpEnabled());
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java
index c6f735e7af83..06b57eb3ddd0 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java
@@ -31,7 +31,6 @@
 import java.nio.file.Paths;
 import java.security.KeyPair;
 import java.sql.Timestamp;
-import java.util.zip.GZIPOutputStream;
 
 import com.google.inject.Singleton;
 import org.apache.hadoop.hdds.HddsConfigKeys;
@@ -46,7 +45,6 @@
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
 import static org.apache.hadoop.hdds.server.ServerUtils.getDirectoryFromConfig;
 import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath;
 import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SCM_DB_DIR;
@@ -104,23 +102,20 @@ public File getReconDbDir(ConfigurationSource conf, String dirConfigKey) {
   }
 
   /**
-   * Given a source directory, create a tar.gz file from it.
+   * Given a source directory, create a tar file from it.
    *
    * @param sourcePath the path to the directory to be archived.
-   * @return tar.gz file
+   * @return tar file
    * @throws IOException
    */
   public static File createTarFile(Path sourcePath) throws IOException {
     TarArchiveOutputStream tarOs = null;
     FileOutputStream fileOutputStream = null;
-    GZIPOutputStream gzipOutputStream = null;
     try {
       String sourceDir = sourcePath.toString();
-      String fileName = sourceDir.concat(".tar.gz");
+      String fileName = sourceDir.concat(".tar");
       fileOutputStream = new FileOutputStream(fileName);
-      gzipOutputStream =
-          new GZIPOutputStream(new BufferedOutputStream(fileOutputStream));
-      tarOs = new TarArchiveOutputStream(gzipOutputStream);
+      tarOs = new TarArchiveOutputStream(fileOutputStream);
       File folder = new File(sourceDir);
       File[] filesInDir = folder.listFiles();
       if (filesInDir != null) {
@@ -133,7 +128,6 @@ public static File createTarFile(Path sourcePath) throws IOException {
     try {
       org.apache.hadoop.io.IOUtils.closeStream(tarOs);
       org.apache.hadoop.io.IOUtils.closeStream(fileOutputStream);
-      org.apache.hadoop.io.IOUtils.closeStream(gzipOutputStream);
     } catch (Exception e) {
       LOG.error("Exception encountered when closing " +
           "TAR file output stream: " + e);
@@ -177,12 +171,8 @@ public void untarCheckpointFile(File tarFile, Path destPath)
       throws IOException {
 
     FileInputStream fileInputStream = null;
-    BufferedInputStream buffIn = null;
-    GzipCompressorInputStream gzIn = null;
    try {
       fileInputStream = new FileInputStream(tarFile);
-      buffIn = new BufferedInputStream(fileInputStream);
-      gzIn = new GzipCompressorInputStream(buffIn);
 
       //Create Destination directory if it does not exist.
       if (!destPath.toFile().exists()) {
@@ -193,7 +183,7 @@ public void untarCheckpointFile(File tarFile, Path destPath)
       }
 
       try (TarArchiveInputStream tarInStream =
-          new TarArchiveInputStream(gzIn)) {
+          new TarArchiveInputStream(fileInputStream)) {
         TarArchiveEntry entry;
         while ((entry = (TarArchiveEntry) tarInStream.getNextEntry()) != null) {
@@ -223,8 +213,6 @@ public void untarCheckpointFile(File tarFile, Path destPath)
         }
       }
     } finally {
-      IOUtils.closeStream(gzIn);
-      IOUtils.closeStream(buffIn);
       IOUtils.closeStream(fileInputStream);
     }
   }
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java
index cc4bdffb0701..a3897d9a5542 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java
@@ -341,7 +341,7 @@ DBCheckpoint getOzoneManagerDBSnapshot() {
     String snapshotFileName = RECON_OM_SNAPSHOT_DB + "_" +
         System.currentTimeMillis();
     File targetFile = new File(omSnapshotDBParentDir, snapshotFileName +
-        ".tar.gz");
+        ".tar");
     try {
       SecurityUtil.doAsLoginUser(() -> {
         try (InputStream inputStream = reconUtils.makeHttpCall(
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/StorageContainerServiceProviderImpl.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/StorageContainerServiceProviderImpl.java
index bb94c6d29842..b726c8a51651 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/StorageContainerServiceProviderImpl.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/StorageContainerServiceProviderImpl.java
@@ -175,7 +175,7 @@ public DBCheckpoint getSCMDBSnapshot() {
     String snapshotFileName =
         RECON_SCM_SNAPSHOT_DB + "_" + System.currentTimeMillis();
     File targetFile = new File(scmSnapshotDBParentDir, snapshotFileName +
-        ".tar.gz");
+        ".tar");
 
     try {
       if (!SCMHAUtils.isSCMHAEnabled(configuration)) {
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/container/ExportSubcommand.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/container/ExportSubcommand.java
index e2f0a255690b..c5e37a186767 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/container/ExportSubcommand.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/container/ExportSubcommand.java
@@ -73,7 +73,7 @@ public Void call() throws Exception {
     for (int i = 0; i < containerCount; i++) {
       replicationSource.prepare(containerId);
       final File destinationFile =
-          new File(destination, "container-" + containerId + ".tar.gz");
+          new File(destination, "container-" + containerId + ".tar");
       try (FileOutputStream fos = new FileOutputStream(destinationFile)) {
         try {
          replicationSource.copyData(containerId, fos, GZIP);
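
For reference: a minimal, self-contained sketch of the uncompressed tar round-trip that the patched writeDBCheckpointToStream and untarCheckpointFile now perform. The TarRoundTrip class and its pack/unpack method names are illustrative, not code from this patch; the commons-compress calls are the same ones used in the changed files.

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.utils.IOUtils;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public final class TarRoundTrip {

  /** Pack every regular file in sourceDir into an uncompressed tar archive. */
  static void pack(Path sourceDir, Path tarFile) throws IOException {
    try (TarArchiveOutputStream out =
        new TarArchiveOutputStream(new FileOutputStream(tarFile.toFile()))) {
      // POSIX long-name support; plain tar headers cap entry names at 100 chars.
      out.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
      File[] files = sourceDir.toFile().listFiles(File::isFile);
      if (files != null) {
        for (File file : files) {
          // The entry records the name and size in the tar header.
          out.putArchiveEntry(new TarArchiveEntry(file, file.getName()));
          try (FileInputStream in = new FileInputStream(file)) {
            IOUtils.copy(in, out);
          }
          out.closeArchiveEntry();
        }
      }
    }
  }

  /** Extract the archive; with plain tar no gzip wrapper stream is needed. */
  static void unpack(Path tarFile, Path destDir) throws IOException {
    Files.createDirectories(destDir);
    try (TarArchiveInputStream in =
        new TarArchiveInputStream(new FileInputStream(tarFile.toFile()))) {
      TarArchiveEntry entry;
      while ((entry = in.getNextTarEntry()) != null) {
        if (entry.isDirectory()) {
          continue; // pack() above only writes regular files
        }
        // Production code should also reject entry names escaping destDir ("../").
        File target = destDir.resolve(entry.getName()).toFile();
        try (FileOutputStream out = new FileOutputStream(target)) {
          IOUtils.copy(in, out);
        }
      }
    }
  }
}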