diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java
index e94f46a398b3..7aac5a8d40b3 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java
@@ -263,6 +263,66 @@ void testScanOfPipelinesWhenNoData() throws IOException {
     assertEquals("", stderr.toString());
   }
 
+  @Test
+  void testScanWithRecordsPerFile() throws IOException {
+    // Prepare dummy table
+    prepareTable(KEY_TABLE, false);
+
+    String scanDir1 = tempDir.getAbsolutePath() + "/scandir1";
+    // Prepare scan args
+    List<String> completeScanArgs1 = new ArrayList<>(Arrays.asList(
+        "--db", dbStore.getDbLocation().getAbsolutePath(),
+        "scan",
+        "--column-family", KEY_TABLE, "--out", scanDir1 + "/key",
+        "--max-records-per-file", "2"));
+
+    int exitCode1 = cmd.execute(completeScanArgs1.toArray(new String[0]));
+    // Check exit code. Print stderr if not expected
+    assertEquals(0, exitCode1, stderr.toString());
+
+    // Number of files generated by traversal
+    int count1 = 0;
+    File tmpDir1 = new File(scanDir1);
+    if (tmpDir1.isDirectory()) {
+      File[] files = tmpDir1.listFiles();
+      if (files != null) {
+        for (File tmpFile : files) {
+          if (tmpFile.isFile() && tmpFile.getName().startsWith("key")) {
+            count1++;
+          }
+        }
+      }
+    }
+
+    assertEquals(3, count1);
+
+    // Used with parameter '-l'
+    String scanDir2 = tempDir.getAbsolutePath() + "/scandir2";
+    // Prepare scan args
+    List<String> completeScanArgs2 = new ArrayList<>(Arrays.asList(
+        "--db", dbStore.getDbLocation().getAbsolutePath(),
+        "scan",
+        "--column-family", KEY_TABLE, "--out", scanDir2 + "/key",
+        "--max-records-per-file", "3", "-l", "2"));
+
+    int exitCode2 = cmd.execute(completeScanArgs2.toArray(new String[0]));
+    // Check exit code. Print stderr if not expected
+    assertEquals(0, exitCode2, stderr.toString());
+    int count2 = 0;
+    File tmpDir2 = new File(scanDir2);
+    if (tmpDir2.isDirectory()) {
+      File[] files = tmpDir2.listFiles();
+      if (files != null) {
+        for (File tmpFile : files) {
+          if (tmpFile.isFile() && tmpFile.getName().startsWith("key")) {
+            count2++;
+          }
+        }
+      }
+    }
+    assertEquals(2, count2);
+  }
+
   /**
    * Converts String input to a Map and compares to the given Map input.
    * @param expected expected result Map
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java
index 91cf38dada6d..4b36be5a1b7c 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java
@@ -53,6 +53,7 @@
 import picocli.CommandLine;
 
 import java.io.BufferedWriter;
+import java.io.File;
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.nio.file.Paths;
@@ -113,6 +114,8 @@ public class DBScanner implements Callable<Void>, SubcommandWithParent {
       description = "File to dump table scan data")
   private String fileName;
 
+  private int fileSuffix = 0;
+
   @CommandLine.Option(names = {"--startkey", "--sk", "-s"},
       description = "Key from which to iterate the DB")
   private String startKey;
@@ -148,6 +151,11 @@
       defaultValue = "10")
   private int threadCount;
 
+  @CommandLine.Option(names = {"--max-records-per-file"},
+      description = "The number of records to print per file.",
+      defaultValue = "0")
+  private long recordsPerFile;
+
   private static final String KEY_SEPARATOR_SCHEMA_V3 =
       new OzoneConfiguration().getObject(DatanodeConfiguration.class)
           .getContainerSchemaV3KeySeparator();
@@ -217,11 +225,30 @@ private boolean displayTable(ManagedRocksIterator iterator,
       return displayTable(iterator, dbColumnFamilyDef, out(), schemaV3);
     }
 
+    // If there are no parent directories, create them
+    File file = new File(fileName);
+    File parentFile = file.getParentFile();
+    if (parentFile != null && !parentFile.exists()) {
+      boolean created = parentFile.mkdirs();
+      if (!created) {
+        throw new IOException("An exception occurred while creating " +
+            "the directory: " + parentFile.getAbsolutePath());
+      }
+    }
+
     // Write to file output
-    try (PrintWriter out = new PrintWriter(new BufferedWriter(
-        new PrintWriter(fileName, UTF_8.name())))) {
-      return displayTable(iterator, dbColumnFamilyDef, out, schemaV3);
+    while (iterator.get().isValid()) {
+      String fileNameTarget = recordsPerFile > 0 ? fileName + fileSuffix++ :
+          fileName;
+      try (PrintWriter out = new PrintWriter(new BufferedWriter(
+          new PrintWriter(fileNameTarget, UTF_8.name())))) {
+        if (!displayTable(iterator, dbColumnFamilyDef, out, schemaV3)) {
+          return false;
+        }
+      }
     }
+
+    return true;
   }
 
   private boolean displayTable(ManagedRocksIterator iterator,
@@ -288,6 +315,9 @@ private void processRecords(ManagedRocksIterator iterator,
         batch = new ArrayList<>(batchSize);
         sequenceId++;
       }
+      if ((recordsPerFile > 0) && (count >= recordsPerFile)) {
+        break;
+      }
     }
     if (!batch.isEmpty()) {
       Future<Void> future = threadPool.submit(new Task(dbColumnFamilyDef,
@@ -305,7 +335,7 @@
   }
 
   private boolean withinLimit(long i) {
-    return limit == -1L || i < limit;
+    return recordsPerFile > 0 || limit == -1L || i < limit;
   }
 
   private ColumnFamilyHandle getColumnFamilyHandle(