diff --git a/hadoop-hdds/docs/content/feature/SCM-HA.md b/hadoop-hdds/docs/content/feature/SCM-HA.md
index ebbe998fd4ce..aa3142d01bb2 100644
--- a/hadoop-hdds/docs/content/feature/SCM-HA.md
+++ b/hadoop-hdds/docs/content/feature/SCM-HA.md
@@ -207,10 +207,10 @@ bin/ozone freon randomkeys --numOfVolumes=1 --numOfBuckets=1 --numOfKeys=10000 -
 
 // use debug ldb to check scm db on all the machines
-bin/ozone debug ldb --db=/tmp/metadata/scm.db/ ls
+bin/ozone debug ldb --db=/tmp/metadata/scm.db ls
 
-bin/ozone debug ldb --db=/tmp/metadata/scm.db/ scan --with-keys --column_family=containers
+bin/ozone debug ldb --db=/tmp/metadata/scm.db scan --column_family=containers
 ```
 
 ## Migrating from existing SCM
diff --git a/hadoop-hdds/docs/content/feature/SCM-HA.zh.md b/hadoop-hdds/docs/content/feature/SCM-HA.zh.md
index 4f4ce3b499e9..ae8fb314cf79 100644
--- a/hadoop-hdds/docs/content/feature/SCM-HA.zh.md
+++ b/hadoop-hdds/docs/content/feature/SCM-HA.zh.md
@@ -147,10 +147,10 @@ bin/ozone freon randomkeys --numOfVolumes=1 --numOfBuckets=1 --numOfKeys=10000 -
 
 // use debug ldb to check scm db on all the machines
-bin/ozone debug ldb --db=/tmp/metadata/scm.db/ ls
+bin/ozone debug ldb --db=/tmp/metadata/scm.db ls
 
-bin/ozone debug ldb --db=/tmp/metadata/scm.db/ scan --with-keys --column_family=containers
+bin/ozone debug ldb --db=/tmp/metadata/scm.db scan --column_family=containers
 ```
 
 ## 从现有的SCM迁移
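Note: both doc pages now match the normalized `ldb` CLI — the `--db` path is given without a trailing slash, and `scan` prints keys together with values by default, so the `--with-keys` flag is dropped from the example. For illustration only (hypothetical sample, not part of this patch), scanning the test keyTable built in `TestOmLDBCli` below would print a single pretty-printed JSON map from table key to decoded value, roughly:

```json
{
  "key0" : {
    "volumeName" : "sampleVol",
    "bucketName" : "sampleBuck",
    "keyName" : "key1"
  },
  "key1" : {
    "volumeName" : "sampleVol",
    "bucketName" : "sampleBuck",
    "keyName" : "key2"
  }
}
```

(Field list abbreviated; the real output carries every field Jackson serializes from `OmKeyInfo`, minus the getters annotated with `@JsonIgnore` in the next hunk.)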
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
index 9b2014dd7b84..eabe05913814 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
@@ -25,6 +25,7 @@
 import java.util.Map;
 import java.util.Objects;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileEncryptionInfo;
@@ -150,6 +151,7 @@ public void setFileName(String fileName) {
     this.fileName = fileName;
   }
 
+  @JsonIgnore
   public String getFileName() {
     return fileName;
   }
@@ -755,6 +757,7 @@ public void setFileEncryptionInfo(FileEncryptionInfo fileEncryptionInfo) {
     this.encInfo = fileEncryptionInfo;
   }
 
+  @JsonIgnore
   public String getPath() {
     if (StringUtils.isBlank(getFileName())) {
       return getKeyName();
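The two `@JsonIgnore` annotations keep derived getters out of the JSON emitted via Jackson: `getFileName()` and `getPath()` compute values from other fields, and without the annotation Jackson would serialize them as extra properties, breaking the map round-trip asserted in `TestOmLDBCli`. A minimal standalone sketch of the effect (toy class, not Ozone code):

```java
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class JsonIgnoreDemo {
  // Toy stand-in for OmKeyInfo: one real property, one derived getter.
  public static class Key {
    public String getKeyName() {
      return "key1";
    }

    @JsonIgnore  // without this, Jackson would emit a "path" property too
    public String getPath() {
      return "derived/" + getKeyName();
    }
  }

  public static void main(String[] args) throws Exception {
    // Prints {"keyName":"key1"} -- getPath() is skipped.
    System.out.println(new ObjectMapper().writeValueAsString(new Key()));
  }
}
```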
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java
index a670cf69d42a..c905a2eee396 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmLDBCli.java
@@ -1,4 +1,4 @@
-/**
+/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
@@ -16,10 +16,9 @@
  */
 package org.apache.hadoop.ozone.om;
 
-
-import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.server.JsonUtils;
 import org.apache.hadoop.hdds.utils.db.DBStore;
 import org.apache.hadoop.hdds.utils.db.DBStoreBuilder;
 import org.apache.hadoop.hdds.utils.db.Table;
@@ -28,160 +27,174 @@
 import org.apache.hadoop.ozone.debug.RDBParser;
 import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
 import org.apache.hadoop.ozone.om.request.OMRequestTestUtils;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.commons.io.FileUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
-import org.junit.Assert;
 import org.junit.rules.TemporaryFolder;
+import picocli.CommandLine;
 
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStreamReader;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
 import java.time.LocalDateTime;
-import java.util.List;
-import java.util.ArrayList;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.TreeMap;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
-
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertTrue;
 
 /**
  * This class tests the Debug LDB CLI that reads from an om.db file.
  */
 public class TestOmLDBCli {
-  private OzoneConfiguration conf;
-  private RDBParser rdbParser;
-  private DBScanner dbScanner;
-  private DBStore dbStore = null;
-  private List<String> keyNames;
+  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final String KEY_TABLE = "keyTable";
+
+  private DBStore dbStore;
 
   @Rule
   public TemporaryFolder folder = new TemporaryFolder();
 
+  private CommandLine cmd;
+  private StringWriter output;
+  private StringWriter error;
+  private NavigableMap<String, Map<String, Object>> keys;
+
   @Before
   public void setup() throws Exception {
-    conf = new OzoneConfiguration();
-    rdbParser = new RDBParser();
-    dbScanner = new DBScanner();
-    keyNames = new ArrayList<>();
-  }
+    OzoneConfiguration conf = new OzoneConfiguration();
 
-  @After
-  public void shutdown() throws Exception {
-    if (dbStore != null) {
-      dbStore.close();
-    }
-  }
-
-  @Test
-  public void testOMDB() throws Exception {
     File newFolder = folder.newFolder();
     if (!newFolder.exists()) {
-      Assert.assertTrue(newFolder.mkdirs());
+      assertTrue(newFolder.mkdirs());
     }
+
     // Dummy om.db with only keyTable
     dbStore = DBStoreBuilder.newBuilder(conf)
-      .setName("om.db")
-      .setPath(newFolder.toPath())
-      .addTable("keyTable")
-      .build();
+        .setName("om.db")
+        .setPath(newFolder.toPath())
+        .addTable(KEY_TABLE)
+        .build();
+
+    // insert 5 keys
+    keys = new TreeMap<>();
     for (int i = 0; i < 5; i++) {
       OmKeyInfo value = OMRequestTestUtils.createOmKeyInfo("sampleVol",
          "sampleBuck", "key" + (i + 1), HddsProtos.ReplicationType.STAND_ALONE,
          HddsProtos.ReplicationFactor.ONE);
       String key = "key" + (i);
-      Table<byte[], byte[]> keyTable = dbStore.getTable("keyTable");
+      keys.put(key, toMap(value));
+      Table<byte[], byte[]> keyTable = dbStore.getTable(KEY_TABLE);
       byte[] arr = value
          .getProtobuf(ClientVersion.CURRENT_VERSION).toByteArray();
       keyTable.put(key.getBytes(UTF_8), arr);
     }
-    rdbParser.setDbPath(dbStore.getDbLocation().getAbsolutePath());
-    dbScanner.setParent(rdbParser);
-    Assert.assertEquals(5, getKeyNames(dbScanner).size());
-    Assert.assertTrue(getKeyNames(dbScanner).contains("key1"));
-    Assert.assertTrue(getKeyNames(dbScanner).contains("key5"));
-    Assert.assertFalse(getKeyNames(dbScanner).contains("key6"));
-
-    DBScanner.setLimit(1);
-    Assert.assertEquals(1, getKeyNames(dbScanner).size());
-
-    DBScanner.setLimit(0);
-    try {
-      getKeyNames(dbScanner);
-      Assert.fail("IllegalArgumentException is expected");
-    } catch (IllegalArgumentException e) {
-      //ignore
+
+    output = new StringWriter();
+    error = new StringWriter();
+    cmd = new CommandLine(new RDBParser())
+        .addSubcommand(new DBScanner())
+        .setOut(new PrintWriter(output))
+        .setErr(new PrintWriter(error));
+  }
+
+  @After
+  public void shutdown() throws Exception {
+    if (dbStore != null) {
+      dbStore.close();
     }
+  }
+
+  @Test
+  public void testDefaults() throws Exception {
+    int exitCode = cmd.execute(
+        "--db", dbStore.getDbLocation().getAbsolutePath(),
+        "scan",
+        "--column_family", KEY_TABLE);
+
+    assertNoError(exitCode);
+    assertContents(output.toString(), keys);
+  }
+
+  @Test
+  public void testLength() throws Exception {
+    int exitCode = cmd.execute(
+        "--db", dbStore.getDbLocation().getAbsolutePath(),
+        "scan",
+        "--column_family", KEY_TABLE,
+        "--length", "1");
+
+    assertNoError(exitCode);
+    assertContents(output.toString(), keys.headMap("key0", true));
+  }
+
+  @Test
+  public void testInvalidLength() {
+    int exitCode = cmd.execute(
+        "--db", dbStore.getDbLocation().getAbsolutePath(),
+        "scan",
+        "--column_family", KEY_TABLE,
+        "--length", "0");
+
+    assertNotEquals(0, exitCode);
+    assertTrue(error.toString().contains("IllegalArgument"));
+  }
 
+  @Test
+  public void testUnlimitedLength() throws Exception {
     // If set with -1, check if it dumps entire table data.
-    DBScanner.setLimit(-1);
-    Assert.assertEquals(5, getKeyNames(dbScanner).size());
+    int exitCode = cmd.execute(
+        "--db", dbStore.getDbLocation().getAbsolutePath(),
+        "scan",
+        "--column_family", KEY_TABLE,
+        "--length", "-1");
+
+    assertNoError(exitCode);
+    assertContents(output.toString(), keys);
+  }
 
-    // Test dump to file.
+  @Test
+  public void testOutputToFile() throws IOException {
     File tempFile = folder.newFolder();
-    String outFile = tempFile.getAbsolutePath() + "keyTable"
+    String outFile = tempFile.getAbsolutePath() + KEY_TABLE
        + LocalDateTime.now();
-    BufferedReader bufferedReader = null;
-    try {
-      DBScanner.setLimit(-1);
-      DBScanner.setFileName(outFile);
-      keyNames = getKeyNames(dbScanner);
-      Assert.assertEquals(5, keyNames.size());
-      Assert.assertTrue(new File(outFile).exists());
-
-      bufferedReader = new BufferedReader(
-          new InputStreamReader(new FileInputStream(outFile), UTF_8));
-
-      String readLine;
-      int count = 0;
-
-      while ((readLine = bufferedReader.readLine()) != null) {
-        for (String keyName : keyNames) {
-          if (readLine.contains(keyName)) {
-            count++;
-            break;
-          }
-        }
-      }
-
-      // As keyName will be in the file twice for each key.
-      // Once in keyName and second time in fileName.
-
-      // Sample key data.
-      // {
-      //   ..
-      //   ..
-      //   "keyName": "key5",
-      //   "fileName": "key5",
-      //   ..
-      //   ..
-      // }
-
-      Assert.assertEquals("File does not have all keys",
-          keyNames.size() * 2, count);
-    } finally {
-      if (bufferedReader != null) {
-        bufferedReader.close();
-      }
-      if (new File(outFile).exists()) {
-        FileUtils.deleteQuietly(new File(outFile));
-      }
-    }
+    int exitCode = cmd.execute(
+        "--db", dbStore.getDbLocation().getAbsolutePath(),
+        "scan",
+        "--column_family", KEY_TABLE,
+        "--out", outFile,
+        "--length", "-1");
+
+    assertNoError(exitCode);
+    File file = new File(outFile);
+    assertTrue(file.exists());
+    assertContents(FileUtils.readFileToString(file, UTF_8), keys);
   }
 
-  private List<String> getKeyNames(DBScanner scanner)
-      throws Exception {
-    keyNames.clear();
-    scanner.setTableName("keyTable");
-    scanner.call();
-    Assert.assertFalse(scanner.getScannedObjects().isEmpty());
-    for (Object o : scanner.getScannedObjects()) {
-      OmKeyInfo keyInfo = (OmKeyInfo)o;
-      keyNames.add(keyInfo.getKeyName());
-    }
-    return keyNames;
+  private void assertNoError(int exitCode) {
+    assertEquals(error.toString(), 0, exitCode);
   }
+
+  private void assertContents(String content, Map<String, ?> expected)
+      throws IOException {
+    Map<String, Map<String, Object>> result = MAPPER.readValue(content,
+        new TypeReference<Map<String, Map<String, Object>>>() { });
+
+    assertEquals(expected, result);
+  }
+
+  private static Map<String, Object> toMap(OmKeyInfo obj) throws IOException {
+    String json = JsonUtils.toJsonStringWithDefaultPrettyPrinter(obj);
+    return MAPPER.readValue(json,
+        new TypeReference<Map<String, Object>>() { });
+  }
+
 }
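The rewritten test drives the real picocli command line the same way a user would, capturing stdout/stderr per test instead of poking `DBScanner`'s former static fields. The harness pattern in isolation (stand-in command and names are illustrative, not part of the patch):

```java
import java.io.PrintWriter;
import java.io.StringWriter;

import picocli.CommandLine;

public final class CliHarnessSketch {

  @CommandLine.Command(name = "greet")
  static class Greet implements Runnable {
    @CommandLine.Spec
    private CommandLine.Model.CommandSpec spec;

    @CommandLine.Option(names = "--name", defaultValue = "world")
    private String name;

    @Override
    public void run() {
      // Write via the spec, not System.out, so tests can capture output.
      spec.commandLine().getOut().println("hello " + name);
    }
  }

  public static void main(String[] args) {
    StringWriter out = new StringWriter();
    StringWriter err = new StringWriter();
    int exitCode = new CommandLine(new Greet())
        .setOut(new PrintWriter(out))
        .setErr(new PrintWriter(err))
        .execute("--name", "ozone");
    // Exit code and captured output can be asserted just like in the test.
    System.out.println(exitCode + ": " + out);
  }
}
```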
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java
index 94cbb855a75e..852789ea1a5f 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java
@@ -18,27 +18,25 @@
 package org.apache.hadoop.ozone.debug;
 
-import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
-import java.io.Writer;
-import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.SubcommandWithParent;
+import org.apache.hadoop.hdds.server.JsonUtils;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.utils.db.DBColumnFamilyDefinition;
 import org.apache.hadoop.hdds.utils.db.DBDefinition;
 import org.apache.hadoop.ozone.OzoneConsts;
 
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
 import org.kohsuke.MetaInfServices;
 import org.rocksdb.ColumnFamilyDescriptor;
 import org.rocksdb.ColumnFamilyHandle;
@@ -46,6 +44,8 @@
 import org.rocksdb.RocksIterator;
 import picocli.CommandLine;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 /**
  * Parser for scm.db file.
  */
@@ -56,112 +56,104 @@
 @MetaInfServices(SubcommandWithParent.class)
 public class DBScanner implements Callable<Void>, SubcommandWithParent {
 
+  @CommandLine.Spec
+  private CommandLine.Model.CommandSpec spec;
+
   @CommandLine.Option(names = {"--column_family"},
       required = true,
       description = "Table name")
   private String tableName;
 
+  @Deprecated
   @CommandLine.Option(names = {"--with-keys"},
-      description = "List Key -> Value instead of just Value.",
-      defaultValue = "false",
-      showDefaultValue = CommandLine.Help.Visibility.ALWAYS)
-  private static boolean withKey;
+      description = "[ignored]",
+      defaultValue = "false")
+  private boolean withKeys;
+
+  @CommandLine.Option(names = {"--omit-keys"},
+      description = "Print only values",
+      defaultValue = "false")
+  private boolean omitKeys;
 
   @CommandLine.Option(names = {"--length", "-l"},
       description = "Maximum number of items to list. " +
          "If -1 dumps the entire table data")
-  private static int limit = 100;
+  private int limit = 100;
 
   @CommandLine.Option(names = {"--out", "-o"},
       description = "File to dump table scan data")
-  private static String fileName;
+  private String fileName;
 
   @CommandLine.Option(names = {"--dnSchema", "-d"},
       description = "Datanode DB Schema Version : V1/V2",
       defaultValue = "V2")
-  private static String dnDBSchemaVersion;
+  private String dnDBSchemaVersion;
 
   @CommandLine.ParentCommand
   private RDBParser parent;
 
   private HashMap<String, DBColumnFamilyDefinition> columnFamilyMap;
 
-  private List<Object> scannedObjects;
+  private PrintWriter err() {
+    return spec.commandLine().getErr();
+  }
 
-  private static List<Object> displayTable(RocksIterator iterator,
+  private PrintWriter out() {
+    return spec.commandLine().getOut();
+  }
+
+  private void displayTable(RocksIterator iterator,
       DBColumnFamilyDefinition dbColumnFamilyDefinition) throws IOException {
-    List<Object> outputs = new ArrayList<>();
     iterator.seekToFirst();
-    Writer fileWriter = null;
-    PrintWriter printWriter = null;
-    try {
-      if (fileName != null) {
-        fileWriter = new OutputStreamWriter(
-            new FileOutputStream(fileName), StandardCharsets.UTF_8);
-        printWriter = new PrintWriter(fileWriter);
-      }
-      while (iterator.isValid()) {
-        StringBuilder result = new StringBuilder();
-        if (withKey) {
-          Object key = dbColumnFamilyDefinition.getKeyCodec()
-              .fromPersistedFormat(iterator.key());
-          Gson gson = new GsonBuilder().setPrettyPrinting().create();
-          result.append(gson.toJson(key));
-          result.append(" -> ");
-        }
-        Object o = dbColumnFamilyDefinition.getValueCodec()
-            .fromPersistedFormat(iterator.value());
-        outputs.add(o);
-        Gson gson = new GsonBuilder().setPrettyPrinting().create();
-        result.append(gson.toJson(o));
-        if (fileName != null) {
-          printWriter.println(result);
-        } else {
-          System.out.println(result.toString());
-        }
-        limit--;
-        iterator.next();
-        if (limit == 0) {
-          break;
-        }
-      }
-    } finally {
-      if (printWriter != null) {
-        printWriter.close();
-      }
-      if (fileWriter != null) {
-        fileWriter.close();
+    if (fileName != null) {
+      try (PrintWriter out = new PrintWriter(fileName, UTF_8.name())) {
+        displayTable(iterator, dbColumnFamilyDefinition, out);
       }
+    } else {
+      displayTable(iterator, dbColumnFamilyDefinition, out());
     }
-    return outputs;
-  }
-
-  public void setTableName(String tableName) {
-    this.tableName = tableName;
   }
 
-  public RDBParser getParent() {
-    return parent;
-  }
-
-  public void setParent(RDBParser parent) {
-    this.parent = parent;
+  private void displayTable(RocksIterator iter,
+      DBColumnFamilyDefinition dbColumnFamilyDefinition, PrintWriter out)
+      throws IOException {
+    Object result;
+    if (omitKeys) {
+      List<Object> list = new ArrayList<>();
+      for (int i = 0; iter.isValid() && withinLimit(i); iter.next(), i++) {
+        list.add(getValue(iter, dbColumnFamilyDefinition));
+      }
+      result = list;
+    } else {
+      Map<Object, Object> map = new LinkedHashMap<>();
+      for (int i = 0; iter.isValid() && withinLimit(i); iter.next(), i++) {
+        Object k = getKey(iter, dbColumnFamilyDefinition);
+        Object v = getValue(iter, dbColumnFamilyDefinition);
+        map.put(k, v);
+      }
+      result = map;
+    }
+    out.println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(result));
   }
 
-  public static void setLimit(int limit) {
-    DBScanner.limit = limit;
+  private boolean withinLimit(int i) {
+    return limit == -1 || i < limit;
   }
 
-  public List<Object> getScannedObjects() {
-    return scannedObjects;
+  private Object getKey(RocksIterator iterator,
+      DBColumnFamilyDefinition dbColumnFamilyDefinition) throws IOException {
+    return dbColumnFamilyDefinition.getKeyCodec()
+        .fromPersistedFormat(iterator.key());
   }
 
-  public static void setFileName(String name) {
-    DBScanner.fileName = name;
+  private Object getValue(RocksIterator iterator,
+      DBColumnFamilyDefinition dbColumnFamilyDefinition) throws IOException {
+    return dbColumnFamilyDefinition.getValueCodec()
+        .fromPersistedFormat(iterator.value());
   }
 
-  private static ColumnFamilyHandle getColumnFamilyHandle(
+  private ColumnFamilyHandle getColumnFamilyHandle(
       byte[] name, List<ColumnFamilyHandle> columnFamilyHandles) {
     return columnFamilyHandles
         .stream()
@@ -182,13 +174,13 @@ private void constructColumnFamilyMap(DBDefinition dbDefinition) {
       System.out.println("Incorrect Db Path");
       return;
     }
-    this.columnFamilyMap = new HashMap<>();
+    columnFamilyMap = new HashMap<>();
     DBColumnFamilyDefinition[] columnFamilyDefinitions = dbDefinition
         .getColumnFamilies();
     for (DBColumnFamilyDefinition definition:columnFamilyDefinitions) {
-      System.out.println("Added definition for table:" +
+      err().println("Added definition for table:" +
          definition.getTableName());
-      this.columnFamilyMap.put(definition.getTableName(), definition);
+      columnFamilyMap.put(definition.getTableName(), definition);
     }
   }
 
@@ -201,8 +193,7 @@ public Void call() throws Exception {
         new ArrayList<>();
     RocksDB rocksDB = RocksDB.openReadOnly(parent.getDbPath(),
         cfs, columnFamilyHandleList);
-    this.printAppropriateTable(columnFamilyHandleList,
-        rocksDB, parent.getDbPath());
+    printAppropriateTable(columnFamilyHandleList, rocksDB, parent.getDbPath());
     return null;
   }
 
@@ -214,28 +205,28 @@ private void printAppropriateTable(
          "List length should be a positive number. Only allowed negative" +
              " number is -1 which is to dump entire table");
     }
-    dbPath = removeTrailingSlashIfNeeded(dbPath);
+    Path path = Paths.get(removeTrailingSlashIfNeeded(dbPath));
     DBDefinitionFactory.setDnDBSchemaVersion(dnDBSchemaVersion);
-    this.constructColumnFamilyMap(DBDefinitionFactory.
-        getDefinition(Paths.get(dbPath), new OzoneConfiguration()));
-    if (this.columnFamilyMap != null) {
-      if (!this.columnFamilyMap.containsKey(tableName)) {
-        System.out.print("Table with name:" + tableName + " does not exist");
+    constructColumnFamilyMap(DBDefinitionFactory.getDefinition(path,
+        new OzoneConfiguration()));
+    if (columnFamilyMap != null) {
+      if (!columnFamilyMap.containsKey(tableName)) {
+        err().print("Table with name:" + tableName + " does not exist");
       } else {
         DBColumnFamilyDefinition columnFamilyDefinition =
-            this.columnFamilyMap.get(tableName);
+            columnFamilyMap.get(tableName);
         ColumnFamilyHandle columnFamilyHandle = getColumnFamilyHandle(
             columnFamilyDefinition.getTableName()
-                .getBytes(StandardCharsets.UTF_8),
+                .getBytes(UTF_8),
             columnFamilyHandleList);
         if (columnFamilyHandle == null) {
          throw new IllegalArgumentException("columnFamilyHandle is null");
         }
         RocksIterator iterator = rocksDB.newIterator(columnFamilyHandle);
-        scannedObjects = displayTable(iterator, columnFamilyDefinition);
+        displayTable(iterator, columnFamilyDefinition);
       }
     } else {
-      System.out.println("Incorrect db Path");
+      err().println("Incorrect db Path");
     }
   }
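The scanner now buffers one page of results (a `LinkedHashMap` to preserve RocksDB iteration order, or a bare list under `--omit-keys`) and serializes it in a single pass, so the output is one valid JSON document rather than a stream of per-row fragments, with `--length -1` meaning unlimited via `withinLimit`. The core loop, sketched standalone against an ordinary iterator (illustrative names, not the patch itself):

```java
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

public final class ScanLimitSketch {
  // Mirrors DBScanner#withinLimit: -1 disables the limit.
  static boolean withinLimit(int limit, int i) {
    return limit == -1 || i < limit;
  }

  static <K, V> Map<K, V> scan(Iterator<Map.Entry<K, V>> iter, int limit) {
    // LinkedHashMap keeps insertion order, matching the table's sort order.
    Map<K, V> result = new LinkedHashMap<>();
    for (int i = 0; iter.hasNext() && withinLimit(limit, i); i++) {
      Map.Entry<K, V> e = iter.next();
      result.put(e.getKey(), e.getValue());
    }
    return result;
  }

  public static void main(String[] args) {
    Map<String, String> table = new LinkedHashMap<>();
    table.put("key0", "value0");
    table.put("key1", "value1");
    // limit 1 keeps only the first entry; -1 would keep both.
    System.out.println(scan(table.entrySet().iterator(), 1)); // {key0=value0}
  }
}
```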
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/RDBParser.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/RDBParser.java
index f133386ab13f..e18baaab1838 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/RDBParser.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/RDBParser.java
@@ -49,10 +49,6 @@ public String getDbPath() {
     return dbPath;
   }
 
-  public void setDbPath(String dbPath) {
-    this.dbPath = dbPath;
-  }
-
   @Override
   public Class<?> getParentType() {
     return OzoneDebug.class;
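With `setDbPath` removed and the scanner's options no longer static, CLI arguments are the only way state enters these commands, which is what makes the in-process tests above reliable. And since `scan` now emits one JSON map per run, downstream tooling can parse the output generically, the same way `assertContents` does (hypothetical input; sketch only):

```java
import java.util.Map;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class ScanOutputParseSketch {
  public static void main(String[] args) throws Exception {
    // Stand-in for captured `ozone debug ldb ... scan` output.
    String json = "{\"key0\": {\"keyName\": \"key1\"}}";
    Map<String, Map<String, Object>> entries = new ObjectMapper().readValue(
        json, new TypeReference<Map<String, Map<String, Object>>>() { });
    // Values come back as plain maps; no Ozone classes are needed.
    System.out.println(entries.get("key0").get("keyName")); // key1
  }
}
```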