diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandUtils.java index 2a1317ee6c0d5..9bc5535ddef20 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandUtils.java @@ -18,6 +18,42 @@ package org.apache.hadoop.fs.shell; final class CommandUtils { + + private static final String FILE_STATUS_STRING_KV_SEPARATOR = "="; + private static final String FILE_STATUS_STRING_DELIMITER = ";"; + private static final String FILE_STATUS_STRING_TERMINATOR = "}"; + + /** + * Parse the FileStatus.toString() results, + * and return the value of the given key. + * + * @param fileStatusStr String from FileStatus.toString() + * @param key Key String + * @return Value of the key in the String, null if not found. + */ + static String getValueFromFileStatusString(String fileStatusStr, String key) { + String res = null; + // Search backwards since this function is only used for fileId for now, + // which we know we placed it at the end of the FileStatus String. + int start = fileStatusStr.lastIndexOf(key); + int end = -1; + if (start > 0) { + // fileId field found, move the start pointer to the start of the value + start += key.length() + FILE_STATUS_STRING_KV_SEPARATOR.length(); + // Find delimiter ";" to mark value string's end + end = fileStatusStr.indexOf(FILE_STATUS_STRING_DELIMITER, start); + if (end < 0) { + // Delimiter not found, try terminator "}" + end = fileStatusStr.indexOf(FILE_STATUS_STRING_TERMINATOR, start); + } + } + if (end > 0) { + // If value end pointer is not -1 + res = fileStatusStr.substring(start, end); + } + return res; + } + static String formatDescription(String usage, String... 
desciptions) { StringBuilder b = new StringBuilder(usage + ": " + desciptions[0]); for(int i = 1; i < desciptions.length; i++) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java index efc541ccf81ee..9f7822db131b3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java @@ -57,13 +57,15 @@ public static void registerCommands(CommandFactory factory) { private static final String OPTION_ATIME = "u"; private static final String OPTION_SIZE = "S"; private static final String OPTION_ECPOLICY = "e"; + private static final String OPTION_PRINTINODEID = "i"; public static final String NAME = "ls"; public static final String USAGE = "[-" + OPTION_PATHONLY + "] [-" + OPTION_DIRECTORY + "] [-" + OPTION_HUMAN + "] [-" + OPTION_HIDENONPRINTABLE + "] [-" + OPTION_RECURSIVE + "] [-" + OPTION_MTIME + "] [-" + OPTION_SIZE + "] [-" + OPTION_REVERSE + "] [-" + - OPTION_ATIME + "] [-" + OPTION_ECPOLICY +"] [<path> ...]"; + OPTION_ATIME + "] [-" + OPTION_ECPOLICY +"] [-" + + OPTION_PRINTINODEID + "] [<path> ...]"; public static final String DESCRIPTION = "List the contents that match the specified file pattern. 
If " + @@ -96,7 +98,9 @@ public static void registerCommands(CommandFactory factory) { " Use time of last access instead of modification for\n" + " display and sorting.\n"+ " -" + OPTION_ECPOLICY + - " Display the erasure coding policy of files and directories.\n"; + " Display the erasure coding policy of files and directories.\n" + + " -" + OPTION_PRINTINODEID + + " Print the inode id for files and directories.\n"; protected final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm"); @@ -110,6 +114,7 @@ public static void registerCommands(CommandFactory factory) { private boolean orderSize; private boolean useAtime; private boolean displayECPolicy; + private boolean printInodeId; private Comparator<PathData> orderComparator; protected boolean humanReadable = false; @@ -135,7 +140,8 @@ protected void processOptions(LinkedList<String> args) CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, OPTION_PATHONLY, OPTION_DIRECTORY, OPTION_HUMAN, OPTION_HIDENONPRINTABLE, OPTION_RECURSIVE, OPTION_REVERSE, - OPTION_MTIME, OPTION_SIZE, OPTION_ATIME, OPTION_ECPOLICY); + OPTION_MTIME, OPTION_SIZE, OPTION_ATIME, OPTION_ECPOLICY, + OPTION_PRINTINODEID); cf.parse(args); pathOnly = cf.getOpt(OPTION_PATHONLY); dirRecurse = !cf.getOpt(OPTION_DIRECTORY); @@ -147,6 +153,7 @@ protected void processOptions(LinkedList<String> args) orderSize = !orderTime && cf.getOpt(OPTION_SIZE); useAtime = cf.getOpt(OPTION_ATIME); displayECPolicy = cf.getOpt(OPTION_ECPOLICY); + printInodeId = cf.getOpt(OPTION_PRINTINODEID); if (args.isEmpty()) args.add(Path.CUR_DIR); initialiseOrderComparator(); @@ -289,9 +296,21 @@ protected void processPath(PathData item) throws IOException { return; } FileStatus stat = item.stat; + // Using String as fileId type since we are parsing it from a String + String fileIdStr = null; + if (printInodeId) { + final String fileIdKey = "fileId"; + fileIdStr = CommandUtils.getValueFromFileStatusString( + item.stat.toString(), fileIdKey); + if (fileIdStr == null) { + // fileId 
field not found, set it to "0" + fileIdStr = "0"; + } + } if (displayECPolicy) { ContentSummary contentSummary = item.fs.getContentSummary(item.path); String line = String.format(lineFormat, + printInodeId ? fileIdStr + " " : "", (stat.isDirectory() ? "d" : "-"), stat.getPermission() + (stat.hasAcl() ? "+" : " "), (stat.isFile() ? stat.getReplication() : "-"), @@ -306,6 +325,7 @@ protected void processPath(PathData item) throws IOException { out.println(line); } else { String line = String.format(lineFormat, + printInodeId ? fileIdStr + " " : "", (stat.isDirectory() ? "d" : "-"), stat.getPermission() + (stat.hasAcl() ? "+" : " "), (stat.isFile() ? stat.getReplication() : "-"), @@ -334,7 +354,7 @@ private void adjustColumnWidths(PathData items[]) throws IOException { } StringBuilder fmt = new StringBuilder(); - fmt.append("%s%s") // permission string + fmt.append("%s%s%s") // inode id and permission string .append("%" + maxRepl + "s ") .append((maxOwner > 0) ? "%-" + maxOwner + "s " : "%s") .append((maxGroup > 0) ? 
"%-" + maxGroup + "s " : "%s"); diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsLocatedFileStatus.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsLocatedFileStatus.java index 1490e4e4ef624..8b599e4c17e73 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsLocatedFileStatus.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsLocatedFileStatus.java @@ -188,6 +188,16 @@ public int hashCode() { return super.hashCode(); } + @Override + public String toString() { + String res = super.toString(); + // Append fileId field to the end of the String, but before the "}" + StringBuilder sb = new StringBuilder(); + sb.append(res, 0, res.length() - "}".length()); + sb.append("; fileId=").append(fileId).append("}"); + return sb.toString(); + } + /** * Get block locations for this entity, in HDFS format. * See {@link #makeQualifiedLocated(URI, Path)}. 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java index 65032514b4e5f..470956478977d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java @@ -2183,6 +2183,19 @@ private static String runLsr(final FsShell shell, String root, int returnvalue return results; } + @Test + public void testLsInodeId() throws Exception { + dfs.mkdirs(new Path("/d1/d2")); + dfs.mkdirs(new Path("/d4/d5")); + final File f3 = createLocalFile(new File(TEST_ROOT_DIR, "f3")); + dfs.moveFromLocalFile(new Path(f3.getPath()), new Path("/d1/d2")); + + FsShell shell = new FsShell(dfs.getConf()); + // Check return value + assertThat(shell.run(new String[]{"-ls", "-i", "/"}), is(0)); + assertThat(shell.run(new String[]{"-ls", "-i", "-R", "/"}), is(0)); + } + /** * default setting is file:// which is not a DFS * so DFSAdmin should throw and catch InvalidArgumentException