5 changes: 5 additions & 0 deletions hadoop-ozone/common/pom.xml
@@ -67,6 +67,11 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<scope>test</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-annotations</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>

<build>
@@ -21,6 +21,8 @@
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
@@ -33,6 +35,7 @@
import java.util.Optional;
import java.util.OptionalInt;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.apache.hadoop.hdds.conf.ConfigurationSource;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
@@ -47,6 +50,7 @@
import org.apache.commons.lang3.StringUtils;
import static org.apache.hadoop.hdds.HddsUtils.getHostNameFromConfigKeys;
import static org.apache.hadoop.hdds.HddsUtils.getPortNumberFromConfigKeys;
import static org.apache.hadoop.ozone.OzoneConsts.OM_KEY_PREFIX;
import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_ADDRESS_KEY;
import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_BIND_HOST_DEFAULT;
import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_HTTPS_ADDRESS_KEY;
@@ -574,4 +578,26 @@ public static String getOzoneManagerServiceId(OzoneConfiguration conf)
return serviceId;
}
}

/**
* Normalize the key name. The key is converted to a {@link Path} and
* normalized through its URI representation.
* @param keyName key name to be normalized.
* @return normalized key name.
*/
@SuppressFBWarnings("DMI_HARDCODED_ABSOLUTE_FILENAME")
public static String normalizeKey(String keyName) {
String normalizedKeyName;
if (keyName.startsWith(OM_KEY_PREFIX)) {
normalizedKeyName = Paths.get(keyName).toUri().normalize().getPath();
} else {
normalizedKeyName = Paths.get(OM_KEY_PREFIX, keyName).toUri()
.normalize().getPath();
}
if (!keyName.equals(normalizedKeyName)) {
LOG.debug("Normalized key {} to {} ", keyName,
normalizedKeyName.substring(1));
}
return normalizedKeyName.substring(1);
}
}
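
For reference, a minimal sketch of what the new OmUtils.normalizeKey helper produces for the kinds of paths exercised by the tests below (the class name here is invented for illustration, and the expected outputs assume none of these paths exist as local directories):

import org.apache.hadoop.ozone.OmUtils;

public final class NormalizeKeyExample {
  public static void main(String[] args) {
    // Leading, repeated and trailing delimiters are stripped.
    System.out.println(OmUtils.normalizeKey("/dir1///dir2/file1/")); // dir1/dir2/file1
    // "." segments are resolved by the URI normalization step.
    System.out.println(OmUtils.normalizeKey("dir1/./dir2/file1"));   // dir1/dir2/file1
    // Already normalized keys are returned unchanged.
    System.out.println(OmUtils.normalizeKey("dir1/dir2/file1"));     // dir1/dir2/file1
  }
}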
@@ -19,14 +19,18 @@
package org.apache.hadoop.fs.ozone;

import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.ozone.MiniOzoneCluster;
import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ozone.TestDataUtil;
import org.apache.hadoop.ozone.client.OzoneBucket;
import org.apache.hadoop.ozone.client.OzoneKey;
import org.apache.hadoop.ozone.client.OzoneVolume;
import org.apache.hadoop.ozone.client.io.OzoneInputStream;
import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
import org.apache.hadoop.ozone.om.OMConfigKeys;
import org.junit.Assert;
@@ -40,6 +44,8 @@
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import static org.apache.hadoop.ozone.OzoneConsts.OZONE_URI_SCHEME;
@@ -209,6 +215,126 @@ public void testObjectStoreCreateWithO3fs() throws Exception {

}

@Test
public void testReadWithNotNormalizedPath() throws Exception {
OzoneVolume ozoneVolume =
cluster.getRpcClient().getObjectStore().getVolume(volumeName);

OzoneBucket ozoneBucket = ozoneVolume.getBucket(bucketName);

String key = "/dir1///dir2/file1/";

int length = 10;
byte[] input = new byte[length];
Arrays.fill(input, (byte)96);
String inputString = new String(input);

OzoneOutputStream ozoneOutputStream =
ozoneBucket.createKey(key, length);

ozoneOutputStream.write(input);
ozoneOutputStream.write(input, 0, 10);
ozoneOutputStream.close();

// Read the key with given key name.
OzoneInputStream ozoneInputStream = ozoneBucket.readKey(key);
byte[] read = new byte[length];
ozoneInputStream.read(read, 0, length);
ozoneInputStream.close();

Assert.assertEquals(inputString, new String(read));

// Read using filesystem.
FSDataInputStream fsDataInputStream = o3fs.open(new Path(key));
read = new byte[length];
fsDataInputStream.read(read, 0, length);
fsDataInputStream.close();

Assert.assertEquals(inputString, new String(read));
}

@Test
public void testListKeysWithNotNormalizedPath() throws Exception {

OzoneVolume ozoneVolume =
cluster.getRpcClient().getObjectStore().getVolume(volumeName);

OzoneBucket ozoneBucket = ozoneVolume.getBucket(bucketName);

String key1 = "/dir1///dir2/file1/";
String key2 = "/dir1///dir2/file2/";
String key3 = "/dir1///dir2/file3/";

LinkedList<String> keys = new LinkedList<>();
keys.add("dir1/");
keys.add("dir1/dir2/");
keys.add(OmUtils.normalizeKey(key1));
keys.add(OmUtils.normalizeKey(key2));
keys.add(OmUtils.normalizeKey(key3));

int length = 10;
byte[] input = new byte[length];
Arrays.fill(input, (byte)96);

createKey(ozoneBucket, key1, 10, input);
createKey(ozoneBucket, key2, 10, input);
createKey(ozoneBucket, key3, 10, input);

// Iterator with key name as prefix.

Iterator<? extends OzoneKey> ozoneKeyIterator =
ozoneBucket.listKeys("/dir1//");

checkKeyList(ozoneKeyIterator, keys);

// Iterator with normalized key prefix.
ozoneKeyIterator =
ozoneBucket.listKeys("dir1/");

checkKeyList(ozoneKeyIterator, keys);

// Iterator with key name as previous key.
ozoneKeyIterator = ozoneBucket.listKeys(null,
"/dir1///dir2/file1/");

// Remove keys up to and including dir1/dir2/file1, since listing starts
// after the previous key.
keys.remove("dir1/");
keys.remove("dir1/dir2/");
keys.remove("dir1/dir2/file1");

checkKeyList(ozoneKeyIterator, keys);

// Iterator with normalized key as previous key.
ozoneKeyIterator = ozoneBucket.listKeys(null,
OmUtils.normalizeKey(key1));

checkKeyList(ozoneKeyIterator, keys);
}

private void checkKeyList(Iterator<? extends OzoneKey> ozoneKeyIterator,
List<String> keys) {

LinkedList<String> outputKeys = new LinkedList<>();
while (ozoneKeyIterator.hasNext()) {
OzoneKey ozoneKey = ozoneKeyIterator.next();
outputKeys.add(ozoneKey.getName());
}

Assert.assertEquals(keys, outputKeys);
}

private void createKey(OzoneBucket ozoneBucket, String key, long length,
byte[] input)
throws Exception {

OzoneOutputStream ozoneOutputStream =
ozoneBucket.createKey(key, length);

ozoneOutputStream.write(input);
ozoneOutputStream.write(input, 0, 10);
ozoneOutputStream.close();
}

private void checkPath(Path path) {
try {
o3fs.getFileStatus(path);
@@ -320,9 +320,9 @@ private void testDeleteCreatesFakeParentDir() throws Exception {

// Deleting the only child should create the parent dir key if it does
// not exist
String parentKey = o3fs.pathToKey(parent) + "/";
OzoneKeyDetails parentKeyInfo = getKey(parent, true);
assertEquals(parentKey, parentKeyInfo.getName());
FileStatus fileStatus = o3fs.getFileStatus(parent);
Assert.assertTrue(fileStatus.isDirectory());
assertEquals(parent.toString(), fileStatus.getPath().toUri().getPath());
}


@@ -94,6 +94,7 @@
import org.apache.hadoop.ozone.om.helpers.OzoneFSUtils;
import org.apache.hadoop.ozone.om.helpers.OzoneFileStatus;
import org.apache.hadoop.ozone.om.helpers.RepeatedOmKeyInfo;
import org.apache.hadoop.ozone.om.request.OMClientRequest;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.PartKeyInfo;
import org.apache.hadoop.ozone.security.OzoneBlockTokenSecretManager;
import org.apache.hadoop.ozone.security.acl.IAccessAuthorizer;
@@ -123,6 +124,7 @@
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_KEY_PREALLOCATION_BLOCKS_MAX_DEFAULT;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SCM_BLOCK_SIZE;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SCM_BLOCK_SIZE_DEFAULT;
import static org.apache.hadoop.ozone.OzoneConsts.OM_KEY_PREFIX;
import static org.apache.hadoop.ozone.OzoneConsts.OZONE_URI_DELIMITER;
import static org.apache.hadoop.ozone.om.exceptions.OMException.ResultCodes.BUCKET_NOT_FOUND;
import static org.apache.hadoop.ozone.om.exceptions.OMException.ResultCodes.DIRECTORY_NOT_FOUND;
@@ -165,6 +167,8 @@ public class KeyManagerImpl implements KeyManager {
private final KeyProviderCryptoExtension kmsProvider;
private final PrefixManager prefixManager;

private final boolean enableFileSystemPaths;


@VisibleForTesting
public KeyManagerImpl(ScmBlockLocationProtocol scmBlockClient,
@@ -208,6 +212,9 @@ public KeyManagerImpl(OzoneManager om, ScmClient scmClient,
this.listTrashKeysMax = conf.getInt(
OZONE_CLIENT_LIST_TRASH_KEYS_MAX,
OZONE_CLIENT_LIST_TRASH_KEYS_MAX_DEFAULT);
this.enableFileSystemPaths =
conf.getBoolean(OMConfigKeys.OZONE_OM_ENABLE_FILESYSTEM_PATHS,
OMConfigKeys.OZONE_OM_ENABLE_FILESYSTEM_PATHS_DEFAULT);

this.ozoneManager = om;
this.omId = omId;
@@ -645,7 +652,9 @@ public OmKeyInfo lookupKey(OmKeyArgs args, String clientAddress)
Preconditions.checkNotNull(args);
String volumeName = args.getVolumeName();
String bucketName = args.getBucketName();
String keyName = args.getKeyName();
String keyName = OMClientRequest.validateAndNormalizeKey(
enableFileSystemPaths, args.getKeyName());

metadataManager.getLock().acquireReadLock(BUCKET_LOCK, volumeName,
bucketName);
OmKeyInfo value = null;
@@ -912,12 +921,32 @@ public List<OmKeyInfo> listKeys(String volumeName, String bucketName,
// underlying table using an iterator. That automatically creates a
// snapshot of the data, so we don't need these locks at a higher level
// when we iterate.

startKey = normalizeListKeyPath(startKey);
keyPrefix = normalizeListKeyPath(keyPrefix);

List<OmKeyInfo> keyList = metadataManager.listKeys(volumeName, bucketName,
startKey, keyPrefix, maxKeys);
refreshPipeline(keyList);
return keyList;
}

private String normalizeListKeyPath(String keyPath) {

String normalizedKeyPath = keyPath;
if (enableFileSystemPaths) {
// For blank strings do nothing.
if (StringUtils.isBlank(keyPath)) {
return keyPath;
}
normalizedKeyPath = OmUtils.normalizeKey(keyPath);
if (keyPath.endsWith(OM_KEY_PREFIX)) {
normalizedKeyPath = normalizedKeyPath + OM_KEY_PREFIX;
}
}
return normalizedKeyPath;
}
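
For context, a minimal standalone sketch of the normalization applied to the list parameters (the class and method names are invented for illustration; the sketch skips the blank-string check and the OZONE_OM_ENABLE_FILESYSTEM_PATHS guard of the private helper above):

import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ozone.OzoneConsts;

final class ListPrefixNormalizationExample {

  static String normalizeListPrefix(String keyPrefix) {
    // Normalize, then re-append the trailing delimiter so that a prefix of
    // "dir1/" does not also match sibling keys such as "dir10/...".
    String normalized = OmUtils.normalizeKey(keyPrefix);
    if (keyPrefix.endsWith(OzoneConsts.OM_KEY_PREFIX)) {
      normalized = normalized + OzoneConsts.OM_KEY_PREFIX;
    }
    return normalized;
  }

  public static void main(String[] args) {
    System.out.println(normalizeListPrefix("/dir1//")); // dir1/
    System.out.println(normalizeListPrefix("dir1"));    // dir1
  }
}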

@Override
public List<RepeatedOmKeyInfo> listTrash(String volumeName,
String bucketName, String startKeyName, String keyPrefix,
@@ -20,9 +20,9 @@

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.audit.AuditAction;
import org.apache.hadoop.ozone.audit.AuditEventStatus;
@@ -45,11 +45,9 @@
import javax.annotation.Nonnull;
import java.io.IOException;
import java.net.InetAddress;
import java.nio.file.Paths;
import java.util.LinkedHashMap;
import java.util.Map;

import static org.apache.hadoop.ozone.OzoneConsts.OM_KEY_PREFIX;
import static org.apache.hadoop.ozone.om.exceptions.OMException.ResultCodes.INVALID_KEY_NAME;

/**
@@ -284,21 +282,11 @@ public static String validateAndNormalizeKey(boolean enableFileSystemPaths,
}
}

@SuppressFBWarnings("DMI_HARDCODED_ABSOLUTE_FILENAME")

public static String validateAndNormalizeKey(String keyName)
throws OMException {
String normalizedKeyName;
if (keyName.startsWith(OM_KEY_PREFIX)) {
normalizedKeyName = Paths.get(keyName).toUri().normalize().getPath();
} else {
normalizedKeyName = Paths.get(OM_KEY_PREFIX, keyName).toUri()
.normalize().getPath();
}
if (!keyName.equals(normalizedKeyName)) {
LOG.debug("Normalized key {} to {} ", keyName,
normalizedKeyName.substring(1));
}
return isValidKeyPath(normalizedKeyName.substring(1));
String normalizedKeyName = OmUtils.normalizeKey(keyName);
return isValidKeyPath(normalizedKeyName);
}
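
A short usage sketch of the refactored single-argument overload (the class name is invented; the expected result assumes the normalized key passes the unchanged isValidKeyPath checks):

import org.apache.hadoop.ozone.om.exceptions.OMException;
import org.apache.hadoop.ozone.om.request.OMClientRequest;

final class ValidateKeyExample {
  public static void main(String[] args) throws OMException {
    // Normalization is now delegated to OmUtils.normalizeKey; validation of
    // the result is unchanged and still raises OMException (INVALID_KEY_NAME)
    // for key paths that fail it.
    String normalized =
        OMClientRequest.validateAndNormalizeKey("/dir1///dir2/file1/");
    System.out.println(normalized); // dir1/dir2/file1
  }
}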

/**