@@ -18,10 +18,11 @@
package org.apache.hadoop.ozone.container.metadata;

import java.io.File;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
import org.apache.hadoop.hdds.utils.db.BatchOperation;
import org.apache.hadoop.hdds.utils.db.CodecException;
import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
import org.apache.hadoop.hdds.utils.db.Table;

/**
@@ -42,34 +43,32 @@ public DatanodeTable(Table<KEY, VALUE> table) {
}

@Override
public void put(KEY key, VALUE value) throws IOException {
public void put(KEY key, VALUE value) throws RocksDatabaseException, CodecException {
table.put(key, value);
}

@Override
public void putWithBatch(BatchOperation batch, KEY key,
VALUE value) throws IOException {
public void putWithBatch(BatchOperation batch, KEY key, VALUE value) throws RocksDatabaseException, CodecException {
table.putWithBatch(batch, key, value);
}

@Override
public boolean isEmpty() throws IOException {
public boolean isEmpty() throws RocksDatabaseException {
return table.isEmpty();
}

@Override
public void delete(KEY key) throws IOException {
public void delete(KEY key) throws RocksDatabaseException, CodecException {
table.delete(key);
}

@Override
public void deleteRange(KEY beginKey, KEY endKey) throws IOException {
public void deleteRange(KEY beginKey, KEY endKey) throws RocksDatabaseException, CodecException {
table.deleteRange(beginKey, endKey);
}

@Override
public void deleteWithBatch(BatchOperation batch, KEY key)
throws IOException {
public void deleteWithBatch(BatchOperation batch, KEY key) throws CodecException {
table.deleteWithBatch(batch, key);
}

@@ -86,60 +85,58 @@ public String getName() {
}

@Override
public long getEstimatedKeyCount() throws IOException {
public long getEstimatedKeyCount() throws RocksDatabaseException {
return table.getEstimatedKeyCount();
}

@Override
public boolean isExist(KEY key) throws IOException {
public boolean isExist(KEY key) throws RocksDatabaseException, CodecException {
return table.isExist(key);
}

@Override
public VALUE get(KEY key) throws IOException {
public VALUE get(KEY key) throws RocksDatabaseException, CodecException {
return table.get(key);
}

@Override
public VALUE getIfExist(KEY key) throws IOException {
public VALUE getIfExist(KEY key) throws RocksDatabaseException, CodecException {
return table.getIfExist(key);
}

@Override
public VALUE getReadCopy(KEY key) throws IOException {
public VALUE getReadCopy(KEY key) throws RocksDatabaseException, CodecException {
return table.getReadCopy(key);
}

@Override
public List<KeyValue<KEY, VALUE>> getRangeKVs(
KEY startKey, int count, KEY prefix,
MetadataKeyFilters.MetadataKeyFilter... filters)
throws IOException, IllegalArgumentException {
throws RocksDatabaseException, CodecException {
return table.getRangeKVs(startKey, count, prefix, filters);
}

@Override
public List<KeyValue<KEY, VALUE>> getSequentialRangeKVs(
KEY startKey, int count, KEY prefix,
MetadataKeyFilters.MetadataKeyFilter... filters)
throws IOException, IllegalArgumentException {
throws RocksDatabaseException, CodecException {
return table.getSequentialRangeKVs(startKey, count, prefix, filters);
}

@Override
public void deleteBatchWithPrefix(BatchOperation batch, KEY prefix)
throws IOException {
public void deleteBatchWithPrefix(BatchOperation batch, KEY prefix) throws RocksDatabaseException, CodecException {
table.deleteBatchWithPrefix(batch, prefix);
}

@Override
public void dumpToFileWithPrefix(File externalFile, KEY prefix)
throws IOException {
public void dumpToFileWithPrefix(File externalFile, KEY prefix) throws RocksDatabaseException, CodecException {
table.dumpToFileWithPrefix(externalFile, prefix);
}

@Override
public void loadFromFile(File externalFile) throws IOException {
public void loadFromFile(File externalFile) throws RocksDatabaseException {
table.loadFromFile(externalFile);
}

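The hunks above swap DatanodeTable's blanket throws IOException clauses for the more specific RocksDatabaseException and CodecException. A minimal caller-side sketch of what the narrower contract allows, relying only on the put signature shown above; the wrapper class and the unchecked exceptions it rethrows are illustrative, not part of this change:

import org.apache.hadoop.hdds.utils.db.CodecException;
import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
import org.apache.hadoop.ozone.container.metadata.DatanodeTable;

// Hypothetical caller: with the narrowed throws clauses, a call site can tell
// a RocksDB write failure apart from a (de)serialization failure instead of
// handling a single opaque IOException.
final class DatanodeTablePutExample {
  static <K, V> void putOrFail(DatanodeTable<K, V> table, K key, V value) {
    try {
      table.put(key, value);
    } catch (RocksDatabaseException e) {
      // The delegated RocksDB write failed; surface it as a storage error.
      throw new IllegalStateException("RocksDB write failed for key " + key, e);
    } catch (CodecException e) {
      // Encoding the key or value failed; retrying the same arguments will not help.
      throw new IllegalArgumentException("Cannot encode entry for key " + key, e);
    }
  }
}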
@@ -17,11 +17,12 @@

package org.apache.hadoop.ozone.container.metadata;

import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
import org.apache.hadoop.hdds.utils.db.BatchOperation;
import org.apache.hadoop.hdds.utils.db.CodecException;
import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
import org.apache.hadoop.hdds.utils.db.Table;
import org.apache.hadoop.ozone.container.common.helpers.ChunkInfoList;

@@ -51,58 +52,56 @@ public SchemaOneDeletedBlocksTable(Table<String, ChunkInfoList> table) {
}

@Override
public void put(String key, ChunkInfoList value) throws IOException {
public void put(String key, ChunkInfoList value) throws RocksDatabaseException, CodecException {
super.put(prefix(key), value);
}

@Override
public void putWithBatch(BatchOperation batch, String key,
ChunkInfoList value)
throws IOException {
public void putWithBatch(BatchOperation batch, String key, ChunkInfoList value)
throws RocksDatabaseException, CodecException {
super.putWithBatch(batch, prefix(key), value);
}

@Override
public void delete(String key) throws IOException {
public void delete(String key) throws RocksDatabaseException, CodecException {
super.delete(prefix(key));
}

@Override
public void deleteWithBatch(BatchOperation batch, String key)
throws IOException {
public void deleteWithBatch(BatchOperation batch, String key) throws CodecException {
super.deleteWithBatch(batch, prefix(key));
}

@Override
public void deleteRange(String beginKey, String endKey) throws IOException {
public void deleteRange(String beginKey, String endKey) throws RocksDatabaseException, CodecException {
super.deleteRange(prefix(beginKey), prefix(endKey));
}

@Override
public boolean isExist(String key) throws IOException {
public boolean isExist(String key) throws RocksDatabaseException, CodecException {
return super.isExist(prefix(key));
}

@Override
public ChunkInfoList get(String key) throws IOException {
public ChunkInfoList get(String key) throws RocksDatabaseException, CodecException {
return super.get(prefix(key));
}

@Override
public ChunkInfoList getIfExist(String key) throws IOException {
public ChunkInfoList getIfExist(String key) throws RocksDatabaseException, CodecException {
return super.getIfExist(prefix(key));
}

@Override
public ChunkInfoList getReadCopy(String key) throws IOException {
public ChunkInfoList getReadCopy(String key) throws RocksDatabaseException, CodecException {
return super.getReadCopy(prefix(key));
}

@Override
public List<KeyValue<String, ChunkInfoList>> getRangeKVs(
String startKey, int count, String prefix,
MetadataKeyFilters.MetadataKeyFilter... filters)
throws IOException, IllegalArgumentException {
throws RocksDatabaseException, CodecException {

// Deleted blocks will always have the #deleted# key prefix and nothing
// else in this schema version. Ignore any user passed prefixes that could
@@ -115,7 +114,7 @@ public List<KeyValue<String, ChunkInfoList>> getRangeKVs(
public List<KeyValue<String, ChunkInfoList>> getSequentialRangeKVs(
String startKey, int count, String prefix,
MetadataKeyFilters.MetadataKeyFilter... filters)
throws IOException, IllegalArgumentException {
throws RocksDatabaseException, CodecException {

// Deleted blocks will always have the #deleted# key prefix and nothing
// else in this schema version. Ignore any user passed prefixes that could
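SchemaOneDeletedBlocksTable's overrides adopt the same narrowed clauses as the parent table, and deleteWithBatch drops RocksDatabaseException altogether. This works because a Java override may declare fewer, or more specific, checked exceptions than the method it overrides. A self-contained sketch of that rule with stand-in types (not Ozone classes):

import java.io.IOException;

// Minimal sketch of the language rule this change relies on: an override may
// narrow its throws clause, so callers that use the subclass type see the
// tighter contract while code written against the supertype still compiles.
class NarrowedThrowsExample {
  static class DbException extends IOException { }      // stand-in for RocksDatabaseException
  static class EncodeException extends IOException { }  // stand-in for CodecException

  interface Store {
    void put(String key, String value) throws IOException;   // broad contract
  }

  static class RocksStore implements Store {
    @Override
    public void put(String key, String value) throws DbException, EncodeException {
      // a real implementation would encode the pair and write it to RocksDB
    }
  }

  public static void main(String[] args) {
    RocksStore store = new RocksStore();
    try {
      store.put("k", "v");
    } catch (DbException | EncodeException e) {   // the specific failures are now catchable
      e.printStackTrace();
    }
  }
}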
@@ -20,8 +20,6 @@
import static org.apache.hadoop.hdds.StringUtils.bytes2String;

import com.google.common.base.Preconditions;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
@@ -32,6 +30,7 @@
import org.apache.hadoop.hdds.utils.db.managed.ManagedWriteBatch;
import org.apache.hadoop.hdds.utils.db.managed.ManagedWriteOptions;
import org.apache.ratis.util.TraditionalBinaryPrefix;
import org.apache.ratis.util.UncheckedAutoCloseable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -156,7 +155,7 @@ private class FamilyCache {
}

/** Prepare batch write for the entire family. */
void prepareBatchWrite() throws IOException {
void prepareBatchWrite() throws RocksDatabaseException {
Preconditions.checkState(!isCommit, "%s is already committed.", this);
isCommit = true;
for (Map.Entry<Bytes, Object> op : ops.entrySet()) {
@@ -289,7 +288,7 @@ void delete(ColumnFamily family, byte[] key) {
}

/** Prepare batch write for the entire cache. */
Closeable prepareBatchWrite() throws IOException {
UncheckedAutoCloseable prepareBatchWrite() throws RocksDatabaseException {
for (Map.Entry<String, FamilyCache> e : name2cache.entrySet()) {
e.getValue().prepareBatchWrite();
}
@@ -341,19 +340,18 @@ public String toString() {
return name;
}

public void commit(RocksDatabase db) throws IOException {
public void commit(RocksDatabase db) throws RocksDatabaseException {
debug(() -> String.format("%s: commit %s",
name, opCache.getCommitString()));
try (Closeable ignored = opCache.prepareBatchWrite()) {
try (UncheckedAutoCloseable ignored = opCache.prepareBatchWrite()) {
db.batchWrite(writeBatch);
}
}

public void commit(RocksDatabase db, ManagedWriteOptions writeOptions)
throws IOException {
public void commit(RocksDatabase db, ManagedWriteOptions writeOptions) throws RocksDatabaseException {
debug(() -> String.format("%s: commit-with-writeOptions %s",
name, opCache.getCommitString()));
try (Closeable ignored = opCache.prepareBatchWrite()) {
try (UncheckedAutoCloseable ignored = opCache.prepareBatchWrite()) {
db.batchWrite(writeBatch, writeOptions);
}
}
@@ -365,17 +363,15 @@ public void close() {
opCache.clear();
}

public void delete(ColumnFamily family, byte[] key) throws IOException {
public void delete(ColumnFamily family, byte[] key) {
opCache.delete(family, key);
}

public void put(ColumnFamily family, CodecBuffer key, CodecBuffer value)
throws IOException {
public void put(ColumnFamily family, CodecBuffer key, CodecBuffer value) {
opCache.put(family, key, value);
}

public void put(ColumnFamily family, byte[] key, byte[] value)
throws IOException {
public void put(ColumnFamily family, byte[] key, byte[] value) {
opCache.put(family, key, value);
}
}
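In the batch-operation hunks above, prepareBatchWrite() now returns org.apache.ratis.util.UncheckedAutoCloseable instead of java.io.Closeable, so the try-with-resources blocks in both commit() variants no longer have to account for a checked IOException from close(). A standalone sketch of the difference, using a local stand-in for the Ratis interface (assumed here to be an AutoCloseable whose close() declares no checked exception):

import java.io.Closeable;
import java.io.IOException;

class UncheckedCloseExample {

  // Stand-in for org.apache.ratis.util.UncheckedAutoCloseable.
  interface UncheckedAutoCloseable extends AutoCloseable {
    @Override
    void close();
  }

  static Closeable checkedResource() {
    return () -> { };            // Closeable.close() is declared to throw IOException
  }

  static UncheckedAutoCloseable uncheckedResource() {
    return () -> { };            // close() throws no checked exception
  }

  // With Closeable, the enclosing method must still declare IOException even
  // though its body only performs the (hypothetical) batch write.
  static void commitWithChecked() throws IOException {
    try (Closeable ignored = checkedResource()) {
      // batch write would happen here
    }
  }

  // With the unchecked variant, only the exceptions of the body remain, which
  // is what lets commit() declare RocksDatabaseException alone.
  static void commitWithUnchecked() {
    try (UncheckedAutoCloseable ignored = uncheckedResource()) {
      // batch write would happen here
    }
  }
}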
@@ -303,17 +303,18 @@ public void commitBatchOperation(BatchOperation operation)
}

@Override
public RDBTable getTable(String name) throws IOException {
public RDBTable getTable(String name) throws RocksDatabaseException {
final ColumnFamily handle = db.getColumnFamily(name);
if (handle == null) {
throw new IOException("No such table in this DB. TableName : " + name);
throw new RocksDatabaseException("No such table in this DB. TableName : " + name);
}
return new RDBTable(this.db, handle, rdbMetrics);
}

@Override
public <K, V> TypedTable<K, V> getTable(
String name, Codec<K> keyCodec, Codec<V> valueCodec, TableCache.CacheType cacheType) throws IOException {
String name, Codec<K> keyCodec, Codec<V> valueCodec, TableCache.CacheType cacheType)
throws RocksDatabaseException, CodecException {
return new TypedTable<>(getTable(name), keyCodec, valueCodec, cacheType);
}

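Here getTable(String) reports a missing column family as RocksDatabaseException rather than a plain IOException, and the typed overload additionally declares CodecException. A caller-side sketch; MetadataStore below is a hypothetical stand-in that only mirrors the single-argument signature from this hunk, and the rethrown exception is illustrative:

import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;

class GetTableExample {
  // Hypothetical stand-in mirroring the getTable(String) signature above.
  interface MetadataStore {
    Object getTable(String name) throws RocksDatabaseException;
  }

  static Object openOrFail(MetadataStore store, String name) {
    try {
      return store.getTable(name);
    } catch (RocksDatabaseException e) {
      // Raised when the column family does not exist
      // ("No such table in this DB. TableName : " + name) or the lookup fails.
      throw new IllegalStateException("Cannot open table " + name, e);
    }
  }
}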
@@ -17,7 +17,6 @@

package org.apache.hadoop.hdds.utils.db;

import java.io.IOException;
import java.util.NoSuchElementException;
import java.util.function.Consumer;
import org.apache.hadoop.hdds.utils.db.managed.ManagedRocksIterator;
@@ -59,7 +58,7 @@ abstract class RDBStoreAbstractIterator<RAW>
abstract void seek0(RAW key);

/** Delete the given key. */
abstract void delete(RAW key) throws IOException;
abstract void delete(RAW key) throws RocksDatabaseException;

/** Does the given key start with the prefix? */
abstract boolean startsWithPrefix(RAW key);
@@ -136,7 +135,7 @@ public final Table.KeyValue<RAW, RAW> seek(RAW key) {
}

@Override
public final void removeFromDB() throws IOException {
public final void removeFromDB() throws RocksDatabaseException, CodecException {
if (rocksDBTable == null) {
throw new UnsupportedOperationException("remove");
}
@@ -17,7 +17,6 @@

package org.apache.hadoop.hdds.utils.db;

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.hdds.utils.db.managed.ManagedRocksIterator;

@@ -56,7 +55,7 @@ void seek0(byte[] key) {
}

@Override
void delete(byte[] key) throws IOException {
void delete(byte[] key) throws RocksDatabaseException {
getRocksDBTable().delete(key);
}

@@ -17,7 +17,6 @@

package org.apache.hadoop.hdds.utils.db;

import java.io.IOException;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.hdds.utils.db.managed.ManagedRocksIterator;
@@ -69,7 +68,7 @@ void seek0(CodecBuffer key) {
}

@Override
void delete(CodecBuffer key) throws IOException {
void delete(CodecBuffer key) throws RocksDatabaseException {
assertOpen();
getRocksDBTable().delete(key.asReadOnlyByteBuffer());
}
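The last three files adjust the iterator hierarchy in the same way: removeFromDB() on the abstract iterator declares RocksDatabaseException and CodecException, while the per-representation delete hooks (byte[] and CodecBuffer) declare RocksDatabaseException only. A standalone sketch of that template-method shape with stand-in types (not the Ozone classes):

class IteratorDeleteSketch {
  static class DbException extends Exception { }   // stand-in for RocksDatabaseException

  // The abstract iterator owns the removeFromDB() entry point and defers the
  // actual RocksDB delete to a hook specific to the key representation.
  abstract static class AbstractIterator<RAW> {
    private final RAW current;

    AbstractIterator(RAW first) {
      this.current = first;
    }

    /** Hook implemented per key representation (byte[], CodecBuffer, ...). */
    abstract void delete(RAW key) throws DbException;

    final void removeFromDB() throws DbException {
      if (current == null) {
        throw new UnsupportedOperationException("remove");
      }
      delete(current);                              // delegate to the concrete hook
    }
  }

  static final class ByteArrayIterator extends AbstractIterator<byte[]> {
    ByteArrayIterator(byte[] first) {
      super(first);
    }

    @Override
    void delete(byte[] key) throws DbException {
      // a real implementation would call the RocksDB table's delete(key) here
    }
  }
}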