diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStoreBuilder.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStoreBuilder.java
index b46a742ac3d2..5e41f5a854e7 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStoreBuilder.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStoreBuilder.java
@@ -107,7 +107,6 @@ public final class DBStoreBuilder {
   // number in request to avoid increase in heap memory.
   private long maxDbUpdatesSizeThreshold;
   private Integer maxNumberOfOpenFiles = null;
-  private String threadNamePrefix = "";
 
   /**
    * Create DBStoreBuilder from a generic DBDefinition.
@@ -233,7 +232,7 @@ public DBStore build() throws IOException {
       return new RDBStore(dbFile, rocksDBOption, statistics, writeOptions, tableConfigs,
           registry.build(), openReadOnly, maxFSSnapshots, dbJmxBeanNameName,
           enableCompactionDag, maxDbUpdatesSizeThreshold, createCheckpointDirs,
-          configuration, threadNamePrefix, enableRocksDbMetrics);
+          configuration, enableRocksDbMetrics);
     } finally {
       tableConfigs.forEach(TableConfig::close);
     }
@@ -323,11 +322,6 @@ public DBStoreBuilder setMaxNumberOfOpenFiles(Integer maxNumberOfOpenFiles) {
     return this;
   }
 
-  public DBStoreBuilder setThreadNamePrefix(String prefix) {
-    this.threadNamePrefix = prefix;
-    return this;
-  }
-
   /**
    * Converts column families and their corresponding options that have been
    * registered with the builder to a set of {@link TableConfig} objects.
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStore.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStore.java
index 40d3507d2eb4..fa77cfd937da 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStore.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStore.java
@@ -74,7 +74,6 @@ public class RDBStore implements DBStore {
   private final long maxDbUpdatesSizeThreshold;
   private final ManagedDBOptions dbOptions;
   private final ManagedStatistics statistics;
-  private final String threadNamePrefix;
 
   @SuppressWarnings("parameternumber")
   public RDBStore(File dbFile, ManagedDBOptions dbOptions, ManagedStatistics statistics,
@@ -83,11 +82,10 @@ public RDBStore(File dbFile, ManagedDBOptions dbOptions, ManagedStatistics statistics,
                  String dbJmxBeanName, boolean enableCompactionDag,
                  long maxDbUpdatesSizeThreshold, boolean createCheckpointDirs,
-                 ConfigurationSource configuration, String threadNamePrefix,
+                 ConfigurationSource configuration,
                  boolean enableRocksDBMetrics)
      throws IOException {
-    this.threadNamePrefix = threadNamePrefix;
     Preconditions.checkNotNull(dbFile, "DB file location cannot be null");
     Preconditions.checkNotNull(families);
     Preconditions.checkArgument(!families.isEmpty());
@@ -306,8 +304,7 @@ public <KEY, VALUE> TypedTable<KEY, VALUE> getTable(String name,
   public <KEY, VALUE> Table<KEY, VALUE> getTable(String name,
       Class<KEY> keyType, Class<VALUE> valueType,
       TableCache.CacheType cacheType) throws IOException {
-    return new TypedTable<>(getTable(name), codecRegistry, keyType,
-        valueType, cacheType, threadNamePrefix);
+    return new TypedTable<>(getTable(name), codecRegistry, keyType, valueType, cacheType);
   }
 
   @Override
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
index 24676ac33b5f..e3785ed0d9f2 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
@@ -57,10 +57,9 @@ public class TypedTable<KEY, VALUE> implements Table<KEY, VALUE> {
   static final int BUFFER_SIZE_DEFAULT = 4 << 10; // 4 KB
 
   private final RDBTable rawTable;
+  private final String info;
 
-  private final Class<KEY> keyType;
   private final Codec<KEY> keyCodec;
-  private final Class<VALUE> valueType;
   private final Codec<VALUE> valueCodec;
 
   private final boolean supportCodecBuffer;
@@ -72,11 +71,9 @@ public class TypedTable<KEY, VALUE> implements Table<KEY, VALUE> {
   /**
    * The same as this(rawTable, codecRegistry, keyType, valueType,
    * CacheType.PARTIAL_CACHE).
    */
-  public TypedTable(RDBTable rawTable,
-      CodecRegistry codecRegistry, Class<KEY> keyType,
-      Class<VALUE> valueType) throws IOException {
-    this(rawTable, codecRegistry, keyType, valueType,
-        CacheType.PARTIAL_CACHE, "");
+  TypedTable(RDBTable rawTable, CodecRegistry codecRegistry, Class<KEY> keyType, Class<VALUE> valueType)
+      throws IOException {
+    this(rawTable, codecRegistry, keyType, valueType, CacheType.PARTIAL_CACHE);
   }
 
   /**
@@ -87,27 +84,28 @@ public TypedTable(RDBTable rawTable,
    * @param keyType The key type.
    * @param valueType The value type.
    * @param cacheType How to cache the entries?
-   * @param threadNamePrefix
    * @throws IOException if failed to iterate the raw table.
    */
-  public TypedTable(RDBTable rawTable,
-      CodecRegistry codecRegistry, Class<KEY> keyType,
-      Class<VALUE> valueType,
-      CacheType cacheType, String threadNamePrefix) throws IOException {
+  TypedTable(RDBTable rawTable, CodecRegistry codecRegistry, Class<KEY> keyType, Class<VALUE> valueType,
+      CacheType cacheType) throws IOException {
     this.rawTable = Objects.requireNonNull(rawTable, "rawTable==null");
     Objects.requireNonNull(codecRegistry, "codecRegistry == null");
-    this.keyType = Objects.requireNonNull(keyType, "keyType == null");
+    Objects.requireNonNull(keyType, "keyType == null");
     this.keyCodec = codecRegistry.getCodecFromClass(keyType);
     Objects.requireNonNull(keyCodec, "keyCodec == null");
-    this.valueType = Objects.requireNonNull(valueType, "valueType == null");
+    Objects.requireNonNull(valueType, "valueType == null");
     this.valueCodec = codecRegistry.getCodecFromClass(valueType);
     Objects.requireNonNull(valueCodec, "valueCodec == null");
+    this.info = getClassSimpleName(getClass()) + "-" + getName()
+        + "(" + getClassSimpleName(keyType) + "->" + getClassSimpleName(valueType) + ")";
+
+    this.supportCodecBuffer = keyCodec.supportCodecBuffer() && valueCodec.supportCodecBuffer();
+    final String threadNamePrefix = rawTable.getName() + "_";
     if (cacheType == CacheType.FULL_CACHE) {
       cache = new FullTableCache<>(threadNamePrefix);
       //fill cache
@@ -443,9 +441,7 @@ public String getName() {
 
   @Override
   public String toString() {
-    return getClassSimpleName(getClass()) + "-" + getName()
-        + "(" + getClassSimpleName(keyType)
-        + "->" + getClassSimpleName(valueType) + ")";
+    return info;
   }
 
   @Override
@@ -572,14 +568,6 @@ public KEY getKey() throws IOException {
     public VALUE getValue() throws IOException {
       return decodeValue(rawKeyValue.getValue());
     }
-
-    public byte[] getRawKey() throws IOException {
-      return rawKeyValue.getKey();
-    }
-
-    public byte[] getRawValue() throws IOException {
-      return rawKeyValue.getValue();
-    }
   }
 
   RawIterator<CodecBuffer> newCodecBufferTableIterator(
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStore.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStore.java
index 24d59b5be069..81626f935754 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStore.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStore.java
@@ -62,7 +62,7 @@ public static RDBStore newRDBStore(File dbFile, ManagedDBOptions options,
       throws IOException {
     return new RDBStore(dbFile, options, null, new ManagedWriteOptions(), families,
         CodecRegistry.newBuilder().build(), false, 1000, null, false,
-        maxDbUpdatesSizeThreshold, true, null, "", true);
+        maxDbUpdatesSizeThreshold, true, null, true);
   }
 
   public static final int MAX_DB_UPDATES_SIZE_THRESHOLD = 80;
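
For reviewers, a minimal self-contained sketch of the two strings the patch now builds inside the TypedTable constructor: the cache thread-name prefix, which is derived from the column family name instead of being threaded through DBStoreBuilder#setThreadNamePrefix, and the precomputed toString() text held in the new info field. The table name and key/value classes below are illustrative assumptions, not values from the patch, and the sketch uses Class#getSimpleName in place of the internal getClassSimpleName helper.

public final class TypedTableNamingSketch {
  public static void main(String[] args) {
    // Illustrative inputs; in the real code they come from rawTable.getName()
    // and the key/value classes registered with the CodecRegistry.
    String tableName = "keyTable";
    Class<?> keyType = String.class;
    Class<?> valueType = Long.class;

    // Cache thread-name prefix, formerly supplied via DBStoreBuilder#setThreadNamePrefix,
    // now computed as rawTable.getName() + "_" inside the constructor.
    String threadNamePrefix = tableName + "_";

    // toString() text, formerly rebuilt on every call from keyType/valueType,
    // now computed once and stored in the info field.
    String info = "TypedTable-" + tableName
        + "(" + keyType.getSimpleName() + "->" + valueType.getSimpleName() + ")";

    System.out.println(threadNamePrefix);  // keyTable_
    System.out.println(info);              // TypedTable-keyTable(String->Long)
  }
}

The net effect is that callers no longer pass a prefix through DBStoreBuilder and RDBStore, and toString() no longer rebuilds its description on every call.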