@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdds.utils;

-import static org.apache.hadoop.hdds.utils.RocksDBStoreMBean.ROCKSDB_CONTEXT_PREFIX;
+import static org.apache.hadoop.hdds.utils.RocksDBStoreMetrics.ROCKSDB_CONTEXT_PREFIX;

import java.util.ArrayList;
import java.util.Collection;
@@ -34,28 +34,18 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-import javax.management.Attribute;
-import javax.management.AttributeList;
-import javax.management.AttributeNotFoundException;
-import javax.management.DynamicMBean;
-import javax.management.InvalidAttributeValueException;
-import javax.management.MBeanAttributeInfo;
-import javax.management.MBeanException;
-import javax.management.MBeanInfo;
-import javax.management.ReflectionException;
import java.io.IOException;
-import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
-* Adapter JMX bean to publish all the Rocksdb metrics.
+* All Rocksdb metrics.
*/
-public class RocksDBStoreMBean implements DynamicMBean, MetricsSource {
+public class RocksDBStoreMetrics implements MetricsSource {

private Statistics statistics;

@@ -66,10 +56,11 @@ public class RocksDBStoreMBean implements DynamicMBean, MetricsSource {
private String contextName;

private static final Logger LOG =
-LoggerFactory.getLogger(RocksDBStoreMBean.class);
+LoggerFactory.getLogger(RocksDBStoreMetrics.class);

public static final String ROCKSDB_CONTEXT_PREFIX = "Rocksdb_";
public static final String ROCKSDB_PROPERTY_PREFIX = "rocksdb.";
+private static final String BLOB_DB_PREFIX = "BLOB_DB_";

// RocksDB properties
// Column1: rocksDB property original name
@@ -114,7 +105,7 @@ public class RocksDBStoreMBean implements DynamicMBean, MetricsSource {
private static final String NUM_FILES_AT_LEVEL = "num_files_at_level";
private static final String SIZE_AT_LEVEL = "size_at_level";

-public RocksDBStoreMBean(Statistics statistics, RocksDatabase db,
+public RocksDBStoreMetrics(Statistics statistics, RocksDatabase db,
String dbName) {
this.contextName = ROCKSDB_CONTEXT_PREFIX + dbName;
this.statistics = statistics;
@@ -132,104 +123,22 @@ public RocksDBStoreMBean(Statistics statistics, RocksDatabase db,
}
}

-public static RocksDBStoreMBean create(Statistics statistics,
+public static RocksDBStoreMetrics create(Statistics statistics,
RocksDatabase db, String contextName) {
-RocksDBStoreMBean rocksDBStoreMBean = new RocksDBStoreMBean(
+RocksDBStoreMetrics metrics = new RocksDBStoreMetrics(
statistics, db, contextName);
MetricsSystem ms = DefaultMetricsSystem.instance();
-MetricsSource metricsSource = ms.getSource(rocksDBStoreMBean.contextName);
+MetricsSource metricsSource = ms.getSource(metrics.contextName);
if (metricsSource != null) {
-return (RocksDBStoreMBean)metricsSource;
+return (RocksDBStoreMetrics)metricsSource;
} else {
-return ms.register(rocksDBStoreMBean.contextName,
-"RocksDB Metrics",
-rocksDBStoreMBean);
+return ms.register(metrics.contextName, "RocksDB Metrics", metrics);
}
}

-@Override
-public Object getAttribute(String attribute)
-throws AttributeNotFoundException, MBeanException, ReflectionException {
-for (String histogramAttribute : histogramAttributes) {
-if (attribute.endsWith("_" + histogramAttribute.toUpperCase())) {
-String keyName = attribute
-.substring(0, attribute.length() - histogramAttribute.length() - 1);
-try {
-HistogramData histogram =
-statistics.getHistogramData(HistogramType.valueOf(keyName));
-try {
-Method method =
-HistogramData.class.getMethod("get" + histogramAttribute);
-return method.invoke(histogram);
-} catch (Exception e) {
-throw new ReflectionException(e,
-"Can't read attribute " + attribute);
-}
-} catch (IllegalArgumentException exception) {
-throw new AttributeNotFoundException(
-"No such attribute in RocksDB stats: " + attribute);
-}
-}
-}
-try {
-return statistics.getTickerCount(TickerType.valueOf(attribute));
-} catch (IllegalArgumentException ex) {
-throw new AttributeNotFoundException(
-"No such attribute in RocksDB stats: " + attribute);
-}
-}
-
-@Override
-public void setAttribute(Attribute attribute)
-throws AttributeNotFoundException, InvalidAttributeValueException,
-MBeanException, ReflectionException {
-}
-
-@Override
-public AttributeList getAttributes(String[] attributes) {
-AttributeList result = new AttributeList();
-for (String attributeName : attributes) {
-try {
-Object value = getAttribute(attributeName);
-result.add(value);
-} catch (Exception e) {
-//TODO
-}
-}
-return result;
-}
-
-@Override
-public AttributeList setAttributes(AttributeList attributes) {
-return null;
-}
-
-@Override
-public Object invoke(String actionName, Object[] params, String[] signature)
-throws MBeanException, ReflectionException {
-return null;
-}
-
-@Override
-public MBeanInfo getMBeanInfo() {
-
-List<MBeanAttributeInfo> attributes = new ArrayList<>();
-for (TickerType tickerType : TickerType.values()) {
-attributes.add(new MBeanAttributeInfo(tickerType.name(), "long",
-"RocksDBStat: " + tickerType.name(), true, false, false));
-}
-for (HistogramType histogramType : HistogramType.values()) {
-for (String histogramAttribute : histogramAttributes) {
-attributes.add(new MBeanAttributeInfo(
-histogramType.name() + "_" + histogramAttribute.toUpperCase(),
-"long", "RocksDBStat: " + histogramType.name(), true, false,
-false));
-}
-}
-
-return new MBeanInfo("", "RocksDBStat",
-attributes.toArray(new MBeanAttributeInfo[0]), null, null, null);
-
+public void unregister() {
+MetricsSystem ms = DefaultMetricsSystem.instance();
+ms.unregisterSource(this.contextName);
}

@Override
Expand All @@ -245,7 +154,13 @@ public void getMetrics(MetricsCollector metricsCollector, boolean b) {
* @param rb Metrics Record Builder.
*/
private void getHistogramData(MetricsRecordBuilder rb) {
+if (this.statistics == null) {
+return;
+}
for (HistogramType histogramType : HistogramType.values()) {
+if (histogramType.name().startsWith(BLOB_DB_PREFIX)) {
+continue;
+}
HistogramData histogram =
statistics.getHistogramData(
HistogramType.valueOf(histogramType.name()));
@@ -269,7 +184,13 @@ private void getHistogramData(MetricsRecordBuilder rb) {
* @param rb Metrics Record Builder.
*/
private void getTickerTypeData(MetricsRecordBuilder rb) {
+if (this.statistics == null) {
+return;
+}
for (TickerType tickerType : TickerType.values()) {
+if (tickerType.name().startsWith(BLOB_DB_PREFIX)) {
+continue;
+}
rb.addCounter(Interns.info(tickerType.name(), "RocksDBStat"),
statistics.getTickerCount(tickerType));
}
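Illustrative sketch, not part of this change: how a caller is expected to drive the new lifecycle, assuming an already-open RocksDatabase and its org.rocksdb.Statistics object (the type dbOptions.statistics() returns). create() either registers a fresh source with Hadoop's DefaultMetricsSystem under "Rocksdb_" + dbName or returns the source already registered under that name; unregister() removes it again. The helper class and method names below are made up for illustration.

import org.apache.hadoop.hdds.utils.RocksDBStoreMetrics;
import org.apache.hadoop.hdds.utils.db.RocksDatabase;
import org.rocksdb.Statistics;

/** Hypothetical helper; only sketches the create()/unregister() lifecycle. */
public final class RocksDbMetricsLifecycleSketch {

  private RocksDbMetricsLifecycleSketch() {
  }

  /** Registers (or reuses) the metrics source for an already-open database. */
  public static RocksDBStoreMetrics register(Statistics statistics,
      RocksDatabase db, String dbName) {
    // create() is idempotent per context name: a second call with the same
    // dbName returns the source that is already registered.
    return RocksDBStoreMetrics.create(statistics, db, dbName);
  }

  /** Drops the source from DefaultMetricsSystem, e.g. when the DB is closed. */
  public static void shutdown(RocksDBStoreMetrics metrics) {
    if (metrics != null) {
      metrics.unregister();
    }
  }
}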
@@ -19,7 +19,6 @@

package org.apache.hadoop.hdds.utils.db;

-import javax.management.ObjectName;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
@@ -30,16 +29,15 @@
import java.util.Set;

import java.util.concurrent.TimeUnit;
-import org.apache.hadoop.hdds.HddsUtils;

import org.apache.hadoop.hdds.utils.IOUtils;
-import org.apache.hadoop.hdds.utils.RocksDBStoreMBean;
+import org.apache.hadoop.hdds.utils.RocksDBStoreMetrics;
import org.apache.hadoop.hdds.utils.db.cache.TableCache;
import org.apache.hadoop.hdds.utils.db.RocksDatabase.ColumnFamily;
import org.apache.hadoop.hdds.utils.db.managed.ManagedCompactRangeOptions;
import org.apache.hadoop.hdds.utils.db.managed.ManagedDBOptions;
import org.apache.hadoop.hdds.utils.db.managed.ManagedTransactionLogIterator;
import org.apache.hadoop.hdds.utils.db.managed.ManagedWriteOptions;
-import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.ozone.rocksdiff.RocksDBCheckpointDiffer;

import com.google.common.annotations.VisibleForTesting;
@@ -67,7 +65,7 @@ public class RDBStore implements DBStore {
private final RocksDatabase db;
private final File dbLocation;
private final CodecRegistry codecRegistry;
-private ObjectName statMBeanName;
+private RocksDBStoreMetrics metrics;
private final RDBCheckpointManager checkPointManager;
private final String checkpointsParentDir;
private final String snapshotsParentDir;
@@ -122,20 +120,16 @@ public RDBStore(File dbFile, ManagedDBOptions dbOptions,
db = RocksDatabase.open(dbFile, dbOptions, writeOptions,
families, readOnly);

-if (dbOptions.statistics() != null) {
-Map<String, String> jmxProperties = new HashMap<>();
-jmxProperties.put("dbName", dbJmxBeanName);
-statMBeanName = HddsUtils.registerWithJmxProperties(
-"Ozone", "RocksDbStore", jmxProperties,
-RocksDBStoreMBean.create(dbOptions.statistics(), db,
-dbJmxBeanName));
-if (statMBeanName == null) {
-LOG.warn("jmx registration failed during RocksDB init, db path :{}",
-dbJmxBeanName);
-} else {
-LOG.debug("jmx registration succeed during RocksDB init, db path :{}",
-dbJmxBeanName);
-}
+// dbOptions.statistics() only contributes to part of the RocksDB metrics in
+// Ozone, so enable RocksDB metrics even if dbOptions.statistics() is off.
+metrics = RocksDBStoreMetrics.create(dbOptions.statistics(), db,
+dbJmxBeanName);
+if (metrics == null) {
+LOG.warn("Metrics registration failed during RocksDB init, " +
+"db path :{}", dbJmxBeanName);
+} else {
+LOG.debug("Metrics registration succeeded during RocksDB init, " +
+"db path :{}", dbJmxBeanName);
}

//create checkpoints directory if not exists.
@@ -219,9 +213,9 @@ public void compactDB() throws IOException {

@Override
public void close() throws IOException {
-if (statMBeanName != null) {
-MBeans.unregister(statMBeanName);
-statMBeanName = null;
+if (metrics != null) {
+metrics.unregister();
+metrics = null;
}

RDBMetrics.unRegister();
@@ -275,12 +269,6 @@ public void commitBatchOperation(BatchOperation operation)
((RDBBatchOperation) operation).commit(db);
}


-@VisibleForTesting
-protected ObjectName getStatMBeanName() {
-return statMBeanName;
-}
-
@Override
public Table<byte[], byte[]> getTable(String name) throws IOException {
final ColumnFamily handle = db.getColumnFamily(name);
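Illustrative sketch, not part of this change: confirming that an RDBStore's RocksDB metrics source is currently registered, using only the context-name convention ("Rocksdb_" + the dbJmxBeanName passed above) and the DefaultMetricsSystem calls that already appear in this diff. The class and method names are hypothetical.

import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;

/** Hypothetical check; illustrative only. */
public final class RocksDbMetricsPresenceSketch {

  private RocksDbMetricsPresenceSketch() {
  }

  /**
   * Returns true while the "Rocksdb_" + dbName source is registered,
   * i.e. after RDBStore construction and before close() unregisters it.
   */
  public static boolean isRegistered(String dbName) {
    MetricsSource source =
        DefaultMetricsSystem.instance().getSource("Rocksdb_" + dbName);
    return source != null;
  }
}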
@@ -19,10 +19,8 @@

package org.apache.hadoop.hdds.utils.db;

-import javax.management.MBeanServer;
import java.io.File;
import java.io.IOException;
-import java.lang.management.ManagementFactory;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
@@ -197,32 +195,6 @@ public void getEstimatedKeyCount() throws Exception {
|| rdbStore.getEstimatedKeyCount() < 220);
}

-@Test
-public void getStatMBeanName() throws Exception {
-
-try (Table firstTable = rdbStore.getTable(families.get(1))) {
-for (int y = 0; y < 100; y++) {
-byte[] key =
-RandomStringUtils.random(10).getBytes(StandardCharsets.UTF_8);
-byte[] value =
-RandomStringUtils.random(10).getBytes(StandardCharsets.UTF_8);
-firstTable.put(key, value);
-}
-}
-MBeanServer platformMBeanServer =
-ManagementFactory.getPlatformMBeanServer();
-Thread.sleep(2000);
-
-Object keysWritten = platformMBeanServer
-.getAttribute(rdbStore.getStatMBeanName(), "NUMBER_KEYS_WRITTEN");
-
-Assertions.assertTrue(((Long) keysWritten) >= 99L);
-
-Object dbWriteAverage = platformMBeanServer
-.getAttribute(rdbStore.getStatMBeanName(), "DB_WRITE_AVERAGE");
-Assertions.assertTrue((double) dbWriteAverage > 0);
-}
-
@Test
public void getTable() throws Exception {
for (String tableName : families) {
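Illustrative sketch, not part of this change: the deleted getStatMBeanName test went through the platform MBean server, which this patch removes. An equivalent check against the new MetricsSource could look like the helper below. It assumes the test can reach the RocksDBStoreMetrics instance (this patch does not add such an accessor) and that NUMBER_KEYS_WRITTEN is still emitted as a counter, as getTickerTypeData() above suggests; it uses Hadoop's MetricsAsserts test utility instead of JMX.

import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
import static org.apache.hadoop.test.MetricsAsserts.getMetrics;

import org.apache.hadoop.hdds.utils.RocksDBStoreMetrics;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.junit.jupiter.api.Assertions;

/** Hypothetical test helper; not part of this change. */
final class RocksDbTickerAssertSketch {

  private RocksDbTickerAssertSketch() {
  }

  static void assertKeysWritten(RocksDBStoreMetrics metrics, long atLeast) {
    // getMetrics(...) drives MetricsSource#getMetrics(...) against a mock
    // collector and returns a MetricsRecordBuilder that captured the emitted
    // values, so no sleep or JMX round trip is needed.
    MetricsRecordBuilder rb = getMetrics(metrics);
    Assertions.assertTrue(getLongCounter("NUMBER_KEYS_WRITTEN", rb) >= atLeast);
  }
}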
@@ -102,7 +102,6 @@ public class SCMMetadataStoreImpl implements SCMMetadataStore {
private DBStore store;
private final OzoneConfiguration configuration;

-private SCMMetadataStoreMetrics metrics;
private Map<String, Table<?, ?>> tableMap = new ConcurrentHashMap<>();

/**
@@ -198,8 +197,6 @@ public void start(OzoneConfiguration config)

checkAndPopulateTable(statefulServiceConfigTable,
STATEFUL_SERVICE_CONFIG.getName());

-metrics = SCMMetadataStoreMetrics.create(this);
}
}

@@ -209,10 +206,6 @@ public void stop() throws Exception {
store.close();
store = null;
}
-if (metrics != null) {
-metrics.unRegister();
-metrics = null;
-}
}

@Override
@@ -335,8 +328,4 @@ private void checkAndPopulateTable(Table table, String name)
Map<String, Table<?, ?>> getTableMap() {
return tableMap;
}

-SCMMetadataStoreMetrics getMetrics() {
-return metrics;
-}
}