@@ -42,7 +42,7 @@ public class TestVolumeIOStatsWithPrometheusSink {
public void init() {
metrics = DefaultMetricsSystem.instance();
metrics.init("test");
sink = new PrometheusMetricsSink();
sink = new PrometheusMetricsSink("random");
metrics.register("Prometheus", "Prometheus", sink);
}

hadoop-hdds/framework/dev-support/findbugsExcludeFile.xml (0 additions, 4 deletions)
@@ -21,10 +21,6 @@
</Match>

<!-- Test -->
<Match>
<Class name="org.apache.hadoop.hdds.server.http.TestPrometheusMetricsSink"></Class>
<Bug pattern="RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT" />
</Match>
<Match>
<Class name="org.apache.hadoop.hdds.server.http.TestRatisDropwizardExports"></Class>
<Bug pattern="DLS_DEAD_LOCAL_STORE" />
@@ -148,7 +148,7 @@ public BaseHttpServer(MutableConfigurationSource conf, String name)
conf.getBoolean(HddsConfigKeys.HDDS_PROFILER_ENABLED, false);

if (prometheusSupport) {
prometheusMetricsSink = new PrometheusMetricsSink();
prometheusMetricsSink = new PrometheusMetricsSink(name);
httpServer.getWebAppContext().getServletContext()
.setAttribute(PROMETHEUS_SINK, prometheusMetricsSink);
HddsPrometheusConfig prometheusConfig =
@@ -17,18 +17,14 @@
*/
package org.apache.hadoop.hdds.server.http;

import static org.apache.hadoop.hdds.utils.RocksDBStoreMBean.ROCKSDB_CONTEXT_PREFIX;

import java.io.IOException;
import java.io.Writer;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.List;
import java.util.regex.Pattern;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hdds.utils.DecayRpcSchedulerUtil;
import org.apache.hadoop.hdds.utils.PrometheusMetricsSinkUtil;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsRecord;
@@ -49,14 +45,10 @@ public class PrometheusMetricsSink implements MetricsSink {
Collections.synchronizedSortedMap(new TreeMap<>());
private Map<String, Map<String, String>> nextMetricLines =
Collections.synchronizedSortedMap(new TreeMap<>());
private final String servername;

private static final Pattern SPLIT_PATTERN =
Pattern.compile("(?<!(^|[A-Z_]))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])");

private static final Pattern REPLACE_PATTERN =
Pattern.compile("[^a-zA-Z0-9]+");

public PrometheusMetricsSink() {
public PrometheusMetricsSink(String servername) {
this.servername = servername;
}

@Override
@@ -65,13 +57,16 @@ public void putMetrics(MetricsRecord metricsRecord) {
if (metric.type() == MetricType.COUNTER
|| metric.type() == MetricType.GAUGE) {

String metricName = DecayRpcSchedulerUtil
.splitMetricNameIfNeeded(metricsRecord.name(), metric.name());
String metricName =
PrometheusMetricsSinkUtil.getMetricName(metricsRecord.name(),
metric.name());

// If there is no username this should be null
String username = DecayRpcSchedulerUtil
.checkMetricNameForUsername(metricsRecord.name(), metric.name());
String username =
PrometheusMetricsSinkUtil.getUsername(metricsRecord.name(),
metric.name());

String key = prometheusName(
String key = PrometheusMetricsSinkUtil.prometheusName(
metricsRecord.name(), metricName);

String prometheusMetricKeyAsString =
@@ -98,56 +93,31 @@ private String getPrometheusMetricKeyAsString(MetricsRecord metricsRecord,
.append("{");
String sep = "";

// tagListWithUsernameIfNeeded() checks if username is null.
// If it's not then it returns a list with the existing
// metric tags and a username tag.
List<MetricsTag> metricTagList = DecayRpcSchedulerUtil
.tagListWithUsernameIfNeeded(metricsRecord, username);
List<MetricsTag> metricsTags =
PrometheusMetricsSinkUtil.addTags(key, username, servername,
metricsRecord.tags());

//add tags
for (MetricsTag tag : metricTagList) {
for (MetricsTag tag : metricsTags) {
String tagName = tag.name().toLowerCase();

//ignore specific tag which includes sub-hierarchy
if (!tagName.equals("numopenconnectionsperuser")) {
prometheusMetricKey.append(sep)
.append(tagName)
.append("=\"")
.append(tag.value())
.append("\"");
sep = ",";
if (tagName.equals("numopenconnectionsperuser")) {
continue;
}

prometheusMetricKey.append(sep)
.append(tagName)
.append("=\"")
.append(tag.value())
.append("\"");
sep = ",";
}
prometheusMetricKey.append("}");

return prometheusMetricKey.toString();
}
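// The key built above follows the Prometheus exposition format
// name{tag="value",...}; for example (illustrative tag values only,
// not taken from this change):
//   rpc_rpc_queue_time_num_ops{context="rpc",hostname="host1",username="alice"}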

/**
* Convert CamelCase based names to lower-case names where the separator
* is the underscore, to follow prometheus naming conventions.
*/
public String prometheusName(String recordName,
String metricName) {

// RocksDB metric names already have underscores as delimiters,
// but record name is from DB file name and '.' (as in 'om.db') is invalid
if (StringUtils.isNotEmpty(recordName) &&
recordName.startsWith(ROCKSDB_CONTEXT_PREFIX)) {
return normalizeName(recordName) + "_" + metricName.toLowerCase();
}

String baseName = StringUtils.capitalize(recordName)
+ StringUtils.capitalize(metricName);
return normalizeName(baseName);
}

public static String normalizeName(String baseName) {
String[] parts = SPLIT_PATTERN.split(baseName);
String result = String.join("_", parts).toLowerCase();
return REPLACE_PATTERN.matcher(result).replaceAll("_");
}

@Override
public void flush() {
synchronized (this) {
@@ -18,13 +18,10 @@
package org.apache.hadoop.hdds.utils;

import com.google.common.base.Strings;
import java.util.Optional;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsTag;

import java.util.ArrayList;
import java.util.List;

/**
* Helper functions for DecayRpcScheduler
* metrics for Prometheus.
@@ -34,25 +31,13 @@ public final class DecayRpcSchedulerUtil {
private DecayRpcSchedulerUtil() {
}

private static final MetricsInfo USERNAME_INFO = new MetricsInfo() {
@Override
public String name() {
return "username";
}

@Override
public String description() {
return "caller username";
}
};

/**
* For Decay_Rpc_Scheduler, the metric name is in format
* "Caller(<callers_username>).Volume"
* or
* "Caller(<callers_username>).Priority"
* Split it and return the metric.
*
* <p>
* If the recordName doesn't belong to Decay_Rpc_Scheduler,
* then return the metricName as it is without making
* any changes to it.
@@ -78,6 +63,7 @@ public static String splitMetricNameIfNeeded(String recordName,
* For Decay_Rpc_Scheduler, split the metric name
* and then get the part that is in the format "Caller(<callers_username>)"
* and split it to return the username.
*
* @param recordName
* @param metricName
* @return caller username or null if not present
@@ -102,20 +88,33 @@ public static String checkMetricNameForUsername(String recordName,
return null;
}


/**
* MetricRecord.tags() is an unmodifiable collection of tags.
* Store it in a list, to modify it and add a username tag.
* @param metricsRecord
* @return the new list with the metric tags and the username tag
* Create a <tt>username</tt> metrics tag.
* @param username caller username
* @return empty optional if no metrics tag was created, otherwise
* optional of metrics tag.
*/
public static List<MetricsTag> tagListWithUsernameIfNeeded(
MetricsRecord metricsRecord, String username) {
List<MetricsTag> list = new ArrayList<>(metricsRecord.tags());

if (!Strings.isNullOrEmpty(username)) {
MetricsTag tag = new MetricsTag(USERNAME_INFO, username);
list.add(tag);
public static Optional<MetricsTag> createUsernameTag(String username) {
if (Strings.isNullOrEmpty(username)) {
return Optional.empty();
}
return list;

final String name = "username";
final String description = "caller username";
final MetricsInfo metricsInfo = new MetricsInfo() {
@Override
public String name() {
return name;
}

@Override
public String description() {
return description;
}
};
MetricsTag metricsTag = new MetricsTag(metricsInfo, username);
return Optional.of(metricsTag);
}

}
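A minimal sketch of how the new Optional-returning helper could be exercised (hypothetical test class, assuming JUnit 4; not part of this diff):

import java.util.Optional;
import org.apache.hadoop.hdds.utils.DecayRpcSchedulerUtil;
import org.apache.hadoop.metrics2.MetricsTag;
import org.junit.Assert;
import org.junit.Test;

public class TestDecayRpcSchedulerUtilSketch {

  @Test
  public void createsUsernameTagOnlyWhenUsernameIsPresent() {
    // A non-empty username yields a tag named "username" with that value.
    Optional<MetricsTag> tag = DecayRpcSchedulerUtil.createUsernameTag("alice");
    Assert.assertTrue(tag.isPresent());
    Assert.assertEquals("username", tag.get().name());
    Assert.assertEquals("alice", tag.get().value());

    // Null or empty usernames produce no tag.
    Assert.assertFalse(DecayRpcSchedulerUtil.createUsernameTag("").isPresent());
    Assert.assertFalse(
        DecayRpcSchedulerUtil.createUsernameTag(null).isPresent());
  }
}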
@@ -0,0 +1,116 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdds.utils;

import static org.apache.hadoop.hdds.utils.RocksDBStoreMBean.ROCKSDB_CONTEXT_PREFIX;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.metrics2.MetricsTag;

/**
* Util class for
* {@link org.apache.hadoop.hdds.server.http.PrometheusMetricsSink}.
*/
public final class PrometheusMetricsSinkUtil {
A reviewer (Contributor) commented on this class (a rough sketch of the suggested UgiMetricsUtil helper follows the class listing below):

This class should be more generic and agnostic to any metrics-specific operations. It would be best to have another final class called UgiMetricsUtil similar to DecayRpcSchedulerUtil and move the relevant methods under it.

Also, if it turns out that we have a dependency issue, it will be best to have the code that causes the issue in a new class and maybe move that class under another package.

private static final Pattern SPLIT_PATTERN =
Pattern.compile("(?<!(^|[A-Z_]))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])");
private static final Pattern REPLACE_PATTERN =
Pattern.compile("[^a-zA-Z0-9]+");

/**
* Never constructed.
*/
private PrometheusMetricsSinkUtil() {
}

/**
* Adds necessary tags.
*
* @param key metrics entry key
* @param username caller username
* @param servername servername
* @param unmodifiableTags list of metrics tags
* @return modifiable list of metrics tags
*/
public static List<MetricsTag> addTags(String key, String username,
String servername, Collection<MetricsTag> unmodifiableTags) {
List<MetricsTag> metricTags = new ArrayList<>(unmodifiableTags);

Stream.of(DecayRpcSchedulerUtil.createUsernameTag(username),
UgiMetricsUtil.createServernameTag(key, servername))
.forEach(
metricsTag -> metricsTag.ifPresent(mt -> addTag(mt, metricTags)));

return metricTags;
}

/**
* Adds a metrics tag to a list of metrics tags.
* @param metricsTag metrics tag to be added
* @param metricsTags metrics tags where metrics tag needs to be added
*/
private static void addTag(MetricsTag metricsTag,
List<MetricsTag> metricsTags) {
metricsTags.add(metricsTag);
}

/**
* Convert CamelCase based names to lower-case names where the separator
* is the underscore, to follow prometheus naming conventions.
*/
public static String prometheusName(String recordName,
String metricName) {

// RocksDB metric names already have underscores as delimiters,
// but record name is from DB file name and '.' (as in 'om.db') is invalid
if (StringUtils.isNotEmpty(recordName) &&
recordName.startsWith(ROCKSDB_CONTEXT_PREFIX)) {
return normalizeName(recordName) + "_" + metricName.toLowerCase();
}

String baseName = StringUtils.capitalize(recordName)
+ StringUtils.capitalize(metricName);
return normalizeName(baseName);
}
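// For example (illustrative inputs, not taken from this change):
//   prometheusName("UgiMetrics", "GetGroupsNumOps")
//       returns "ugi_metrics_get_groups_num_ops"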

/**
* Normalizes a metric name to follow Prometheus naming conventions.
* @param baseName name to normalize
* @return normalized name.
*/
private static String normalizeName(String baseName) {
String[] parts = SPLIT_PATTERN.split(baseName);
String result = String.join("_", parts).toLowerCase();
return REPLACE_PATTERN.matcher(result).replaceAll("_");
}

public static String getMetricName(String recordName, String metricName) {
return DecayRpcSchedulerUtil.splitMetricNameIfNeeded(recordName,
metricName);
}

public static String getUsername(String recordName, String metricName) {
return DecayRpcSchedulerUtil.checkMetricNameForUsername(recordName,
metricName);
}
}
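As an aside on the reviewer comment above: UgiMetricsUtil.createServernameTag(key, servername) is called from addTags(), but its implementation is not part of this diff. A minimal sketch of what such a helper could look like, assuming it only tags UGI metrics (the key filter, tag name, and description below are assumptions, not code from this PR):

import java.util.Optional;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsTag;

/**
 * Hypothetical sketch of the UgiMetricsUtil helper suggested above.
 */
public final class UgiMetricsUtil {

  private UgiMetricsUtil() {
  }

  /**
   * Create a servername tag for UGI metrics.
   * @param key prometheus metric key
   * @param servername name of the server emitting the metric
   * @return empty optional if the key is not a UGI metric, otherwise
   *         an optional of a servername tag
   */
  public static Optional<MetricsTag> createServernameTag(
      String key, String servername) {
    // Assumed filter: only UGI metrics receive the extra tag.
    if (!key.startsWith("ugi_metrics")) {
      return Optional.empty();
    }
    MetricsInfo metricsInfo = new MetricsInfo() {
      @Override
      public String name() {
        return "servername";
      }

      @Override
      public String description() {
        return "name of the server";
      }
    };
    return Optional.of(new MetricsTag(metricsInfo, servername));
  }
}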