Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions hbase-common/src/main/resources/hbase-default.xml
Original file line number Diff line number Diff line change
Expand Up @@ -1786,6 +1786,14 @@ possible configurations would overwhelm and obscure the important.
ThreadPool.
</description>
</property>
<property>
<name>hbase.http.metrics.servlets</name>
<value>jmx,prometheus</value>
<description>
Comma-separated list of servlet names to enable for metrics collection. The supported
servlets are jmx, metrics, and prometheus.
</description>
</property>
<property>
<name>hbase.replication.rpc.codec</name>
<value>org.apache.hadoop.hbase.codec.KeyValueCodecWithTags</value>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;

import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.yetus.audience.InterfaceAudience;

@InterfaceAudience.Private
public class MetricsExportHelper {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Make this class final and add a private constructor.


public static Collection<MetricsRecord> export() {
MetricsSystemImpl instance = (MetricsSystemImpl) DefaultMetricsSystem.instance();
MetricsBuffer metricsBuffer = instance.sampleMetrics();
List<MetricsRecord> metrics = new LinkedList<>();
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Use ArrayList. LinkedList will lead to an error-prone warning.

for (MetricsBuffer.Entry entry : metricsBuffer) {
entry.records().forEach(metrics::add);
}
return metrics;
}

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;

import java.util.Collection;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MetricsTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.impl.MetricsExportHelper;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({ MetricsTests.class, SmallTests.class })
public class TestMetricsExportHelper {

@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestMetricsExportHelper.class);

@Test
public void testExportHelper() {
DefaultMetricsSystem.initialize("exportHelperTestSystem");
DefaultMetricsSystem.instance().start();

String metricsName = "exportMetricsTestGrp";
String gaugeName = "exportMetricsTestGauge";
String counterName = "exportMetricsTestCounter";

BaseSourceImpl baseSource = new BaseSourceImpl(metricsName, "", metricsName, metricsName);

baseSource.setGauge(gaugeName, 0);
baseSource.incCounters(counterName, 1);

Collection<MetricsRecord> metrics = MetricsExportHelper.export();
DefaultMetricsSystem.instance().stop();

Assert.assertTrue(metrics.stream().anyMatch(mr -> mr.name().equals(metricsName)));
Assert.assertTrue(gaugeName + " is missing in the export",
contains(metrics, metricsName, gaugeName));
Assert.assertTrue(counterName + " is missing in the export",
contains(metrics, metricsName, counterName));
}

private boolean contains(Collection<MetricsRecord> metrics, String metricsName,
String metricName) {
return metrics.stream().filter(mr -> mr.name().equals(metricsName)).anyMatch(mr -> {
for (AbstractMetric metric : mr.metrics()) {
if (metric.name().equals(metricName)) return true;
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

New line with '{}', otherwise there will be a checkstyle warning.

}
return false;
});
}
}
13 changes: 13 additions & 0 deletions hbase-http/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,19 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
</dependency>
<!-- resource bundle only needed at build time -->
<dependency>
<groupId>org.apache.hbase</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,6 @@
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.http.conf.ConfServlet;
import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
import org.apache.hadoop.hbase.http.log.LogLevel;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.util.Threads;
Expand Down Expand Up @@ -154,6 +153,20 @@ public class HttpServer implements FilterContainer {
public static final String NO_CACHE_FILTER = "NoCacheFilter";
public static final String APP_DIR = "webapps";

public static final String METRIC_SERVLETS_CONF_KEY = "hbase.http.metrics.servlets";
public static final String[] METRICS_SERVLETS_DEFAULT = { "jmx" };
// Known metrics servlets, keyed by the short name used in hbase.http.metrics.servlets.
// Built in a static initializer instead of double-brace initialization, which creates an
// anonymous HashMap subclass and trips an error-prone warning.
private static final Map<String, ServletConfig> METRIC_SERVLETS = new HashMap<>();
static {
  METRIC_SERVLETS.put("jmx",
    new ServletConfig("jmx", "/jmx", "org.apache.hadoop.hbase.http.jmx.JMXJsonServlet"));
  METRIC_SERVLETS.put("metrics",
    new ServletConfig("metrics", "/metrics", "org.apache.hadoop.metrics.MetricsServlet"));
  // NOTE(review): "PrometheusHadoop2Servlet" does not match the PrometheusHadoopServlet
  // class added in this change -- confirm the fully-qualified name is correct.
  METRIC_SERVLETS.put("prometheus", new ServletConfig("prometheus", "/prometheus",
    "org.apache.hadoop.hbase.http.prometheus.PrometheusHadoop2Servlet"));
}

private final AccessControlList adminsAcl;

protected final Server webServer;
Expand Down Expand Up @@ -751,16 +764,7 @@ protected void addDefaultServlets(ContextHandlerCollection contexts, Configurati
// set up default servlets
addPrivilegedServlet("stacks", "/stacks", StackServlet.class);
addPrivilegedServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
// Hadoop3 has moved completely to metrics2, and dropped support for Metrics v1's
// MetricsServlet (see HADOOP-12504). We'll use reflection to load it against hadoop2.
// Remove when we drop support for hbase on hadoop2.x.
try {
Class<?> clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet");
addPrivilegedServlet("metrics", "/metrics", clz.asSubclass(HttpServlet.class));
} catch (Exception e) {
// do nothing
}
addPrivilegedServlet("jmx", "/jmx", JMXJsonServlet.class);

// While we don't expect users to have sensitive information in their configuration, they
// might. Give them an option to not expose the service configuration to all users.
if (conf.getBoolean(HTTP_PRIVILEGED_CONF_KEY, HTTP_PRIVILEGED_CONF_DEFAULT)) {
Expand All @@ -784,6 +788,20 @@ protected void addDefaultServlets(ContextHandlerCollection contexts, Configurati
LOG.info("ASYNC_PROFILER_HOME environment variable and async.profiler.home system property "
+ "not specified. Disabling /prof endpoint.");
}

/* register metrics servlets */
String enabledServlets[] = conf.getStrings(METRIC_SERVLETS_CONF_KEY, METRICS_SERVLETS_DEFAULT);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

String enabledServlets[] -> String[] enabledServlets

for (String enabledServlet : enabledServlets) {
try {
ServletConfig servletConfig = METRIC_SERVLETS.get(enabledServlet);
Class<?> clz = Class.forName(servletConfig.getClazz());
addPrivilegedServlet(servletConfig.getName(), servletConfig.getPathSpec(),
clz.asSubclass(HttpServlet.class));
} catch (Exception e) {
/* shouldn't be fatal, so warn the user about it */
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm surprised that having metrics be optional was accepted by review. Before this change, it was impossible to run an HBase processes that exposes no metrics at all. Now, that can happen. We do not have other healthcheck endpoints like a modern web service, metrics are the only lifeline for an operations team (besides inspecting the pid). I think that we should be more careful here:

  • we should warn if the process is configured without any metrics endpoint
  • we should warn if a configured metric endpoint fails to load
  • we should abort the process launch if the process is configured with any metrics but none load

Copy link
Contributor

@Apache9 Apache9 Aug 24, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

For me, I think the current implementation is enough. We will warn if a registered endpoint fails to load. And the default configuration is to load all metrics endpoints, if users configured it to none, they must have a reason, so I do not think here we need to warn it. And I also do not think we should abort the process if none metrics can be loaded, as it does not affect the normal read/write.

LOG.warn("Couldn't register the servlet " + enabledServlet, e);
}
}
}

/**
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.http;

import org.apache.yetus.audience.InterfaceAudience;

/**
 * Immutable value object describing a servlet registration: the short name used in
 * configuration, the URL path it is mounted on, and the implementing class.
 */
@InterfaceAudience.Private
final class ServletConfig {

  private final String name;
  private final String pathSpec;
  private final String clazz;

  /**
   * @param name     short servlet name, also the configuration key (e.g. "jmx")
   * @param pathSpec URL path the servlet is mounted on (e.g. "/jmx")
   * @param clazz    fully-qualified class name of the servlet implementation
   */
  public ServletConfig(String name, String pathSpec, String clazz) {
    this.name = name;
    this.pathSpec = pathSpec;
    this.clazz = clazz;
  }

  public String getName() {
    return name;
  }

  public String getPathSpec() {
    return pathSpec;
  }

  public String getClazz() {
    return clazz;
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.http.prometheus;

import com.google.errorprone.annotations.RestrictedApi;
import java.io.IOException;
import java.io.Writer;
import java.util.Collection;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.impl.MetricsExportHelper;
import org.apache.yetus.audience.InterfaceAudience;

@InterfaceAudience.Private
public class PrometheusHadoopServlet extends HttpServlet {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why is this called a "Hadoop" servlet?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Because it exports the metrics we registered to hadoop metrics system?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Oh no, why was this implemented as a raw servlet :'(

We have Jersey. There is absolutely no reason to implement a servlet by hand.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I do not see any advantages here to use jersey? The servlet is really easy to implement...


private static final Pattern SPLIT_PATTERN =
Pattern.compile("(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=([A-Z][a-z]))|\\W|(_)+");

@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
writeMetrics(resp.getWriter());
}

static String toPrometheusName(String metricRecordName, String metricName) {
String baseName = metricRecordName + StringUtils.capitalize(metricName);
String[] parts = SPLIT_PATTERN.split(baseName);
return String.join("_", parts).toLowerCase();
}

/*
* SimpleClient for Prometheus is not used, because the format is very easy to implement and this
* solution doesn't add any dependencies to the project. You can check the Prometheus format here:
* https://prometheus.io/docs/instrumenting/exposition_formats/
*/
@RestrictedApi(explanation = "Should only be called in tests", link = "",
allowedOnPath = ".*/src/test/.*")
void writeMetrics(Writer writer) throws IOException {
Collection<MetricsRecord> metricRecords = MetricsExportHelper.export();
for (MetricsRecord metricsRecord : metricRecords) {
for (AbstractMetric metrics : metricsRecord.metrics()) {
if (metrics.type() == MetricType.COUNTER || metrics.type() == MetricType.GAUGE) {

String key = toPrometheusName(metricsRecord.name(), metrics.name());
writer.append("# TYPE ").append(key).append(" ")
.append(metrics.type().toString().toLowerCase()).append("\n").append(key).append("{");

/* add tags */
String sep = "";
for (MetricsTag tag : metricsRecord.tags()) {
String tagName = tag.name().toLowerCase();
writer.append(sep).append(tagName).append("=\"").append(tag.value()).append("\"");
sep = ",";
}
writer.append("} ");
writer.append(metrics.value().toString()).append('\n');
}
}
}
writer.flush();
}
}
Loading