@@ -452,4 +452,6 @@ StatusAndMessages queryUpgradeFinalizationProgress(

DecommissionScmResponseProto decommissionScm(
String scmId) throws IOException;

String getMetrics(String query) throws IOException;
}
@@ -474,4 +474,6 @@ List<ContainerInfo> getListOfContainers(

DecommissionScmResponseProto decommissionScm(
String scmId) throws IOException;

String getMetrics(String query) throws IOException;
}
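
Taken together, these two hunks add the same admin-facing call to both client interfaces: the caller passes a JMX ObjectName pattern (or null for all registered beans) and gets the matching beans back as a JSON string. A minimal caller sketch, where scmClient stands in for any implementation of the interfaces above (the variable name is illustrative, not part of this change):

// Hedged usage sketch; any implementation of the interface works here,
// e.g. the client-side translator shown later in this diff.
String json = scmClient.getMetrics("Hadoop:service=StorageContainerManager,name=*");
System.out.println(json); // prints {"beans":[ ... ]}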
@@ -68,6 +68,8 @@
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetPipelineResponseProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerCountRequestProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerCountResponseProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetMetricsRequestProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetMetricsResponseProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.InSafeModeRequestProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ListPipelineRequestProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ListPipelineResponseProto;
@@ -1143,4 +1145,13 @@ public DecommissionScmResponseProto decommissionScm(
.getDecommissionScmResponse();
return response;
}

@Override
public String getMetrics(String query) throws IOException {
// The query field is optional in the proto; skip setQuery for null input
// to avoid the NullPointerException protobuf throws on null strings.
GetMetricsRequestProto.Builder requestBuilder = GetMetricsRequestProto.newBuilder();
if (query != null) {
requestBuilder.setQuery(query);
}
GetMetricsRequestProto request = requestBuilder.build();
GetMetricsResponseProto response = submitRequest(Type.GetMetrics,
builder -> builder.setGetMetricsRequest(request)).getGetMetricsResponse();
return response.getMetricsJson();
}
}
11 changes: 11 additions & 0 deletions hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto
@@ -83,6 +83,7 @@ message ScmContainerLocationRequest {
optional DecommissionScmRequestProto decommissionScmRequest = 44;
optional SingleNodeQueryRequestProto singleNodeQueryRequest = 45;
optional GetContainersOnDecomNodeRequestProto getContainersOnDecomNodeRequest = 46;
optional GetMetricsRequestProto getMetricsRequest = 47;
}

message ScmContainerLocationResponse {
@@ -137,6 +138,7 @@ message ScmContainerLocationResponse {
optional DecommissionScmResponseProto decommissionScmResponse = 44;
optional SingleNodeQueryResponseProto singleNodeQueryResponse = 45;
optional GetContainersOnDecomNodeResponseProto getContainersOnDecomNodeResponse = 46;
optional GetMetricsResponseProto getMetricsResponse = 47;

enum Status {
OK = 1;
@@ -190,6 +192,7 @@ enum Type {
DecommissionScm = 40;
SingleNodeQuery = 41;
GetContainersOnDecomNode = 42;
GetMetrics = 43;
}

/**
@@ -618,6 +621,14 @@ message GetContainersOnDecomNodeResponseProto {
repeated ContainersOnDecomNodeProto containersOnDecomNode = 1;
}

message GetMetricsRequestProto {
optional string query = 1;
}

message GetMetricsResponseProto {
optional string metricsJson = 1;
}
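
On the wire this is one optional string in each direction: query carries a JMX ObjectName pattern such as Hadoop:service=StorageContainerManager,name=*, and metricsJson carries the serialized bean list produced by the FetchMetrics class below. A sketch of the expected payload shape (field values are illustrative):

{
  "beans" : [ {
    "name" : "Hadoop:service=StorageContainerManager,name=NodeDecommissionMetrics",
    "modelerType" : "NodeDecommissionMetrics",
    ...
  } ]
}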

/**
* Protocol used from an HDFS node to StorageContainerManager. See the request
* and response messages for details of the RPC calls.
@@ -0,0 +1,220 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdds.scm;

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Array;
import java.nio.charset.StandardCharsets;
import java.util.Set;
import javax.management.AttributeNotFoundException;
import javax.management.InstanceNotFoundException;
import javax.management.IntrospectionException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanException;
import javax.management.MBeanInfo;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.management.RuntimeErrorException;
import javax.management.RuntimeMBeanException;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.TabularData;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Class used to fetch metrics from MBeanServer.
*/
public class FetchMetrics {
  private static final Logger LOG = LoggerFactory.getLogger(FetchMetrics.class);
  private final MBeanServer mBeanServer;
  private final JsonFactory jsonFactory;

  public FetchMetrics() {
    this.mBeanServer = ManagementFactory.getPlatformMBeanServer();
    this.jsonFactory = new JsonFactory();
  }

  public String getMetrics(String qry) {
    try {
      ByteArrayOutputStream opStream = new ByteArrayOutputStream();
      try (JsonGenerator jg =
          jsonFactory.createGenerator(opStream, JsonEncoding.UTF8)) {
        jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
        jg.useDefaultPrettyPrinter();
        jg.writeStartObject();
        if (qry == null) {
          qry = "*:*";
        }
        listBeans(jg, new ObjectName(qry));
        // Closing the generator also emits the matching end-object, since
        // AUTO_CLOSE_JSON_CONTENT is enabled by default.
      }
      return new String(opStream.toByteArray(), StandardCharsets.UTF_8);
    } catch (IOException | MalformedObjectNameException ex) {
      LOG.error("Caught an exception while processing getMetrics request", ex);
      return null;
    }
  }

  private void listBeans(JsonGenerator jg, ObjectName qry) throws IOException {
    LOG.debug("Listing beans for {}", qry);
    Set<ObjectName> names = mBeanServer.queryNames(qry, null);
    jg.writeArrayFieldStart("beans");

    for (ObjectName oname : names) {
      String code = "";
      MBeanInfo minfo;
      try {
        minfo = mBeanServer.getMBeanInfo(oname);
        code = minfo.getClassName();
        String prs = "";
        try {
          if ("org.apache.commons.modeler.BaseModelMBean".equals(code)) {
            prs = "modelerType";
            code = (String) mBeanServer.getAttribute(oname, prs);
          }
        } catch (AttributeNotFoundException | MBeanException
            | RuntimeException | ReflectionException ex) {
          LOG.error("getting attribute {} of {} threw an exception", prs, oname, ex);
        }
      } catch (InstanceNotFoundException ex) {
        // The bean was unregistered between queryNames and getMBeanInfo; skip it.
        continue;
      } catch (IntrospectionException | ReflectionException ex) {
        LOG.error("Problem while trying to process JMX query: {} with MBean {}",
            qry, oname, ex);
        continue;
      }
      jg.writeStartObject();
      jg.writeStringField("name", oname.toString());
      jg.writeStringField("modelerType", code);
      for (MBeanAttributeInfo attr : minfo.getAttributes()) {
        writeAttribute(jg, oname, attr);
      }
      jg.writeEndObject();
    }
    jg.writeEndArray();
  }

  private void writeAttribute(JsonGenerator jg, ObjectName oname,
      MBeanAttributeInfo attr) throws IOException {
    if (!attr.isReadable()) {
      return;
    }
    String attName = attr.getName();
    // Skip the synthetic modelerType attribute and names that cannot be
    // queried back reliably (containing '=', ':' or ' ').
    if ("modelerType".equals(attName) || attName.indexOf('=') >= 0
        || attName.indexOf(':') >= 0 || attName.indexOf(' ') >= 0) {
      return;
    }
    Object value;
    try {
      value = mBeanServer.getAttribute(oname, attName);
    } catch (RuntimeMBeanException e) {
      // UnsupportedOperationException is routine for some attributes;
      // log it quietly, anything else loudly.
      if (e.getCause() instanceof UnsupportedOperationException) {
        LOG.debug("getting attribute {} of {} threw an exception", attName, oname, e);
      } else {
        LOG.error("getting attribute {} of {} threw an exception", attName, oname, e);
      }
      return;
    } catch (RuntimeErrorException e) {
      LOG.error("getting attribute {} of {} threw an exception", attName, oname, e);
      return;
    } catch (MBeanException | RuntimeException | ReflectionException e) {
      LOG.error("getting attribute {} of {} threw an exception", attName, oname, e);
      return;
    } catch (AttributeNotFoundException | InstanceNotFoundException e) {
      return;
    }
    writeAttribute(jg, attName, value);
  }

  private void writeAttribute(JsonGenerator jg, String attName, Object value)
      throws IOException {
    jg.writeFieldName(attName);
    writeObject(jg, value);
  }

  private void writeObject(JsonGenerator jg, Object value) throws IOException {
    if (value == null) {
      jg.writeNull();
      return;
    }
    Class<?> c = value.getClass();
    if (c.isArray()) {
      jg.writeStartArray();
      int len = Array.getLength(value);
      for (int j = 0; j < len; ++j) {
        writeObject(jg, Array.get(value, j));
      }
      jg.writeEndArray();
    } else if (value instanceof Number) {
      jg.writeNumber(value.toString());
    } else if (value instanceof Boolean) {
      jg.writeBoolean((Boolean) value);
    } else if (value instanceof CompositeData) {
      CompositeData cds = (CompositeData) value;
      CompositeType comp = cds.getCompositeType();
      jg.writeStartObject();
      for (String key : comp.keySet()) {
        writeAttribute(jg, key, cds.get(key));
      }
      jg.writeEndObject();
    } else if (value instanceof TabularData) {
      jg.writeStartArray();
      for (Object entry : ((TabularData) value).values()) {
        writeObject(jg, entry);
      }
      jg.writeEndArray();
    } else {
      jg.writeString(value.toString());
    }
  }
}
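
FetchMetrics closely follows the JSON layout of Hadoop's /jmx servlet: it queries the platform MBeanServer, then recursively serializes every readable attribute, including CompositeData and TabularData values. A self-contained usage sketch (the demo class is hypothetical; java.lang:type=Memory is a bean every JVM registers):

import org.apache.hadoop.hdds.scm.FetchMetrics;

// Hypothetical demo class, not part of this change.
public class FetchMetricsDemo {
  public static void main(String[] args) {
    FetchMetrics fetcher = new FetchMetrics();
    // Fetch one well-known JVM bean as pretty-printed JSON.
    System.out.println(fetcher.getMetrics("java.lang:type=Memory"));
    // A null query falls back to "*:*", i.e. every registered bean.
    System.out.println(fetcher.getMetrics(null));
  }
}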
@@ -72,6 +72,8 @@
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetPipelineResponseProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetSafeModeRuleStatusesRequestProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetSafeModeRuleStatusesResponseProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetMetricsRequestProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetMetricsResponseProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.InSafeModeRequestProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.InSafeModeResponseProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ListPipelineRequestProto;
@@ -714,6 +716,12 @@ public ScmContainerLocationResponse processRequest(
.setDecommissionScmResponse(decommissionScm(
request.getDecommissionScmRequest()))
.build();
case GetMetrics:
return ScmContainerLocationResponse.newBuilder()
.setCmdType(request.getCmdType())
.setStatus(Status.OK)
.setGetMetricsResponse(getMetrics(request.getGetMetricsRequest()))
.build();
default:
throw new IllegalArgumentException(
"Unknown command type: " + request.getCmdType());
@@ -1287,4 +1295,8 @@ public DecommissionScmResponseProto decommissionScm(
return impl.decommissionScm(
request.getScmId());
}

public GetMetricsResponseProto getMetrics(GetMetricsRequestProto request) throws IOException {
// An unset optional query means "fetch everything"; pass null so the
// implementation falls back to the "*:*" wildcard instead of parsing "".
String query = request.hasQuery() ? request.getQuery() : null;
return GetMetricsResponseProto.newBuilder()
.setMetricsJson(impl.getMetrics(query)).build();
}
}
@@ -63,6 +63,7 @@
import org.apache.hadoop.hdds.scm.ha.SCMRatisServer;
import org.apache.hadoop.hdds.scm.ha.SCMRatisServerImpl;
import org.apache.hadoop.hdds.scm.node.DatanodeUsageInfo;
import org.apache.hadoop.hdds.scm.FetchMetrics;
import org.apache.hadoop.hdds.scm.node.NodeStatus;
import org.apache.hadoop.hdds.scm.node.states.NodeNotFoundException;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
@@ -1373,4 +1374,10 @@ public DecommissionScmResponseProto decommissionScm(
}
return decommissionScmResponseBuilder.build();
}

@Override
public String getMetrics(String query) throws IOException {
FetchMetrics fetchMetrics = new FetchMetrics();
return fetchMetrics.getMetrics(query);
}
}
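
FetchMetrics keeps no per-request state (just the platform MBeanServer and a JsonFactory), so constructing one per RPC works but is unnecessary. A hedged alternative, assuming the enclosing class is the SCM protocol server shown here, is to hold a single instance:

// Hypothetical refactor sketch: reuse one FetchMetrics per server instance.
private final FetchMetrics fetchMetrics = new FetchMetrics();

@Override
public String getMetrics(String query) throws IOException {
  return fetchMetrics.getMetrics(query);
}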
@@ -0,0 +1,46 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdds.scm.node;

import org.apache.hadoop.hdds.scm.FetchMetrics;
import org.junit.jupiter.api.Test;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static org.junit.jupiter.api.Assertions.assertTrue;

class TestFetchMetrics {
  private static final FetchMetrics fetchMetrics = new FetchMetrics();

  @Test
  public void testFetchAll() {
    String result = fetchMetrics.getMetrics(null);
    Pattern p = Pattern.compile("beans", Pattern.MULTILINE);
    Matcher m = p.matcher(result);
    assertTrue(m.find());
  }

  @Test
  public void testFetchFiltered() {
    // Note: "beans" also matches the empty envelope {"beans":[]}, so this
    // verifies the query is accepted rather than that the bean is present.
    String result = fetchMetrics.getMetrics(
        "Hadoop:service=StorageContainerManager,name=NodeDecommissionMetrics");
    Pattern p = Pattern.compile("beans", Pattern.MULTILINE);
    Matcher m = p.matcher(result);
    assertTrue(m.find());
  }
}