From bf4c68eb9e80466cf285271b0d37d9a117f7ceca Mon Sep 17 00:00:00 2001 From: xBis7 Date: Fri, 14 Oct 2022 13:53:02 +0300 Subject: [PATCH 01/18] hostname parameter for datanode subcommands --- .../hadoop/hdds/DFSConfigKeysLegacy.java | 3 +- .../hadoop/hdds/scm/client/ScmClient.java | 6 +- .../StorageContainerLocationProtocol.java | 6 +- ...ocationProtocolClientSideTranslatorPB.java | 5 +- .../src/main/proto/ScmAdminProtocol.proto | 1 + .../src/main/resources/proto.lock | 5 ++ .../scm/node/NodeDecommissionManager.java | 16 +--- .../hadoop/hdds/scm/node/NodeManager.java | 13 ++- .../hadoop/hdds/scm/node/SCMNodeManager.java | 79 +++++++++++++------ ...ocationProtocolServerSideTranslatorPB.java | 8 +- .../scm/server/SCMBlockProtocolServer.java | 2 +- .../scm/server/SCMClientProtocolServer.java | 12 ++- .../hdds/scm/container/MockNodeManager.java | 29 ++++++- .../scm/container/SimpleMockNodeManager.java | 7 +- .../hdds/scm/node/TestSCMNodeManager.java | 50 +++++------- .../testutils/ReplicationNodeManagerMock.java | 7 +- .../scm/cli/ContainerOperationClient.java | 14 +++- .../scm/cli/datanode/ListInfoSubcommand.java | 11 ++- .../scm/cli/datanode/UsageInfoSubcommand.java | 18 +++-- .../main/smoketest/admincli/datanode.robot | 2 +- .../hadoop/ozone/TestContainerOperations.java | 2 +- 21 files changed, 196 insertions(+), 100 deletions(-) diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java index ef88a79dcb85..0cf1d12b5257 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java @@ -39,11 +39,10 @@ private DFSConfigKeysLegacy() { public static final String DFS_DATANODE_DATA_DIR_KEY = "dfs.datanode.data.dir"; + @Deprecated public static final String DFS_DATANODE_USE_DN_HOSTNAME = "dfs.datanode.use.datanode.hostname"; - public static 
final boolean DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT = false; - public static final String DFS_XFRAME_OPTION_ENABLED = "dfs.xframe.enabled"; public static final boolean DFS_XFRAME_OPTION_ENABLED_DEFAULT = true; diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java index daeb5c5ddfca..d8ecc92d67f9 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java @@ -368,16 +368,18 @@ StartContainerBalancerResponseProto startContainerBalancer( int resetDeletedBlockRetryCount(List txIDs) throws IOException; /** - * Get usage information of datanode by ipaddress or uuid. + * Get usage information of datanode by ipaddress or uuid or hostname. * * @param ipaddress datanode ipaddress String * @param uuid datanode uuid String + * @param hostname datanode hostname String * @return List of DatanodeUsageInfoProto. Each element contains info such as * capacity, SCMused, and remaining space. * @throws IOException */ List getDatanodeUsageInfo(String ipaddress, - String uuid) + String uuid, + String hostname) throws IOException; /** diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java index 34bd2748f688..04cac9878560 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java @@ -386,10 +386,11 @@ StartContainerBalancerResponseProto startContainerBalancer( boolean getContainerBalancerStatus() throws IOException; /** - * Get Datanode usage information by ip or uuid. 
+ * Get Datanode usage information by ip or uuid or hostname. * * @param ipaddress datanode IP address String * @param uuid datanode UUID String + * @param hostname datanode hostname address String * @param clientVersion Client's version number * @return List of DatanodeUsageInfoProto. Each element contains info such as * capacity, SCMused, and remaining space. @@ -397,7 +398,8 @@ StartContainerBalancerResponseProto startContainerBalancer( * @see org.apache.hadoop.ozone.ClientVersion */ List getDatanodeUsageInfo( - String ipaddress, String uuid, int clientVersion) throws IOException; + String ipaddress, String uuid, + String hostname, int clientVersion) throws IOException; /** * Get usage information of most or least used datanodes. diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java index 488d970cf2bb..e7b29a2d3fd0 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java @@ -903,18 +903,21 @@ public boolean getContainerBalancerStatus() throws IOException { * * @param ipaddress Address String * @param uuid UUID String + * @param hostname Hostname String * @return List of DatanodeUsageInfoProto. Each element contains info such as * capacity, SCMUsed, and remaining space. 
* @throws IOException */ @Override public List getDatanodeUsageInfo( - String ipaddress, String uuid, int clientVersion) throws IOException { + String ipaddress, String uuid, + String hostname, int clientVersion) throws IOException { DatanodeUsageInfoRequestProto request = DatanodeUsageInfoRequestProto.newBuilder() .setIpaddress(ipaddress) .setUuid(uuid) + .setHostname(hostname) .build(); DatanodeUsageInfoResponseProto response = diff --git a/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto b/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto index ccb5e2155e44..0ba6e7696d8c 100644 --- a/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto +++ b/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto @@ -319,6 +319,7 @@ message DatanodeUsageInfoRequestProto { optional string uuid = 2; optional bool mostUsed = 3; optional uint32 count = 4; + optional string hostname = 5; } message DatanodeUsageInfoResponseProto { diff --git a/hadoop-hdds/interface-admin/src/main/resources/proto.lock b/hadoop-hdds/interface-admin/src/main/resources/proto.lock index ec40a30649d4..d8d88067487d 100644 --- a/hadoop-hdds/interface-admin/src/main/resources/proto.lock +++ b/hadoop-hdds/interface-admin/src/main/resources/proto.lock @@ -888,6 +888,11 @@ "id": 4, "name": "count", "type": "uint32" + }, + { + "id": 5, + "name": "hostname", + "type": "string" } ] }, diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java index 1ea04cdfc3ce..0bed542f263c 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hdds.scm.ha.SCMContext; import 
org.apache.hadoop.hdds.scm.node.states.NodeNotFoundException; import org.apache.hadoop.hdds.server.events.EventPublisher; -import org.apache.hadoop.hdfs.DFSConfigKeys; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -57,7 +56,6 @@ public class NodeDecommissionManager { private EventPublisher eventQueue; private ReplicationManager replicationManager; private OzoneConfiguration conf; - private boolean useHostnames; private long monitorInterval; private static final Logger LOG = @@ -115,13 +113,9 @@ private List mapHostnamesToDatanodes(List hosts) throw new InvalidHostStringException("Unable to resolve host " + host.getRawHostname(), e); } - String dnsName; - if (useHostnames) { - dnsName = addr.getHostName(); - } else { - dnsName = addr.getHostAddress(); - } - List found = nodeManager.getNodesByAddress(dnsName); + String dnsName = addr.getHostAddress(); + + List found = nodeManager.getNodesByIpAddress(dnsName); if (found.size() == 0) { throw new InvalidHostStringException("Host " + host.getRawHostname() + " (" + dnsName + ") is not running any datanodes registered" @@ -186,10 +180,6 @@ public NodeDecommissionManager(OzoneConfiguration config, NodeManager nm, new ThreadFactoryBuilder().setNameFormat("DatanodeAdminManager-%d") .setDaemon(true).build()); - useHostnames = conf.getBoolean( - DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME, - DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); - monitorInterval = conf.getTimeDuration( ScmConfigKeys.OZONE_SCM_DATANODE_ADMIN_MONITOR_INTERVAL, ScmConfigKeys.OZONE_SCM_DATANODE_ADMIN_MONITOR_INTERVAL_DEFAULT, diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java index 8f72375bcd4f..2c17b4a32eb5 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java @@ 
-347,13 +347,22 @@ int getNodeQueuedCommandCount(DatanodeDetails datanodeDetails, DatanodeDetails getNodeByUuid(String uuid); /** - * Given datanode address(Ipaddress or hostname), returns a list of + * Given datanode address(Ipaddress), returns a list of * DatanodeDetails for the datanodes running at that address. * * @param address datanode address * @return the given datanode, or empty list if none found */ - List getNodesByAddress(String address); + List getNodesByIpAddress(String address); + + /** + * Given datanode hostname, returns a list of + * DatanodeDetails for the datanodes running at that hostname. + * + * @param hostname datanode hostname address + * @return the given datanode, or empty list if none found + */ + List getNodesByHostName(String hostname); /** * Get cluster map as in network topology for this node manager. diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java index c5ec348f2a96..a16ba7bd99c8 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java @@ -121,9 +121,10 @@ public class SCMNodeManager implements NodeManager { private final SCMStorageConfig scmStorageConfig; private final NetworkTopology clusterMap; private final DNSToSwitchMapping dnsToSwitchMapping; - private final boolean useHostname; private final ConcurrentHashMap> dnsToUuidMap = new ConcurrentHashMap<>(); + private final ConcurrentHashMap> hostNmToUuidMap = + new ConcurrentHashMap<>(); private final int numPipelinesPerMetadataVolume; private final int heavyNodeCriteria; private final HDDSLayoutVersionManager scmLayoutVersionManager; @@ -159,9 +160,6 @@ public SCMNodeManager(OzoneConfiguration conf, this.dnsToSwitchMapping = ((newInstance instanceof CachedDNSToSwitchMapping) ? 
newInstance : new CachedDNSToSwitchMapping(newInstance)); - this.useHostname = conf.getBoolean( - DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME, - DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); this.numPipelinesPerMetadataVolume = conf.getInt(ScmConfigKeys.OZONE_SCM_PIPELINE_PER_METADATA_VOLUME, ScmConfigKeys.OZONE_SCM_PIPELINE_PER_METADATA_VOLUME_DEFAULT); @@ -365,21 +363,14 @@ public RegisteredCommand register( InetAddress dnAddress = Server.getRemoteIp(); if (dnAddress != null) { // Mostly called inside an RPC, update ip - if (!useHostname) { - datanodeDetails.setHostName(dnAddress.getHostName()); - } + datanodeDetails.setHostName(dnAddress.getHostName()); datanodeDetails.setIpAddress(dnAddress.getHostAddress()); } - String dnsName; - String networkLocation; + String dnsName = datanodeDetails.getIpAddress(); + String hostName = datanodeDetails.getHostName(); datanodeDetails.setNetworkName(datanodeDetails.getUuidString()); - if (useHostname) { - dnsName = datanodeDetails.getHostName(); - } else { - dnsName = datanodeDetails.getIpAddress(); - } - networkLocation = nodeResolve(dnsName); + String networkLocation = nodeResolve(dnsName); if (networkLocation != null) { datanodeDetails.setNetworkLocation(networkLocation); } @@ -392,6 +383,7 @@ public RegisteredCommand register( DatanodeDetails dn = nodeStateManager.getNode(datanodeDetails); Preconditions.checkState(dn.getParent() != null); addEntryToDnsToUuidMap(dnsName, datanodeDetails.getUuidString()); + addEntryToHostNmToUuidMap(hostName, datanodeDetails.getUuidString()); // Updating Node Report, as registration is successful processNodeReport(datanodeDetails, nodeReport); LOG.info("Registered Data node : {}", datanodeDetails); @@ -419,16 +411,14 @@ public RegisteredCommand register( datanodeDetails); clusterMap.update(datanodeInfo, datanodeDetails); - String oldDnsName; - if (useHostname) { - oldDnsName = datanodeInfo.getHostName(); - } else { - oldDnsName = datanodeInfo.getIpAddress(); - } + String 
oldDnsName = datanodeInfo.getIpAddress(); + String oldHostName = datanodeInfo.getHostName(); updateEntryFromDnsToUuidMap(oldDnsName, dnsName, datanodeDetails.getUuidString()); - + updateEntryFromHostNmToUuidMap(oldHostName, + hostName, + datanodeDetails.getUuidString()); nodeStateManager.updateNode(datanodeDetails, layoutInfo); DatanodeDetails dn = nodeStateManager.getNode(datanodeDetails); Preconditions.checkState(dn.getParent() != null); @@ -468,6 +458,17 @@ private synchronized void addEntryToDnsToUuidMap( dnList.add(uuid); } + @SuppressFBWarnings(value = "AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION") + private synchronized void addEntryToHostNmToUuidMap( + String hostName, String uuid) { + Set dnList = hostNmToUuidMap.get(hostName); + if (dnList == null) { + dnList = ConcurrentHashMap.newKeySet(); + hostNmToUuidMap.put(hostName, dnList); + } + dnList.add(uuid); + } + private synchronized void removeEntryFromDnsToUuidMap(String dnsName) { if (!dnsToUuidMap.containsKey(dnsName)) { return; } @@ -481,6 +482,19 @@ private synchronized void removeEntryFromDnsToUuidMap(String dnsName) { } } + private synchronized void removeEntryFromHostNmToUuidMap(String uuid, String hostName) { + if (!hostNmToUuidMap.containsKey(hostName)) { + return; + } + Set dnSet = hostNmToUuidMap.get(hostName); + if (dnSet.contains(uuid)) { + dnSet.remove(uuid); + } + if (dnSet.isEmpty()) { + hostNmToUuidMap.remove(hostName); + } + } + private synchronized void updateEntryFromDnsToUuidMap(String oldDnsName, String newDnsName, String uuid) { @@ -488,6 +502,13 @@ private synchronized void updateEntryFromDnsToUuidMap(String oldDnsName, addEntryToDnsToUuidMap(newDnsName, uuid); } + private synchronized void updateEntryFromHostNmToUuidMap(String oldHostName, + String newHostName, + String uuid) { + removeEntryFromHostNmToUuidMap(uuid, oldHostName); + addEntryToHostNmToUuidMap(newHostName, uuid); + } + /** * Send heartbeat to indicate the datanode is alive and doing well.
* @@ -1217,13 +1238,23 @@ public DatanodeDetails getNodeByUuid(String uuid) { * @return the given datanode, or empty list if none found */ @Override - public List getNodesByAddress(String address) { + public List getNodesByIpAddress(String address) { + return getNodesByAddress(address, dnsToUuidMap); + } + + @Override + public List getNodesByHostName(String hostname) { + return getNodesByAddress(hostname, hostNmToUuidMap); + } + + private List getNodesByAddress( + String address, ConcurrentHashMap> addressToUuidMap) { List results = new LinkedList<>(); if (Strings.isNullOrEmpty(address)) { LOG.warn("address is null"); return results; } - Set uuids = dnsToUuidMap.get(address); + Set uuids = addressToUuidMap.get(address); if (uuids == null) { LOG.warn("Cannot find node for address {}", address); return results; diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java index e59c984174f5..ec0638995d15 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java @@ -1126,10 +1126,12 @@ public DatanodeUsageInfoResponseProto getDatanodeUsageInfo( request, int clientVersion) throws IOException { List infoList; - // get info by ip or uuid - if (request.hasUuid() || request.hasIpaddress()) { + // get info by ip or uuid or hostname + if (request.hasUuid() || + request.hasIpaddress() || + request.hasHostname()) { infoList = impl.getDatanodeUsageInfo(request.getIpaddress(), - request.getUuid(), clientVersion); + request.getUuid(), request.getHostname(), clientVersion); } else { // get most or least used nodes infoList = 
impl.getDatanodeUsageInfo(request.getMostUsed(), request.getCount(), clientVersion); diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMBlockProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMBlockProtocolServer.java index 600932c23495..a30324bc4bbf 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMBlockProtocolServer.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMBlockProtocolServer.java @@ -328,7 +328,7 @@ public List sortDatanodes(List nodes, NodeManager nodeManager = scm.getScmNodeManager(); Node client = null; List possibleClients = - nodeManager.getNodesByAddress(clientMachine); + nodeManager.getNodesByIpAddress(clientMachine); if (possibleClients.size() > 0) { client = possibleClients.get(0); } diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java index 21d179b59e45..9b1273ecc599 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java @@ -1001,15 +1001,17 @@ public boolean getContainerBalancerStatus() { * Get Datanode usage info such as capacity, SCMUsed, and remaining by ip * or uuid. * - * @param ipaddress Datanode Address String + * @param ipaddress Datanode Address IP String * @param uuid Datanode UUID String + * @param hostname Datanode Address hostname String * @return List of DatanodeUsageInfoProto. Each element contains usage info * such as capacity, SCMUsed, and remaining space. 
* @throws IOException if admin authentication fails */ @Override public List getDatanodeUsageInfo( - String ipaddress, String uuid, int clientVersion) throws IOException { + String ipaddress, String uuid, + String hostname, int clientVersion) throws IOException { // check admin authorisation try { @@ -1019,12 +1021,14 @@ public List getDatanodeUsageInfo( throw e; } - // get datanodes by ip or uuid + // get datanodes by ip or uuid or hostname List nodes = new ArrayList<>(); if (!Strings.isNullOrEmpty(uuid)) { nodes.add(scm.getScmNodeManager().getNodeByUuid(uuid)); } else if (!Strings.isNullOrEmpty(ipaddress)) { - nodes = scm.getScmNodeManager().getNodesByAddress(ipaddress); + nodes = scm.getScmNodeManager().getNodesByIpAddress(ipaddress); + } else if (!Strings.isNullOrEmpty(hostname)) { + nodes = scm.getScmNodeManager().getNodesByHostName(hostname); } else { throw new IOException( "Could not get datanode with the specified parameters." diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java index e1eaf251f51c..386ae1475469 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java @@ -111,6 +111,7 @@ public class MockNodeManager implements NodeManager { private final Node2ContainerMap node2ContainerMap; private NetworkTopology clusterMap; private ConcurrentMap> dnsToUuidMap; + private ConcurrentMap> hostNmToUuidMap; private int numHealthyDisksPerDatanode; private int numRaftLogDisksPerDatanode; private int numPipelinePerDatanode; @@ -123,6 +124,7 @@ public class MockNodeManager implements NodeManager { this.node2PipelineMap = new Node2PipelineMap(); this.node2ContainerMap = new Node2ContainerMap(); this.dnsToUuidMap = new ConcurrentHashMap<>(); + this.hostNmToUuidMap = new 
ConcurrentHashMap<>(); this.aggregateStat = new SCMNodeStat(); this.clusterMap = new NetworkTopologyImpl(new OzoneConfiguration()); } @@ -727,6 +729,8 @@ public RegisteredCommand register(DatanodeDetails datanodeDetails, Collections.emptySet()); addEntryTodnsToUuidMap(datanodeDetails.getIpAddress(), datanodeDetails.getUuidString()); + addEntryToHostNmToUuidMap(datanodeDetails.getHostName(), + datanodeDetails.getUuidString()); if (clusterMap != null) { datanodeDetails.setNetworkName(datanodeDetails.getUuidString()); clusterMap.add(datanodeDetails); @@ -754,6 +758,16 @@ private synchronized void addEntryTodnsToUuidMap( dnList.add(uuid); } + private synchronized void addEntryToHostNmToUuidMap( + String dnsName, String uuid) { + Set dnList = hostNmToUuidMap.get(dnsName); + if (dnList == null) { + dnList = ConcurrentHashMap.newKeySet(); + hostNmToUuidMap.put(dnsName, dnList); + } + dnList.add(uuid); + } + /** * Send heartbeat to indicate the datanode is alive and doing well. * @@ -855,9 +869,20 @@ public DatanodeDetails getNodeByUuid(String uuid) { } @Override - public List getNodesByAddress(String address) { + public List getNodesByIpAddress(String address) { + return getNodesByAddress(address, dnsToUuidMap); + } + + @Override + public List getNodesByHostName(String hostName) { + return getNodesByAddress(hostName, hostNmToUuidMap); + } + + private List getNodesByAddress( + String address, ConcurrentMap> addressToUuidMap) { List results = new LinkedList<>(); - Set uuids = dnsToUuidMap.get(address); + Set uuids = addressToUuidMap.get(address); + if (uuids == null) { return results; } diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java index 22e01b977093..75ff52c20236 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java +++ 
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java @@ -333,7 +333,12 @@ public DatanodeDetails getNodeByUuid(String uuid) { } @Override - public List getNodesByAddress(String address) { + public List getNodesByIpAddress(String address) { + return null; + } + + @Override + public List getNodesByHostName(String hostName) { return null; } diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java index 73dc7b61759c..63d9a7d8423b 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java @@ -33,7 +33,6 @@ import java.util.concurrent.TimeoutException; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdds.DFSConfigKeysLegacy; import org.apache.hadoop.hdds.HddsConfigKeys; import org.apache.hadoop.hdds.client.RatisReplicationConfig; import org.apache.hadoop.hdds.client.ReplicationConfig; @@ -1751,7 +1750,7 @@ public void testHandlingSCMCommandEvent() @Test public void testScmRegisterNodeWithIpAddress() throws IOException, InterruptedException, AuthenticationException { - testScmRegisterNodeWithNetworkTopology(false); + testScmRegisterNodeWithNetworkTopology(); } /** @@ -1761,7 +1760,7 @@ public void testScmRegisterNodeWithIpAddress() @Test public void testScmRegisterNodeWithHostname() throws IOException, InterruptedException, AuthenticationException { - testScmRegisterNodeWithNetworkTopology(true); + testScmRegisterNodeWithNetworkTopology(); } /** @@ -1771,7 +1770,7 @@ public void testScmRegisterNodeWithHostname() @Test public void testgetNodesByAddressWithIpAddress() throws IOException, InterruptedException, AuthenticationException { - testGetNodesByAddress(false); + testGetNodesByAddress(); } /** @@ -1780,7 +1779,7 @@ 
public void testgetNodesByAddressWithIpAddress() @Test public void testgetNodesByAddressWithHostname() throws IOException, InterruptedException, AuthenticationException { - testGetNodesByAddress(true); + testGetNodesByAddress(); } /** @@ -1828,7 +1827,7 @@ public void testScmRegisterNodeWith4LayerNetworkTopology() } } - private void testScmRegisterNodeWithNetworkTopology(boolean useHostname) + private void testScmRegisterNodeWithNetworkTopology() throws IOException, InterruptedException, AuthenticationException { OzoneConfiguration conf = getConf(); conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 1000, @@ -1844,9 +1843,6 @@ private void testScmRegisterNodeWithNetworkTopology(boolean useHostname) conf.set(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, "org.apache.hadoop.net.TableMapping"); conf.set(NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY, mapFile); - if (useHostname) { - conf.set(DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME, "true"); - } final int nodeCount = hostNames.length; // use default IP address to resolve node try (SCMNodeManager nodeManager = createNodeManager(conf)) { @@ -1868,13 +1864,10 @@ private void testScmRegisterNodeWithNetworkTopology(boolean useHostname) assertEquals("/rack1", node.getNetworkLocation())); // test get node - if (useHostname) { - Arrays.stream(hostNames).forEach(hostname -> assertNotEquals(0, - nodeManager.getNodesByAddress(hostname).size())); - } else { - Arrays.stream(ipAddress).forEach(ip -> assertNotEquals(0, - nodeManager.getNodesByAddress(ip).size())); - } + Arrays.stream(hostNames).forEach(hostname -> assertNotEquals(0, + nodeManager.getNodesByHostName(hostname).size())); + Arrays.stream(ipAddress).forEach(ip -> assertNotEquals(0, + nodeManager.getNodesByIpAddress(ip).size())); } } @@ -1943,7 +1936,7 @@ public void testGetNodeInfo() /** * Test add node into a 4-layer network topology during node register. 
*/ - private void testGetNodesByAddress(boolean useHostname) + private void testGetNodesByAddress() throws IOException, InterruptedException, AuthenticationException { OzoneConfiguration conf = getConf(); conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 1000, @@ -1953,10 +1946,6 @@ private void testGetNodesByAddress(boolean useHostname) String[] hostNames = {"host1", "host1", "host2", "host3", "host4"}; String[] ipAddress = {"1.2.3.4", "1.2.3.4", "2.3.4.5", "3.4.5.6", "4.5.6.7"}; - - if (useHostname) { - conf.set(DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME, "true"); - } final int nodeCount = hostNames.length; try (SCMNodeManager nodeManager = createNodeManager(conf)) { for (int i = 0; i < nodeCount; i++) { @@ -1965,16 +1954,15 @@ private void testGetNodesByAddress(boolean useHostname) nodeManager.register(node, null, null); } // test get node - assertEquals(0, nodeManager.getNodesByAddress(null).size()); - if (useHostname) { - assertEquals(2, nodeManager.getNodesByAddress("host1").size()); - assertEquals(1, nodeManager.getNodesByAddress("host2").size()); - assertEquals(0, nodeManager.getNodesByAddress("unknown").size()); - } else { - assertEquals(2, nodeManager.getNodesByAddress("1.2.3.4").size()); - assertEquals(1, nodeManager.getNodesByAddress("2.3.4.5").size()); - assertEquals(0, nodeManager.getNodesByAddress("1.9.8.7").size()); - } + assertEquals(0, nodeManager.getNodesByIpAddress(null).size()); + + assertEquals(2, nodeManager.getNodesByHostName("host1").size()); + assertEquals(1, nodeManager.getNodesByHostName("host2").size()); + assertEquals(0, nodeManager.getNodesByHostName("unknown").size()); + + assertEquals(2, nodeManager.getNodesByIpAddress("1.2.3.4").size()); + assertEquals(1, nodeManager.getNodesByIpAddress("2.3.4.5").size()); + assertEquals(0, nodeManager.getNodesByIpAddress("1.9.8.7").size()); } } diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java 
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java index 436bccb09db8..bd5c448614b4 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java @@ -481,7 +481,12 @@ public DatanodeDetails getNodeByUuid(String address) { } @Override - public List getNodesByAddress(String address) { + public List getNodesByIpAddress(String address) { + return new LinkedList<>(); + } + + @Override + public List getNodesByHostName(String hostName) { return new LinkedList<>(); } diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java index 47f3d412579b..c0c98e1a212a 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java @@ -469,11 +469,21 @@ public int resetDeletedBlockRetryCount(List txIDs) throws IOException { return storageContainerLocationClient.resetDeletedBlockRetryCount(txIDs); } + /** + * Get Datanode Usage information by ipaddress or uuid or hostname. + * + * @param ipaddress datanode ipaddress String + * @param uuid datanode uuid String + * @param hostname datanode hostname String + * @return List of DatanodeUsageInfoProto. Each element contains info such as + * capacity, SCMused, and remaining space. 
+ * @throws IOException + */ @Override public List getDatanodeUsageInfo( - String ipaddress, String uuid) throws IOException { + String ipaddress, String uuid, String hostname) throws IOException { return storageContainerLocationClient.getDatanodeUsageInfo(ipaddress, - uuid, ClientVersion.CURRENT_VERSION); + uuid, hostname, ClientVersion.CURRENT_VERSION); } @Override diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java index 6e6e3cf0ecf4..1520ec962a80 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java @@ -46,11 +46,16 @@ public class ListInfoSubcommand extends ScmSubcommand { defaultValue = "") private String ipaddress; - @CommandLine.Option(names = {"--id"}, + @CommandLine.Option(names = {"--uuid"}, description = "Show info by datanode UUID.", defaultValue = "") private String uuid; + @CommandLine.Option(names = {"--hostname"}, + description = "Show info by datanode hostname.", + defaultValue = "") + private String hostname; + @CommandLine.Option(names = {"--operational-state"}, description = "Show info by datanode NodeOperationalState(" + "IN_SERVICE, " + @@ -76,6 +81,10 @@ public void execute(ScmClient scmClient) throws IOException { allNodes = allNodes.filter(p -> p.getDatanodeDetails().getIpAddress() .compareToIgnoreCase(ipaddress) == 0); } + if (!Strings.isNullOrEmpty(hostname)) { + allNodes = allNodes.filter(p -> p.getDatanodeDetails().getHostName() + .compareToIgnoreCase(hostname) == 0); + } if (!Strings.isNullOrEmpty(uuid)) { allNodes = allNodes.filter(p -> p.getDatanodeDetails().getUuidString().equals(uuid)); diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java 
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java index d404a6ce1e28..08aff8d06be9 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java @@ -45,7 +45,7 @@ name = "usageinfo", description = "List usage information " + "(such as Capacity, SCMUsed, Remaining) of a datanode by IP address " + - "or UUID", + "or UUID or Host name", mixinStandardHelpOptions = true, versionProvider = HddsVersionProvider.class) public class UsageInfoSubcommand extends ScmSubcommand { @@ -69,6 +69,10 @@ private static class ExclusiveArguments { "Show info by datanode UUID.", defaultValue = "") private String uuid; + @CommandLine.Option(names = {"--hostname"}, paramLabel = "HOSTNAME", + description = "Show info by datanode hostname.", defaultValue = "") + private String hostname; + @CommandLine.Option(names = {"-m", "--most-used"}, description = "Show the most used datanodes.", defaultValue = "false") @@ -98,11 +102,12 @@ public void execute(ScmClient scmClient) throws IOException { throw new IOException("Count must be an integer greater than 0."); } - // fetch info by ip or uuid + // fetch info by ip or uuid or hostname if (!Strings.isNullOrEmpty(exclusiveArguments.ipaddress) || - !Strings.isNullOrEmpty(exclusiveArguments.uuid)) { + !Strings.isNullOrEmpty(exclusiveArguments.uuid) || + !Strings.isNullOrEmpty(exclusiveArguments.hostname)) { infoList = scmClient.getDatanodeUsageInfo(exclusiveArguments.ipaddress, - exclusiveArguments.uuid); + exclusiveArguments.uuid, exclusiveArguments.hostname); } else { // get info of most used or least used nodes infoList = scmClient.getDatanodeUsageInfo(exclusiveArguments.mostUsed, count); @@ -129,8 +134,9 @@ public void execute(ScmClient scmClient) throws IOException { private void printInfo(DatanodeUsage info) { System.out.printf("%-13s: %s %n", 
"UUID", info.getDatanodeDetails().getUuid()); - System.out.printf("%-13s: %s (%s) %n", "IP Address", - info.getDatanodeDetails().getIpAddress(), + System.out.printf("%-13s: %s %n", "IP Address", + info.getDatanodeDetails().getIpAddress()); + System.out.printf("%-13s: %s %n", "Hostname", info.getDatanodeDetails().getHostName()); // print capacity in a readable format System.out.printf("%-13s: %s (%s) %n", "Capacity", info.getCapacity() diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot index b4ab5bf25cdd..4212e76da79d 100644 --- a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot +++ b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot @@ -27,7 +27,7 @@ List datanodes Filter list by UUID ${uuid} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$2 }' - ${output} = Execute ozone admin datanode list --id "${uuid}" + ${output} = Execute ozone admin datanode list --uuid "${uuid}" Should contain ${output} Datanode: ${uuid} ${datanodes} = Get Lines Containing String ${output} Datanode: @{lines} = Split To Lines ${datanodes} diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestContainerOperations.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestContainerOperations.java index 708bbc003d18..64f54e41a798 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestContainerOperations.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestContainerOperations.java @@ -117,7 +117,7 @@ public void testDatanodeUsageInfoCompatibility() throws IOException { List usageInfoList = storageClient.getDatanodeUsageInfo( - dn.getIpAddress(), dn.getUuidString()); + dn.getIpAddress(), dn.getUuidString(), dn.getHostName()); for (HddsProtos.DatanodeUsageInfoProto info : usageInfoList) { assertTrue(info.getNode().getPortsList().stream() From 
6db1478b1a36f0e1a5843d5e806374f2fdf70d54 Mon Sep 17 00:00:00 2001 From: Christos Bisias Date: Mon, 17 Oct 2022 20:12:20 +0300 Subject: [PATCH 02/18] getNodesByIpAddress, getNodesByHostName used only by the CLI --- .../hadoop/hdds/DFSConfigKeysLegacy.java | 3 +- .../StorageContainerLocationProtocol.java | 14 ++ ...ocationProtocolClientSideTranslatorPB.java | 15 ++ .../src/main/resources/proto.lock | 5 - .../scm/node/NodeDecommissionManager.java | 16 ++- .../hadoop/hdds/scm/node/NodeManager.java | 13 +- .../hadoop/hdds/scm/node/SCMNodeManager.java | 130 ++++++++++++++---- .../scm/server/SCMBlockProtocolServer.java | 2 +- .../scm/server/SCMClientProtocolServer.java | 16 +++ .../hdds/scm/container/MockNodeManager.java | 42 ++++-- .../scm/container/SimpleMockNodeManager.java | 7 +- .../hdds/scm/node/TestSCMNodeManager.java | 50 ++++--- .../testutils/ReplicationNodeManagerMock.java | 7 +- .../scm/cli/datanode/ListInfoSubcommand.java | 2 +- .../main/smoketest/admincli/datanode.robot | 2 +- 15 files changed, 247 insertions(+), 77 deletions(-) diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java index 0cf1d12b5257..ef88a79dcb85 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/DFSConfigKeysLegacy.java @@ -39,10 +39,11 @@ private DFSConfigKeysLegacy() { public static final String DFS_DATANODE_DATA_DIR_KEY = "dfs.datanode.data.dir"; - @Deprecated public static final String DFS_DATANODE_USE_DN_HOSTNAME = "dfs.datanode.use.datanode.hostname"; + public static final boolean DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT = false; + public static final String DFS_XFRAME_OPTION_ENABLED = "dfs.xframe.enabled"; public static final boolean DFS_XFRAME_OPTION_ENABLED_DEFAULT = true; diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java index 04cac9878560..076005656818 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java @@ -385,6 +385,20 @@ StartContainerBalancerResponseProto startContainerBalancer( */ boolean getContainerBalancerStatus() throws IOException; + /** + * Get Datanode usage information by ip or uuid. + * + * @param ipaddress datanode IP address String + * @param uuid datanode UUID String + * @param clientVersion Client's version number + * @return List of DatanodeUsageInfoProto. Each element contains info such as + * capacity, SCMused, and remaining space. + * @throws IOException + * @see org.apache.hadoop.ozone.ClientVersion + */ + List getDatanodeUsageInfo( + String ipaddress, String uuid, int clientVersion) throws IOException; + /** * Get Datanode usage information by ip or uuid or hostname. * diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java index e7b29a2d3fd0..a5aa2ff9d061 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java @@ -898,6 +898,21 @@ public boolean getContainerBalancerStatus() throws IOException { } + /** + * Builds request for datanode usage information and receives response. 
+ * + * @param ipaddress Address String + * @param uuid UUID String + * @return List of DatanodeUsageInfoProto. Each element contains info such as + * capacity, SCMUsed, and remaining space. + * @throws IOException + */ + @Override + public List getDatanodeUsageInfo( + String ipaddress, String uuid, int clientVersion) throws IOException { + return getDatanodeUsageInfo(ipaddress, uuid, null, clientVersion); + } + /** * Builds request for datanode usage information and receives response. * diff --git a/hadoop-hdds/interface-admin/src/main/resources/proto.lock b/hadoop-hdds/interface-admin/src/main/resources/proto.lock index d8d88067487d..ec40a30649d4 100644 --- a/hadoop-hdds/interface-admin/src/main/resources/proto.lock +++ b/hadoop-hdds/interface-admin/src/main/resources/proto.lock @@ -888,11 +888,6 @@ "id": 4, "name": "count", "type": "uint32" - }, - { - "id": 5, - "name": "hostname", - "type": "string" } ] }, diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java index 0bed542f263c..1ea04cdfc3ce 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hdds.scm.ha.SCMContext; import org.apache.hadoop.hdds.scm.node.states.NodeNotFoundException; import org.apache.hadoop.hdds.server.events.EventPublisher; +import org.apache.hadoop.hdfs.DFSConfigKeys; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -56,6 +57,7 @@ public class NodeDecommissionManager { private EventPublisher eventQueue; private ReplicationManager replicationManager; private OzoneConfiguration conf; + private boolean useHostnames; private long monitorInterval; private static final Logger LOG = @@ -113,9 +115,13 @@ private List 
mapHostnamesToDatanodes(List hosts) throw new InvalidHostStringException("Unable to resolve host " + host.getRawHostname(), e); } - String dnsName = addr.getHostAddress(); - - List found = nodeManager.getNodesByIpAddress(dnsName); + String dnsName; + if (useHostnames) { + dnsName = addr.getHostName(); + } else { + dnsName = addr.getHostAddress(); + } + List found = nodeManager.getNodesByAddress(dnsName); if (found.size() == 0) { throw new InvalidHostStringException("Host " + host.getRawHostname() + " (" + dnsName + ") is not running any datanodes registered" @@ -180,6 +186,10 @@ public NodeDecommissionManager(OzoneConfiguration config, NodeManager nm, new ThreadFactoryBuilder().setNameFormat("DatanodeAdminManager-%d") .setDaemon(true).build()); + useHostnames = conf.getBoolean( + DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME, + DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); + monitorInterval = conf.getTimeDuration( ScmConfigKeys.OZONE_SCM_DATANODE_ADMIN_MONITOR_INTERVAL, ScmConfigKeys.OZONE_SCM_DATANODE_ADMIN_MONITOR_INTERVAL_DEFAULT, diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java index 2c17b4a32eb5..b332a77965e7 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java @@ -347,13 +347,22 @@ int getNodeQueuedCommandCount(DatanodeDetails datanodeDetails, DatanodeDetails getNodeByUuid(String uuid); /** - * Given datanode address(Ipaddress), returns a list of + * Given datanode address(Ipaddress or hostname), returns a list of * DatanodeDetails for the datanodes running at that address. 
* * @param address datanode address * @return the given datanode, or empty list if none found */ - List getNodesByIpAddress(String address); + List getNodesByAddress(String address); + + /** + * Given datanode Ipaddress, returns a list of + * DatanodeDetails for the datanodes running at that address. + * + * @param ipAddress datanode ip address + * @return the given datanode, or empty list if none found + */ + List getNodesByIpAddress(String ipAddress); /** * Given datanode hostname, returns a list of diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java index a16ba7bd99c8..970d2b62bbe4 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java @@ -121,9 +121,12 @@ public class SCMNodeManager implements NodeManager { private final SCMStorageConfig scmStorageConfig; private final NetworkTopology clusterMap; private final DNSToSwitchMapping dnsToSwitchMapping; + private final boolean useHostname; private final ConcurrentHashMap> dnsToUuidMap = new ConcurrentHashMap<>(); - private final ConcurrentHashMap> hostNmToUuidMap = + private final Map> ipAddressToUuidMap = + new ConcurrentHashMap<>(); + private final Map> hostNameToUuidMap = new ConcurrentHashMap<>(); private final int numPipelinesPerMetadataVolume; private final int heavyNodeCriteria; @@ -160,6 +163,9 @@ public SCMNodeManager(OzoneConfiguration conf, this.dnsToSwitchMapping = ((newInstance instanceof CachedDNSToSwitchMapping) ? 
newInstance : new CachedDNSToSwitchMapping(newInstance)); + this.useHostname = conf.getBoolean( + DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME, + DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); this.numPipelinesPerMetadataVolume = conf.getInt(ScmConfigKeys.OZONE_SCM_PIPELINE_PER_METADATA_VOLUME, ScmConfigKeys.OZONE_SCM_PIPELINE_PER_METADATA_VOLUME_DEFAULT); @@ -363,14 +369,23 @@ public RegisteredCommand register( InetAddress dnAddress = Server.getRemoteIp(); if (dnAddress != null) { // Mostly called inside an RPC, update ip - datanodeDetails.setHostName(dnAddress.getHostName()); + if (!useHostname) { + datanodeDetails.setHostName(dnAddress.getHostName()); + } datanodeDetails.setIpAddress(dnAddress.getHostAddress()); } - String dnsName = datanodeDetails.getIpAddress(); + String dnsName; + String networkLocation; + String ipAddress = datanodeDetails.getIpAddress(); String hostName = datanodeDetails.getHostName(); datanodeDetails.setNetworkName(datanodeDetails.getUuidString()); - String networkLocation = nodeResolve(dnsName); + if (useHostname) { + dnsName = datanodeDetails.getHostName(); + } else { + dnsName = datanodeDetails.getIpAddress(); + } + networkLocation = nodeResolve(dnsName); if (networkLocation != null) { datanodeDetails.setNetworkLocation(networkLocation); } @@ -382,8 +397,12 @@ public RegisteredCommand register( // Check that datanode in nodeStateManager has topology parent set DatanodeDetails dn = nodeStateManager.getNode(datanodeDetails); Preconditions.checkState(dn.getParent() != null); - addEntryToDnsToUuidMap(dnsName, datanodeDetails.getUuidString()); - addEntryToHostNmToUuidMap(hostName, datanodeDetails.getUuidString()); + addEntryToDnsToUuidMap(dnsName, + datanodeDetails.getUuidString()); + addEntryToIpAddressToUuidMap(ipAddress, + datanodeDetails.getUuidString()); + addEntryToHostNameToUuidMap(hostName, + datanodeDetails.getUuidString()); // Updating Node Report, as registration is successful processNodeReport(datanodeDetails, 
nodeReport); LOG.info("Registered Data node : {}", datanodeDetails); @@ -411,12 +430,21 @@ public RegisteredCommand register( datanodeDetails); clusterMap.update(datanodeInfo, datanodeDetails); - String oldDnsName = datanodeInfo.getIpAddress(); + String oldDnsName; + String oldIpAddress = datanodeInfo.getIpAddress(); String oldHostName = datanodeInfo.getHostName(); + if (useHostname) { + oldDnsName = datanodeInfo.getHostName(); + } else { + oldDnsName = datanodeInfo.getIpAddress(); + } updateEntryFromDnsToUuidMap(oldDnsName, dnsName, datanodeDetails.getUuidString()); - updateEntryFromHostNmToUuidMap(oldHostName, + updateEntryFromIpAddressToUuidMap(oldIpAddress, + ipAddress, + datanodeDetails.getUuidString()); + updateEntryFromHostNameToUuidMap(oldHostName, hostName, datanodeDetails.getUuidString()); nodeStateManager.updateNode(datanodeDetails, layoutInfo); @@ -458,15 +486,18 @@ private synchronized void addEntryToDnsToUuidMap( dnList.add(uuid); } - @SuppressFBWarnings(value = "AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION") - private synchronized void addEntryToHostNmToUuidMap( + private void addEntryToIpAddressToUuidMap( + String ipAddress, String uuid) { + ipAddressToUuidMap + .computeIfAbsent(ipAddress, any -> ConcurrentHashMap.newKeySet()) + .add(uuid); + } + + private void addEntryToHostNameToUuidMap( String hostName, String uuid) { - Set dnList = hostNmToUuidMap.get(hostName); - if (dnList == null) { - dnList = ConcurrentHashMap.newKeySet(); - hostNmToUuidMap.put(hostName, dnList); - } - dnList.add(uuid); + hostNameToUuidMap + .computeIfAbsent(hostName, any -> ConcurrentHashMap.newKeySet()) + .add(uuid); } private synchronized void removeEntryFromDnsToUuidMap(String dnsName) { @@ -482,16 +513,30 @@ private synchronized void removeEntryFromDnsToUuidMap(String dnsName) { } } - private synchronized void removeEntryFromHostNmToUuidMap(String hostName) { - if (!hostNmToUuidMap.containsKey(hostName)) { + private synchronized void 
removeEntryFromIpAddressToUuidMap( + String ipAddress) { + if (!ipAddressToUuidMap.containsKey(ipAddress)) { return; } - Set dnSet = hostNmToUuidMap.get(hostName); + Set dnSet = ipAddressToUuidMap.get(ipAddress); + if (dnSet.contains(ipAddress)) { + dnSet.remove(ipAddress); + } + if (dnSet.isEmpty()) { + ipAddressToUuidMap.remove(ipAddress); + } + } + + private synchronized void removeEntryFromHostNameToUuidMap(String hostName) { + if (!hostNameToUuidMap.containsKey(hostName)) { + return; + } + Set dnSet = hostNameToUuidMap.get(hostName); if (dnSet.contains(hostName)) { dnSet.remove(hostName); } if (dnSet.isEmpty()) { - hostNmToUuidMap.remove(hostName); + hostNameToUuidMap.remove(hostName); } } @@ -502,11 +547,17 @@ private synchronized void updateEntryFromDnsToUuidMap(String oldDnsName, addEntryToDnsToUuidMap(newDnsName, uuid); } - private synchronized void updateEntryFromHostNmToUuidMap(String oldHostName, - String newHostName, - String uuid) { - removeEntryFromHostNmToUuidMap(oldHostName); - addEntryToHostNmToUuidMap(newHostName, uuid); + private synchronized void updateEntryFromIpAddressToUuidMap( + String oldIpAddress, String newIpAddress, String uuid) { + removeEntryFromIpAddressToUuidMap(oldIpAddress); + addEntryToIpAddressToUuidMap(newIpAddress, uuid); + } + + private synchronized void updateEntryFromHostNameToUuidMap(String oldHostName, + String newHostName, + String uuid) { + removeEntryFromHostNameToUuidMap(oldHostName); + addEntryToHostNameToUuidMap(newHostName, uuid); } /** @@ -1238,17 +1289,36 @@ public DatanodeDetails getNodeByUuid(String uuid) { * @return the given datanode, or empty list if none found */ @Override - public List getNodesByIpAddress(String address) { - return getNodesByAddress(address, dnsToUuidMap); + public List getNodesByAddress(String address) { + return getNodesByIpOrHostname(address, dnsToUuidMap); } + /** + * Get a list of DatanodeDetails based on the ip address. + * Used only by the CLI. 
+ * + * @param ipAddress datanode ip address + * @return list of DatanodeDetails running at that ip address, or an empty list if none found + */ + @Override + public List getNodesByIpAddress(String ipAddress) { + return getNodesByIpOrHostname(ipAddress, ipAddressToUuidMap); + } + + /** + * Get a list of DatanodeDetails based on the hostname. + * Used only by the CLI. + * + * @param hostname datanode hostname + * @return list of DatanodeDetails running at that hostname, or an empty list if none found + */ @Override public List getNodesByHostName(String hostname) { - return getNodesByAddress(hostname, hostNmToUuidMap); + return getNodesByIpOrHostname(hostname, hostNameToUuidMap); } - private List getNodesByAddress( - String address, ConcurrentHashMap> addressToUuidMap) { + private List getNodesByIpOrHostname( + String address, Map> addressToUuidMap) { List results = new LinkedList<>(); if (Strings.isNullOrEmpty(address)) { LOG.warn("address is null"); diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMBlockProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMBlockProtocolServer.java index a30324bc4bbf..600932c23495 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMBlockProtocolServer.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMBlockProtocolServer.java @@ -328,7 +328,7 @@ public List sortDatanodes(List nodes, NodeManager nodeManager = scm.getScmNodeManager(); Node client = null; List possibleClients = - nodeManager.getNodesByIpAddress(clientMachine); + nodeManager.getNodesByAddress(clientMachine); if (possibleClients.size() > 0) { client = possibleClients.get(0); } diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java index 9b1273ecc599..25c3f00e2433 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java +++ 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java @@ -997,6 +997,22 @@ public boolean getContainerBalancerStatus() { return scm.getContainerBalancer().isBalancerRunning(); } + /** + * Get Datanode usage info such as capacity, SCMUsed, and remaining by ip + * or uuid. + * + * @param ipaddress Datanode Address String + * @param uuid Datanode UUID String + * @return List of DatanodeUsageInfoProto. Each element contains usage info + * such as capacity, SCMUsed, and remaining space. + * @throws IOException if admin authentication fails + */ + @Override + public List getDatanodeUsageInfo( + String ipaddress, String uuid, int clientVersion) throws IOException { + return getDatanodeUsageInfo(ipaddress, uuid, null, clientVersion); + } + /** * Get Datanode usage info such as capacity, SCMUsed, and remaining by ip * or uuid. diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java index 386ae1475469..375b13b30014 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java @@ -111,7 +111,8 @@ public class MockNodeManager implements NodeManager { private final Node2ContainerMap node2ContainerMap; private NetworkTopology clusterMap; private ConcurrentMap> dnsToUuidMap; - private ConcurrentMap> hostNmToUuidMap; + private ConcurrentMap> ipAddressToUuidMap; + private ConcurrentMap> hostNameToUuidMap; private int numHealthyDisksPerDatanode; private int numRaftLogDisksPerDatanode; private int numPipelinePerDatanode; @@ -124,7 +125,8 @@ public class MockNodeManager implements NodeManager { this.node2PipelineMap = new Node2PipelineMap(); this.node2ContainerMap = new Node2ContainerMap(); this.dnsToUuidMap = new ConcurrentHashMap<>(); - 
this.hostNmToUuidMap = new ConcurrentHashMap<>(); + this.ipAddressToUuidMap = new ConcurrentHashMap<>(); + this.hostNameToUuidMap = new ConcurrentHashMap<>(); this.aggregateStat = new SCMNodeStat(); this.clusterMap = new NetworkTopologyImpl(new OzoneConfiguration()); } @@ -729,7 +731,9 @@ public RegisteredCommand register(DatanodeDetails datanodeDetails, Collections.emptySet()); addEntryTodnsToUuidMap(datanodeDetails.getIpAddress(), datanodeDetails.getUuidString()); - addEntryToHostNmToUuidMap(datanodeDetails.getHostName(), + addEntryToIpAddressToUuidMap(datanodeDetails.getIpAddress(), + datanodeDetails.getUuidString()); + addEntryToHostNameToUuidMap(datanodeDetails.getHostName(), datanodeDetails.getUuidString()); if (clusterMap != null) { datanodeDetails.setNetworkName(datanodeDetails.getUuidString()); @@ -758,12 +762,22 @@ private synchronized void addEntryTodnsToUuidMap( dnList.add(uuid); } - private synchronized void addEntryToHostNmToUuidMap( + private synchronized void addEntryToIpAddressToUuidMap( + String dnsName, String uuid) { + Set dnList = ipAddressToUuidMap.get(dnsName); + if (dnList == null) { + dnList = ConcurrentHashMap.newKeySet(); + ipAddressToUuidMap.put(dnsName, dnList); + } + dnList.add(uuid); + } + + private synchronized void addEntryToHostNameToUuidMap( String dnsName, String uuid) { - Set dnList = hostNmToUuidMap.get(dnsName); + Set dnList = hostNameToUuidMap.get(dnsName); if (dnList == null) { dnList = ConcurrentHashMap.newKeySet(); - hostNmToUuidMap.put(dnsName, dnList); + hostNameToUuidMap.put(dnsName, dnList); } dnList.add(uuid); } @@ -869,20 +883,24 @@ public DatanodeDetails getNodeByUuid(String uuid) { } @Override - public List getNodesByIpAddress(String address) { - return getNodesByAddress(address, dnsToUuidMap); + public List getNodesByAddress(String address) { + return getNodesByIpOrHostname(address, dnsToUuidMap); + } + + @Override + public List getNodesByIpAddress(String ipAddress) { + return getNodesByIpOrHostname(ipAddress, 
ipAddressToUuidMap); } @Override public List getNodesByHostName(String hostName) { - return getNodesByAddress(hostName, hostNmToUuidMap); + return getNodesByIpOrHostname(hostName, hostNameToUuidMap); } - private List getNodesByAddress( - String address, ConcurrentMap> addressToUuidMap) { + public List getNodesByIpOrHostname( + String address, Map> addressToUuidMap) { List results = new LinkedList<>(); Set uuids = addressToUuidMap.get(address); - if (uuids == null) { return results; } diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java index 75ff52c20236..ac0825eafced 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java @@ -333,7 +333,12 @@ public DatanodeDetails getNodeByUuid(String uuid) { } @Override - public List getNodesByIpAddress(String address) { + public List getNodesByAddress(String address) { + return null; + } + + @Override + public List getNodesByIpAddress(String ipAddress) { return null; } diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java index 63d9a7d8423b..73dc7b61759c 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java @@ -33,6 +33,7 @@ import java.util.concurrent.TimeoutException; import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.hdds.DFSConfigKeysLegacy; import org.apache.hadoop.hdds.HddsConfigKeys; import org.apache.hadoop.hdds.client.RatisReplicationConfig; import 
org.apache.hadoop.hdds.client.ReplicationConfig; @@ -1750,7 +1751,7 @@ public void testHandlingSCMCommandEvent() @Test public void testScmRegisterNodeWithIpAddress() throws IOException, InterruptedException, AuthenticationException { - testScmRegisterNodeWithNetworkTopology(); + testScmRegisterNodeWithNetworkTopology(false); } /** @@ -1760,7 +1761,7 @@ public void testScmRegisterNodeWithIpAddress() @Test public void testScmRegisterNodeWithHostname() throws IOException, InterruptedException, AuthenticationException { - testScmRegisterNodeWithNetworkTopology(); + testScmRegisterNodeWithNetworkTopology(true); } /** @@ -1770,7 +1771,7 @@ public void testScmRegisterNodeWithHostname() @Test public void testgetNodesByAddressWithIpAddress() throws IOException, InterruptedException, AuthenticationException { - testGetNodesByAddress(); + testGetNodesByAddress(false); } /** @@ -1779,7 +1780,7 @@ public void testgetNodesByAddressWithIpAddress() @Test public void testgetNodesByAddressWithHostname() throws IOException, InterruptedException, AuthenticationException { - testGetNodesByAddress(); + testGetNodesByAddress(true); } /** @@ -1827,7 +1828,7 @@ public void testScmRegisterNodeWith4LayerNetworkTopology() } } - private void testScmRegisterNodeWithNetworkTopology() + private void testScmRegisterNodeWithNetworkTopology(boolean useHostname) throws IOException, InterruptedException, AuthenticationException { OzoneConfiguration conf = getConf(); conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 1000, @@ -1843,6 +1844,9 @@ private void testScmRegisterNodeWithNetworkTopology() conf.set(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, "org.apache.hadoop.net.TableMapping"); conf.set(NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY, mapFile); + if (useHostname) { + conf.set(DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME, "true"); + } final int nodeCount = hostNames.length; // use default IP address to resolve node try (SCMNodeManager nodeManager = createNodeManager(conf)) { @@ -1864,10 
+1868,13 @@ private void testScmRegisterNodeWithNetworkTopology() assertEquals("/rack1", node.getNetworkLocation())); // test get node - Arrays.stream(hostNames).forEach(hostname -> assertNotEquals(0, - nodeManager.getNodesByHostName(hostname).size())); - Arrays.stream(ipAddress).forEach(ip -> assertNotEquals(0, - nodeManager.getNodesByIpAddress(ip).size())); + if (useHostname) { + Arrays.stream(hostNames).forEach(hostname -> assertNotEquals(0, + nodeManager.getNodesByAddress(hostname).size())); + } else { + Arrays.stream(ipAddress).forEach(ip -> assertNotEquals(0, + nodeManager.getNodesByAddress(ip).size())); + } } } @@ -1936,7 +1943,7 @@ public void testGetNodeInfo() /** * Test add node into a 4-layer network topology during node register. */ - private void testGetNodesByAddress() + private void testGetNodesByAddress(boolean useHostname) throws IOException, InterruptedException, AuthenticationException { OzoneConfiguration conf = getConf(); conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 1000, @@ -1946,6 +1953,10 @@ private void testGetNodesByAddress() String[] hostNames = {"host1", "host1", "host2", "host3", "host4"}; String[] ipAddress = {"1.2.3.4", "1.2.3.4", "2.3.4.5", "3.4.5.6", "4.5.6.7"}; + + if (useHostname) { + conf.set(DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME, "true"); + } final int nodeCount = hostNames.length; try (SCMNodeManager nodeManager = createNodeManager(conf)) { for (int i = 0; i < nodeCount; i++) { @@ -1954,15 +1965,16 @@ private void testGetNodesByAddress() nodeManager.register(node, null, null); } // test get node - assertEquals(0, nodeManager.getNodesByIpAddress(null).size()); - - assertEquals(2, nodeManager.getNodesByHostName("host1").size()); - assertEquals(1, nodeManager.getNodesByHostName("host2").size()); - assertEquals(0, nodeManager.getNodesByHostName("unknown").size()); - - assertEquals(2, nodeManager.getNodesByIpAddress("1.2.3.4").size()); - assertEquals(1, nodeManager.getNodesByIpAddress("2.3.4.5").size()); - 
assertEquals(0, nodeManager.getNodesByIpAddress("1.9.8.7").size()); + assertEquals(0, nodeManager.getNodesByAddress(null).size()); + if (useHostname) { + assertEquals(2, nodeManager.getNodesByAddress("host1").size()); + assertEquals(1, nodeManager.getNodesByAddress("host2").size()); + assertEquals(0, nodeManager.getNodesByAddress("unknown").size()); + } else { + assertEquals(2, nodeManager.getNodesByAddress("1.2.3.4").size()); + assertEquals(1, nodeManager.getNodesByAddress("2.3.4.5").size()); + assertEquals(0, nodeManager.getNodesByAddress("1.9.8.7").size()); + } } } diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java index bd5c448614b4..2f5eb09cb4a5 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java @@ -481,7 +481,12 @@ public DatanodeDetails getNodeByUuid(String address) { } @Override - public List getNodesByIpAddress(String address) { + public List getNodesByAddress(String address) { + return new LinkedList<>(); + } + + @Override + public List getNodesByIpAddress(String ipAddress) { return new LinkedList<>(); } diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java index 1520ec962a80..cc012a3ae909 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java @@ -46,7 +46,7 @@ public class ListInfoSubcommand extends ScmSubcommand { defaultValue = "") private String ipaddress; - 
@CommandLine.Option(names = {"--uuid"}, + @CommandLine.Option(names = {"--id"}, description = "Show info by datanode UUID.", defaultValue = "") private String uuid; diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot index 4212e76da79d..b4ab5bf25cdd 100644 --- a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot +++ b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot @@ -27,7 +27,7 @@ List datanodes Filter list by UUID ${uuid} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$2 }' - ${output} = Execute ozone admin datanode list --uuid "${uuid}" + ${output} = Execute ozone admin datanode list --id "${uuid}" Should contain ${output} Datanode: ${uuid} ${datanodes} = Get Lines Containing String ${output} Datanode: @{lines} = Split To Lines ${datanodes} From dd1c03f4b28d605eec0ac1b8939153760a6f93fc Mon Sep 17 00:00:00 2001 From: xBis7 Date: Mon, 24 Oct 2022 20:23:13 +0300 Subject: [PATCH 03/18] --address parameter for datanode usageinfo --- .../hadoop/hdds/scm/client/ScmClient.java | 10 +- .../StorageContainerLocationProtocol.java | 22 +--- ...ocationProtocolClientSideTranslatorPB.java | 24 +--- .../src/main/proto/ScmAdminProtocol.proto | 1 - .../hadoop/hdds/scm/node/NodeManager.java | 18 --- .../hadoop/hdds/scm/node/SCMNodeManager.java | 104 +++++------------- ...ocationProtocolServerSideTranslatorPB.java | 8 +- .../scm/server/SCMClientProtocolServer.java | 32 +----- .../hdds/scm/container/MockNodeManager.java | 45 +------- .../scm/container/SimpleMockNodeManager.java | 10 -- .../testutils/ReplicationNodeManagerMock.java | 10 -- .../scm/cli/ContainerOperationClient.java | 11 +- .../scm/cli/datanode/UsageInfoSubcommand.java | 24 ++-- .../hadoop/ozone/TestContainerOperations.java | 2 +- 14 files changed, 62 insertions(+), 259 deletions(-) diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java index d8ecc92d67f9..8da034d4e1db 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java @@ -368,18 +368,16 @@ StartContainerBalancerResponseProto startContainerBalancer( int resetDeletedBlockRetryCount(List txIDs) throws IOException; /** - * Get usage information of datanode by ipaddress or uuid or hostname. + * Get usage information of datanode by address or uuid. * - * @param ipaddress datanode ipaddress String + * @param address datanode address String * @param uuid datanode uuid String - * @param hostname datanode hostname String * @return List of DatanodeUsageInfoProto. Each element contains info such as * capacity, SCMused, and remaining space. * @throws IOException */ - List getDatanodeUsageInfo(String ipaddress, - String uuid, - String hostname) + List getDatanodeUsageInfo(String address, + String uuid) throws IOException; /** diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java index 076005656818..24ec5ba6e88b 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java @@ -386,9 +386,9 @@ StartContainerBalancerResponseProto startContainerBalancer( boolean getContainerBalancerStatus() throws IOException; /** - * Get Datanode usage information by ip or uuid. + * Get Datanode usage information by ip or hostname or uuid. 
* - * @param ipaddress datanode IP address String + * @param address datanode IP address or Hostname String * @param uuid datanode UUID String * @param clientVersion Client's version number * @return List of DatanodeUsageInfoProto. Each element contains info such as @@ -397,23 +397,7 @@ StartContainerBalancerResponseProto startContainerBalancer( * @see org.apache.hadoop.ozone.ClientVersion */ List getDatanodeUsageInfo( - String ipaddress, String uuid, int clientVersion) throws IOException; - - /** - * Get Datanode usage information by ip or uuid or hostname. - * - * @param ipaddress datanode IP address String - * @param uuid datanode UUID String - * @param hostname datanode hostname address String - * @param clientVersion Client's version number - * @return List of DatanodeUsageInfoProto. Each element contains info such as - * capacity, SCMused, and remaining space. - * @throws IOException - * @see org.apache.hadoop.ozone.ClientVersion - */ - List getDatanodeUsageInfo( - String ipaddress, String uuid, - String hostname, int clientVersion) throws IOException; + String address, String uuid, int clientVersion) throws IOException; /** * Get usage information of most or least used datanodes. diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java index a5aa2ff9d061..81efce6f6c36 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java @@ -901,7 +901,7 @@ public boolean getContainerBalancerStatus() throws IOException { /** * Builds request for datanode usage information and receives response. 
* - * @param ipaddress Address String + * @param address Address String * @param uuid UUID String * @return List of DatanodeUsageInfoProto. Each element contains info such as * capacity, SCMUsed, and remaining space. @@ -909,30 +909,12 @@ public boolean getContainerBalancerStatus() throws IOException { */ @Override public List getDatanodeUsageInfo( - String ipaddress, String uuid, int clientVersion) throws IOException { - return getDatanodeUsageInfo(ipaddress, uuid, null, clientVersion); - } - - /** - * Builds request for datanode usage information and receives response. - * - * @param ipaddress Address String - * @param uuid UUID String - * @param hostname Hostname String - * @return List of DatanodeUsageInfoProto. Each element contains info such as - * capacity, SCMUsed, and remaining space. - * @throws IOException - */ - @Override - public List getDatanodeUsageInfo( - String ipaddress, String uuid, - String hostname, int clientVersion) throws IOException { + String address, String uuid, int clientVersion) throws IOException { DatanodeUsageInfoRequestProto request = DatanodeUsageInfoRequestProto.newBuilder() - .setIpaddress(ipaddress) + .setIpaddress(address) .setUuid(uuid) - .setHostname(hostname) .build(); DatanodeUsageInfoResponseProto response = diff --git a/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto b/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto index 0ba6e7696d8c..ccb5e2155e44 100644 --- a/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto +++ b/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto @@ -319,7 +319,6 @@ message DatanodeUsageInfoRequestProto { optional string uuid = 2; optional bool mostUsed = 3; optional uint32 count = 4; - optional string hostname = 5; } message DatanodeUsageInfoResponseProto { diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java index b332a77965e7..8f72375bcd4f 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java @@ -355,24 +355,6 @@ int getNodeQueuedCommandCount(DatanodeDetails datanodeDetails, */ List getNodesByAddress(String address); - /** - * Given datanode Ipaddress, returns a list of - * DatanodeDetails for the datanodes running at that address. - * - * @param ipAddress datanode ip address - * @return the given datanode, or empty list if none found - */ - List getNodesByIpAddress(String ipAddress); - - /** - * Given datanode hostname, returns a list of - * DatanodeDetails for the datanodes running at that hostname. - * - * @param hostname datanode hostname address - * @return the given datanode, or empty list if none found - */ - List getNodesByHostName(String hostname); - /** * Get cluster map as in network topology for this node manager. 
* @return cluster map diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java index 970d2b62bbe4..927ed998dd3b 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java @@ -124,9 +124,7 @@ public class SCMNodeManager implements NodeManager { private final boolean useHostname; private final ConcurrentHashMap> dnsToUuidMap = new ConcurrentHashMap<>(); - private final Map> ipAddressToUuidMap = - new ConcurrentHashMap<>(); - private final Map> hostNameToUuidMap = + private final Map> addressToUuidMap = new ConcurrentHashMap<>(); private final int numPipelinesPerMetadataVolume; private final int heavyNodeCriteria; @@ -399,9 +397,9 @@ public RegisteredCommand register( Preconditions.checkState(dn.getParent() != null); addEntryToDnsToUuidMap(dnsName, datanodeDetails.getUuidString()); - addEntryToIpAddressToUuidMap(ipAddress, + addEntryToAddressToUuidMap(ipAddress, datanodeDetails.getUuidString()); - addEntryToHostNameToUuidMap(hostName, + addEntryToAddressToUuidMap(hostName, datanodeDetails.getUuidString()); // Updating Node Report, as registration is successful processNodeReport(datanodeDetails, nodeReport); @@ -441,10 +439,10 @@ public RegisteredCommand register( updateEntryFromDnsToUuidMap(oldDnsName, dnsName, datanodeDetails.getUuidString()); - updateEntryFromIpAddressToUuidMap(oldIpAddress, + updateEntryFromAddressToUuidMap(oldIpAddress, ipAddress, datanodeDetails.getUuidString()); - updateEntryFromHostNameToUuidMap(oldHostName, + updateEntryFromAddressToUuidMap(oldHostName, hostName, datanodeDetails.getUuidString()); nodeStateManager.updateNode(datanodeDetails, layoutInfo); @@ -486,17 +484,15 @@ private synchronized void addEntryToDnsToUuidMap( dnList.add(uuid); } - private void 
addEntryToIpAddressToUuidMap( - String ipAddress, String uuid) { - ipAddressToUuidMap - .computeIfAbsent(ipAddress, any -> ConcurrentHashMap.newKeySet()) - .add(uuid); - } - - private void addEntryToHostNameToUuidMap( - String hostName, String uuid) { - hostNameToUuidMap - .computeIfAbsent(hostName, any -> ConcurrentHashMap.newKeySet()) + /** + * + * @param address + * @param uuid + */ + private void addEntryToAddressToUuidMap( + String address, String uuid) { + addressToUuidMap + .computeIfAbsent(address, any -> ConcurrentHashMap.newKeySet()) .add(uuid); } @@ -513,30 +509,17 @@ private synchronized void removeEntryFromDnsToUuidMap(String dnsName) { } } - private synchronized void removeEntryFromIpAddressToUuidMap( - String ipAddress) { - if (!ipAddressToUuidMap.containsKey(ipAddress)) { + private synchronized void removeEntryFromAddressToUuidMap( + String address) { + if (!addressToUuidMap.containsKey(address)) { return; } - Set dnSet = ipAddressToUuidMap.get(ipAddress); - if (dnSet.contains(ipAddress)) { - dnSet.remove(ipAddress); + Set dnSet = addressToUuidMap.get(address); + if (dnSet.contains(address)) { + dnSet.remove(address); } if (dnSet.isEmpty()) { - ipAddressToUuidMap.remove(ipAddress); - } - } - - private synchronized void removeEntryFromHostNameToUuidMap(String hostName) { - if (!hostNameToUuidMap.containsKey(hostName)) { - return; - } - Set dnSet = hostNameToUuidMap.get(hostName); - if (dnSet.contains(hostName)) { - dnSet.remove(hostName); - } - if (dnSet.isEmpty()) { - hostNameToUuidMap.remove(hostName); + addressToUuidMap.remove(address); } } @@ -547,17 +530,11 @@ private synchronized void updateEntryFromDnsToUuidMap(String oldDnsName, addEntryToDnsToUuidMap(newDnsName, uuid); } - private synchronized void updateEntryFromIpAddressToUuidMap( - String oldIpAddress, String newIpAddress, String uuid) { - removeEntryFromIpAddressToUuidMap(oldIpAddress); - addEntryToIpAddressToUuidMap(newIpAddress, uuid); - } - - private synchronized void 
updateEntryFromHostNameToUuidMap(String oldHostName, - String newHostName, - String uuid) { - removeEntryFromHostNameToUuidMap(oldHostName); - addEntryToHostNameToUuidMap(newHostName, uuid); + private synchronized void updateEntryFromAddressToUuidMap(String oldAddress, + String newAddress, + String uuid) { + removeEntryFromAddressToUuidMap(oldAddress); + addEntryToAddressToUuidMap(newAddress, uuid); } /** @@ -1290,35 +1267,6 @@ public DatanodeDetails getNodeByUuid(String uuid) { */ @Override public List getNodesByAddress(String address) { - return getNodesByIpOrHostname(address, dnsToUuidMap); - } - - /** - * Get a list of DatanodeDetails based on the ip address. - * Used only by the CLI. - * - * @param ipAddress datanode ip address - * @return - */ - @Override - public List getNodesByIpAddress(String ipAddress) { - return getNodesByIpOrHostname(ipAddress, ipAddressToUuidMap); - } - - /** - * Get a list of DatanodeDetails based on the hostname. - * Used only by the CLI. - * - * @param hostname datanode hostname address - * @return - */ - @Override - public List getNodesByHostName(String hostname) { - return getNodesByIpOrHostname(hostname, hostNameToUuidMap); - } - - private List getNodesByIpOrHostname( - String address, Map> addressToUuidMap) { List results = new LinkedList<>(); if (Strings.isNullOrEmpty(address)) { LOG.warn("address is null"); diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java index ec0638995d15..e59c984174f5 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java @@ -1126,12 +1126,10 @@ public 
DatanodeUsageInfoResponseProto getDatanodeUsageInfo( request, int clientVersion) throws IOException { List infoList; - // get info by ip or uuid or hostname - if (request.hasUuid() || - request.hasIpaddress() || - request.hasHostname()) { + // get info by ip or uuid + if (request.hasUuid() || request.hasIpaddress()) { infoList = impl.getDatanodeUsageInfo(request.getIpaddress(), - request.getUuid(), request.getHostname(), clientVersion); + request.getUuid(), clientVersion); } else { // get most or least used nodes infoList = impl.getDatanodeUsageInfo(request.getMostUsed(), request.getCount(), clientVersion); diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java index 25c3f00e2433..7913f2eb9a3e 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java @@ -999,9 +999,9 @@ public boolean getContainerBalancerStatus() { /** * Get Datanode usage info such as capacity, SCMUsed, and remaining by ip - * or uuid. + * or hostname or uuid. * - * @param ipaddress Datanode Address String + * @param address Datanode Address String * @param uuid Datanode UUID String * @return List of DatanodeUsageInfoProto. Each element contains usage info * such as capacity, SCMUsed, and remaining space. @@ -1009,25 +1009,7 @@ public boolean getContainerBalancerStatus() { */ @Override public List getDatanodeUsageInfo( - String ipaddress, String uuid, int clientVersion) throws IOException { - return getDatanodeUsageInfo(ipaddress, uuid, null, clientVersion); - } - - /** - * Get Datanode usage info such as capacity, SCMUsed, and remaining by ip - * or uuid. 
- * - * @param ipaddress Datanode Address IP String - * @param uuid Datanode UUID String - * @param hostname Datanode Address hostname String - * @return List of DatanodeUsageInfoProto. Each element contains usage info - * such as capacity, SCMUsed, and remaining space. - * @throws IOException if admin authentication fails - */ - @Override - public List getDatanodeUsageInfo( - String ipaddress, String uuid, - String hostname, int clientVersion) throws IOException { + String address, String uuid, int clientVersion) throws IOException { // check admin authorisation try { @@ -1037,14 +1019,12 @@ public List getDatanodeUsageInfo( throw e; } - // get datanodes by ip or uuid or hostname + // get datanodes by ip or uuid List nodes = new ArrayList<>(); if (!Strings.isNullOrEmpty(uuid)) { nodes.add(scm.getScmNodeManager().getNodeByUuid(uuid)); - } else if (!Strings.isNullOrEmpty(ipaddress)) { - nodes = scm.getScmNodeManager().getNodesByIpAddress(ipaddress); - } else if (!Strings.isNullOrEmpty(hostname)) { - nodes = scm.getScmNodeManager().getNodesByHostName(hostname); + } else if (!Strings.isNullOrEmpty(address)) { + nodes = scm.getScmNodeManager().getNodesByAddress(address); } else { throw new IOException( "Could not get datanode with the specified parameters." 
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java index 375b13b30014..e1eaf251f51c 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java @@ -111,8 +111,6 @@ public class MockNodeManager implements NodeManager { private final Node2ContainerMap node2ContainerMap; private NetworkTopology clusterMap; private ConcurrentMap> dnsToUuidMap; - private ConcurrentMap> ipAddressToUuidMap; - private ConcurrentMap> hostNameToUuidMap; private int numHealthyDisksPerDatanode; private int numRaftLogDisksPerDatanode; private int numPipelinePerDatanode; @@ -125,8 +123,6 @@ public class MockNodeManager implements NodeManager { this.node2PipelineMap = new Node2PipelineMap(); this.node2ContainerMap = new Node2ContainerMap(); this.dnsToUuidMap = new ConcurrentHashMap<>(); - this.ipAddressToUuidMap = new ConcurrentHashMap<>(); - this.hostNameToUuidMap = new ConcurrentHashMap<>(); this.aggregateStat = new SCMNodeStat(); this.clusterMap = new NetworkTopologyImpl(new OzoneConfiguration()); } @@ -731,10 +727,6 @@ public RegisteredCommand register(DatanodeDetails datanodeDetails, Collections.emptySet()); addEntryTodnsToUuidMap(datanodeDetails.getIpAddress(), datanodeDetails.getUuidString()); - addEntryToIpAddressToUuidMap(datanodeDetails.getIpAddress(), - datanodeDetails.getUuidString()); - addEntryToHostNameToUuidMap(datanodeDetails.getHostName(), - datanodeDetails.getUuidString()); if (clusterMap != null) { datanodeDetails.setNetworkName(datanodeDetails.getUuidString()); clusterMap.add(datanodeDetails); @@ -762,26 +754,6 @@ private synchronized void addEntryTodnsToUuidMap( dnList.add(uuid); } - private synchronized void addEntryToIpAddressToUuidMap( - String dnsName, String uuid) { - Set 
dnList = ipAddressToUuidMap.get(dnsName); - if (dnList == null) { - dnList = ConcurrentHashMap.newKeySet(); - ipAddressToUuidMap.put(dnsName, dnList); - } - dnList.add(uuid); - } - - private synchronized void addEntryToHostNameToUuidMap( - String dnsName, String uuid) { - Set dnList = hostNameToUuidMap.get(dnsName); - if (dnList == null) { - dnList = ConcurrentHashMap.newKeySet(); - hostNameToUuidMap.put(dnsName, dnList); - } - dnList.add(uuid); - } - /** * Send heartbeat to indicate the datanode is alive and doing well. * @@ -884,23 +856,8 @@ public DatanodeDetails getNodeByUuid(String uuid) { @Override public List getNodesByAddress(String address) { - return getNodesByIpOrHostname(address, dnsToUuidMap); - } - - @Override - public List getNodesByIpAddress(String ipAddress) { - return getNodesByIpOrHostname(ipAddress, ipAddressToUuidMap); - } - - @Override - public List getNodesByHostName(String hostName) { - return getNodesByIpOrHostname(hostName, hostNameToUuidMap); - } - - public List getNodesByIpOrHostname( - String address, Map> addressToUuidMap) { List results = new LinkedList<>(); - Set uuids = addressToUuidMap.get(address); + Set uuids = dnsToUuidMap.get(address); if (uuids == null) { return results; } diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java index ac0825eafced..22e01b977093 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java @@ -337,16 +337,6 @@ public List getNodesByAddress(String address) { return null; } - @Override - public List getNodesByIpAddress(String ipAddress) { - return null; - } - - @Override - public List getNodesByHostName(String hostName) { - return null; - } - @Override public NetworkTopology 
getClusterNetworkTopologyMap() { return null; diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java index 2f5eb09cb4a5..436bccb09db8 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java @@ -485,16 +485,6 @@ public List getNodesByAddress(String address) { return new LinkedList<>(); } - @Override - public List getNodesByIpAddress(String ipAddress) { - return new LinkedList<>(); - } - - @Override - public List getNodesByHostName(String hostName) { - return new LinkedList<>(); - } - @Override public NetworkTopology getClusterNetworkTopologyMap() { return null; diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java index c0c98e1a212a..5ec3e7fa03f5 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java @@ -470,20 +470,19 @@ public int resetDeletedBlockRetryCount(List txIDs) throws IOException { } /** - * Get Datanode Usage information by ipaddress or uuid or hostname. + * Get Datanode Usage information by address or uuid. * - * @param ipaddress datanode ipaddress String + * @param address datanode address String * @param uuid datanode uuid String - * @param hostname datanode hostname String * @return List of DatanodeUsageInfoProto. Each element contains info such as * capacity, SCMused, and remaining space. 
* @throws IOException */ @Override public List getDatanodeUsageInfo( - String ipaddress, String uuid, String hostname) throws IOException { - return storageContainerLocationClient.getDatanodeUsageInfo(ipaddress, - uuid, hostname, ClientVersion.CURRENT_VERSION); + String address, String uuid) throws IOException { + return storageContainerLocationClient.getDatanodeUsageInfo(address, + uuid, ClientVersion.CURRENT_VERSION); } @Override diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java index 08aff8d06be9..e080eddfb03d 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java @@ -45,7 +45,7 @@ name = "usageinfo", description = "List usage information " + "(such as Capacity, SCMUsed, Remaining) of a datanode by IP address " + - "or UUID or Host name", + "or Host name or UUID", mixinStandardHelpOptions = true, versionProvider = HddsVersionProvider.class) public class UsageInfoSubcommand extends ScmSubcommand { @@ -61,18 +61,15 @@ public class UsageInfoSubcommand extends ScmSubcommand { private ExclusiveArguments exclusiveArguments; private static class ExclusiveArguments { - @CommandLine.Option(names = {"--ip"}, paramLabel = "IP", description = - "Show info by datanode ip address.", defaultValue = "") - private String ipaddress; + @CommandLine.Option(names = {"--address"}, paramLabel = "ADDRESS", + description = "Show info by datanode ip or hostname address.", + defaultValue = "") + private String address; @CommandLine.Option(names = {"--uuid"}, paramLabel = "UUID", description = "Show info by datanode UUID.", defaultValue = "") private String uuid; - @CommandLine.Option(names = {"--hostname"}, paramLabel = "HOSTNAME", - description = "Show info by datanode 
hostname.", defaultValue = "") - private String hostname; - @CommandLine.Option(names = {"-m", "--most-used"}, description = "Show the most used datanodes.", defaultValue = "false") @@ -102,12 +99,11 @@ public void execute(ScmClient scmClient) throws IOException { throw new IOException("Count must be an integer greater than 0."); } - // fetch info by ip or uuid or hostname - if (!Strings.isNullOrEmpty(exclusiveArguments.ipaddress) || - !Strings.isNullOrEmpty(exclusiveArguments.uuid) || - !Strings.isNullOrEmpty(exclusiveArguments.hostname)) { - infoList = scmClient.getDatanodeUsageInfo(exclusiveArguments.ipaddress, - exclusiveArguments.uuid, exclusiveArguments.hostname); + // fetch info by ip or hostname or uuid + if (!Strings.isNullOrEmpty(exclusiveArguments.address) || + !Strings.isNullOrEmpty(exclusiveArguments.uuid)) { + infoList = scmClient.getDatanodeUsageInfo(exclusiveArguments.address, + exclusiveArguments.uuid); } else { // get info of most used or least used nodes infoList = scmClient.getDatanodeUsageInfo(exclusiveArguments.mostUsed, count); diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestContainerOperations.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestContainerOperations.java index 64f54e41a798..708bbc003d18 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestContainerOperations.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestContainerOperations.java @@ -117,7 +117,7 @@ public void testDatanodeUsageInfoCompatibility() throws IOException { List usageInfoList = storageClient.getDatanodeUsageInfo( - dn.getIpAddress(), dn.getUuidString(), dn.getHostName()); + dn.getIpAddress(), dn.getUuidString()); for (HddsProtos.DatanodeUsageInfoProto info : usageInfoList) { assertTrue(info.getNode().getPortsList().stream() From e9e531713f7a6a2b313d5223756dac8956a33a3c Mon Sep 17 00:00:00 2001 From: xBis7 Date: Wed, 26 Oct 2022 19:15:21 
+0300 Subject: [PATCH 04/18] removing datanode_use_hostname flag --- .../scm/node/NodeDecommissionManager.java | 18 ++-- .../hadoop/hdds/scm/node/SCMNodeManager.java | 88 ++++--------------- .../hdds/scm/node/TestSCMNodeManager.java | 81 ++++------------- 3 files changed, 44 insertions(+), 143 deletions(-) diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java index 1ea04cdfc3ce..96e08cf29fc3 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java @@ -57,7 +57,6 @@ public class NodeDecommissionManager { private EventPublisher eventQueue; private ReplicationManager replicationManager; private OzoneConfiguration conf; - private boolean useHostnames; private long monitorInterval; private static final Logger LOG = @@ -116,12 +115,17 @@ private List mapHostnamesToDatanodes(List hosts) + host.getRawHostname(), e); } String dnsName; - if (useHostnames) { - dnsName = addr.getHostName(); + String hostName = addr.getHostName(); + String hostAddress = addr.getHostAddress(); + List found = nodeManager.getNodesByAddress(hostAddress); + + if (found.size() == 0) { + found = nodeManager.getNodesByAddress(hostName); + dnsName = hostName; } else { - dnsName = addr.getHostAddress(); + dnsName = hostAddress; } - List found = nodeManager.getNodesByAddress(dnsName); + if (found.size() == 0) { throw new InvalidHostStringException("Host " + host.getRawHostname() + " (" + dnsName + ") is not running any datanodes registered" @@ -186,10 +190,6 @@ public NodeDecommissionManager(OzoneConfiguration config, NodeManager nm, new ThreadFactoryBuilder().setNameFormat("DatanodeAdminManager-%d") .setDaemon(true).build()); - useHostnames = conf.getBoolean( - 
DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME, - DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); - monitorInterval = conf.getTimeDuration( ScmConfigKeys.OZONE_SCM_DATANODE_ADMIN_MONITOR_INTERVAL, ScmConfigKeys.OZONE_SCM_DATANODE_ADMIN_MONITOR_INTERVAL_DEFAULT, diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java index 927ed998dd3b..2e29ec5fc9b0 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java @@ -20,7 +20,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.apache.hadoop.hdds.DFSConfigKeysLegacy; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.DatanodeDetails; @@ -121,11 +120,8 @@ public class SCMNodeManager implements NodeManager { private final SCMStorageConfig scmStorageConfig; private final NetworkTopology clusterMap; private final DNSToSwitchMapping dnsToSwitchMapping; - private final boolean useHostname; private final ConcurrentHashMap> dnsToUuidMap = new ConcurrentHashMap<>(); - private final Map> addressToUuidMap = - new ConcurrentHashMap<>(); private final int numPipelinesPerMetadataVolume; private final int heavyNodeCriteria; private final HDDSLayoutVersionManager scmLayoutVersionManager; @@ -161,9 +157,6 @@ public SCMNodeManager(OzoneConfiguration conf, this.dnsToSwitchMapping = ((newInstance instanceof CachedDNSToSwitchMapping) ? 
newInstance : new CachedDNSToSwitchMapping(newInstance)); - this.useHostname = conf.getBoolean( - DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME, - DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); this.numPipelinesPerMetadataVolume = conf.getInt(ScmConfigKeys.OZONE_SCM_PIPELINE_PER_METADATA_VOLUME, ScmConfigKeys.OZONE_SCM_PIPELINE_PER_METADATA_VOLUME_DEFAULT); @@ -367,23 +360,23 @@ public RegisteredCommand register( InetAddress dnAddress = Server.getRemoteIp(); if (dnAddress != null) { // Mostly called inside an RPC, update ip - if (!useHostname) { - datanodeDetails.setHostName(dnAddress.getHostName()); - } + datanodeDetails.setHostName(dnAddress.getHostName()); + datanodeDetails.setIpAddress(dnAddress.getHostAddress()); } - String dnsName; String networkLocation; String ipAddress = datanodeDetails.getIpAddress(); String hostName = datanodeDetails.getHostName(); datanodeDetails.setNetworkName(datanodeDetails.getUuidString()); - if (useHostname) { - dnsName = datanodeDetails.getHostName(); - } else { - dnsName = datanodeDetails.getIpAddress(); + + // Firstly, use ip address + networkLocation = nodeResolve(ipAddress); + + // If null, use hostname + if (networkLocation == null) { + networkLocation = nodeResolve(hostName); } - networkLocation = nodeResolve(dnsName); if (networkLocation != null) { datanodeDetails.setNetworkLocation(networkLocation); } @@ -395,11 +388,9 @@ public RegisteredCommand register( // Check that datanode in nodeStateManager has topology parent set DatanodeDetails dn = nodeStateManager.getNode(datanodeDetails); Preconditions.checkState(dn.getParent() != null); - addEntryToDnsToUuidMap(dnsName, - datanodeDetails.getUuidString()); - addEntryToAddressToUuidMap(ipAddress, + addEntryToDnsToUuidMap(ipAddress, datanodeDetails.getUuidString()); - addEntryToAddressToUuidMap(hostName, + addEntryToDnsToUuidMap(hostName, datanodeDetails.getUuidString()); // Updating Node Report, as registration is successful processNodeReport(datanodeDetails, 
nodeReport); @@ -428,23 +419,15 @@ public RegisteredCommand register( datanodeDetails); clusterMap.update(datanodeInfo, datanodeDetails); - String oldDnsName; String oldIpAddress = datanodeInfo.getIpAddress(); String oldHostName = datanodeInfo.getHostName(); - if (useHostname) { - oldDnsName = datanodeInfo.getHostName(); - } else { - oldDnsName = datanodeInfo.getIpAddress(); - } - updateEntryFromDnsToUuidMap(oldDnsName, - dnsName, - datanodeDetails.getUuidString()); - updateEntryFromAddressToUuidMap(oldIpAddress, + updateEntryFromDnsToUuidMap(oldIpAddress, ipAddress, datanodeDetails.getUuidString()); - updateEntryFromAddressToUuidMap(oldHostName, + updateEntryFromDnsToUuidMap(oldHostName, hostName, datanodeDetails.getUuidString()); + nodeStateManager.updateNode(datanodeDetails, layoutInfo); DatanodeDetails dn = nodeStateManager.getNode(datanodeDetails); Preconditions.checkState(dn.getParent() != null); @@ -473,26 +456,10 @@ public RegisteredCommand register( * @param dnsName String representing the hostname or IP of the node * @param uuid String representing the UUID of the registered node. 
*/ - @SuppressFBWarnings(value = "AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION") private synchronized void addEntryToDnsToUuidMap( String dnsName, String uuid) { - Set dnList = dnsToUuidMap.get(dnsName); - if (dnList == null) { - dnList = ConcurrentHashMap.newKeySet(); - dnsToUuidMap.put(dnsName, dnList); - } - dnList.add(uuid); - } - - /** - * - * @param address - * @param uuid - */ - private void addEntryToAddressToUuidMap( - String address, String uuid) { - addressToUuidMap - .computeIfAbsent(address, any -> ConcurrentHashMap.newKeySet()) + dnsToUuidMap + .computeIfAbsent(dnsName, any -> ConcurrentHashMap.newKeySet()) .add(uuid); } @@ -509,20 +476,6 @@ private synchronized void removeEntryFromDnsToUuidMap(String dnsName) { } } - private synchronized void removeEntryFromAddressToUuidMap( - String address) { - if (!addressToUuidMap.containsKey(address)) { - return; - } - Set dnSet = addressToUuidMap.get(address); - if (dnSet.contains(address)) { - dnSet.remove(address); - } - if (dnSet.isEmpty()) { - addressToUuidMap.remove(address); - } - } - private synchronized void updateEntryFromDnsToUuidMap(String oldDnsName, String newDnsName, String uuid) { @@ -530,13 +483,6 @@ private synchronized void updateEntryFromDnsToUuidMap(String oldDnsName, addEntryToDnsToUuidMap(newDnsName, uuid); } - private synchronized void updateEntryFromAddressToUuidMap(String oldAddress, - String newAddress, - String uuid) { - removeEntryFromAddressToUuidMap(oldAddress); - addEntryToAddressToUuidMap(newAddress, uuid); - } - /** * Send heartbeat to indicate the datanode is alive and doing well. 
* @@ -1272,7 +1218,7 @@ public List getNodesByAddress(String address) { LOG.warn("address is null"); return results; } - Set uuids = addressToUuidMap.get(address); + Set uuids = dnsToUuidMap.get(address); if (uuids == null) { LOG.warn("Cannot find node for address {}", address); return results; diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java index 73dc7b61759c..bd3eb81779b5 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java @@ -1744,45 +1744,6 @@ public void testHandlingSCMCommandEvent() } } - /** - * Test add node into network topology during node register. Datanode - * uses Ip address to resolve network location. - */ - @Test - public void testScmRegisterNodeWithIpAddress() - throws IOException, InterruptedException, AuthenticationException { - testScmRegisterNodeWithNetworkTopology(false); - } - - /** - * Test add node into network topology during node register. Datanode - * uses hostname to resolve network location. - */ - @Test - public void testScmRegisterNodeWithHostname() - throws IOException, InterruptedException, AuthenticationException { - testScmRegisterNodeWithNetworkTopology(true); - } - - /** - * Test getNodesByAddress when using IPs. - * - */ - @Test - public void testgetNodesByAddressWithIpAddress() - throws IOException, InterruptedException, AuthenticationException { - testGetNodesByAddress(false); - } - - /** - * Test getNodesByAddress when using hostnames. - */ - @Test - public void testgetNodesByAddressWithHostname() - throws IOException, InterruptedException, AuthenticationException { - testGetNodesByAddress(true); - } - /** * Test add node into a 4-layer network topology during node register. 
*/ @@ -1828,7 +1789,8 @@ public void testScmRegisterNodeWith4LayerNetworkTopology() } } - private void testScmRegisterNodeWithNetworkTopology(boolean useHostname) + @Test + public void testScmRegisterNodeWithNetworkTopology() throws IOException, InterruptedException, AuthenticationException { OzoneConfiguration conf = getConf(); conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 1000, @@ -1844,9 +1806,7 @@ private void testScmRegisterNodeWithNetworkTopology(boolean useHostname) conf.set(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, "org.apache.hadoop.net.TableMapping"); conf.set(NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY, mapFile); - if (useHostname) { - conf.set(DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME, "true"); - } + final int nodeCount = hostNames.length; // use default IP address to resolve node try (SCMNodeManager nodeManager = createNodeManager(conf)) { @@ -1868,13 +1828,11 @@ private void testScmRegisterNodeWithNetworkTopology(boolean useHostname) assertEquals("/rack1", node.getNetworkLocation())); // test get node - if (useHostname) { - Arrays.stream(hostNames).forEach(hostname -> assertNotEquals(0, - nodeManager.getNodesByAddress(hostname).size())); - } else { - Arrays.stream(ipAddress).forEach(ip -> assertNotEquals(0, - nodeManager.getNodesByAddress(ip).size())); - } + Arrays.stream(hostNames).forEach(hostname -> assertNotEquals(0, + nodeManager.getNodesByAddress(hostname).size())); + + Arrays.stream(ipAddress).forEach(ip -> assertNotEquals(0, + nodeManager.getNodesByAddress(ip).size())); } } @@ -1943,7 +1901,8 @@ public void testGetNodeInfo() /** * Test add node into a 4-layer network topology during node register. 
*/ - private void testGetNodesByAddress(boolean useHostname) + @Test + public void testGetNodesByAddress() throws IOException, InterruptedException, AuthenticationException { OzoneConfiguration conf = getConf(); conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 1000, @@ -1954,9 +1913,6 @@ private void testGetNodesByAddress(boolean useHostname) String[] ipAddress = {"1.2.3.4", "1.2.3.4", "2.3.4.5", "3.4.5.6", "4.5.6.7"}; - if (useHostname) { - conf.set(DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME, "true"); - } final int nodeCount = hostNames.length; try (SCMNodeManager nodeManager = createNodeManager(conf)) { for (int i = 0; i < nodeCount; i++) { @@ -1966,15 +1922,14 @@ private void testGetNodesByAddress(boolean useHostname) } // test get node assertEquals(0, nodeManager.getNodesByAddress(null).size()); - if (useHostname) { - assertEquals(2, nodeManager.getNodesByAddress("host1").size()); - assertEquals(1, nodeManager.getNodesByAddress("host2").size()); - assertEquals(0, nodeManager.getNodesByAddress("unknown").size()); - } else { - assertEquals(2, nodeManager.getNodesByAddress("1.2.3.4").size()); - assertEquals(1, nodeManager.getNodesByAddress("2.3.4.5").size()); - assertEquals(0, nodeManager.getNodesByAddress("1.9.8.7").size()); - } + + assertEquals(2, nodeManager.getNodesByAddress("host1").size()); + assertEquals(1, nodeManager.getNodesByAddress("host2").size()); + assertEquals(0, nodeManager.getNodesByAddress("unknown").size()); + + assertEquals(2, nodeManager.getNodesByAddress("1.2.3.4").size()); + assertEquals(1, nodeManager.getNodesByAddress("2.3.4.5").size()); + assertEquals(0, nodeManager.getNodesByAddress("1.9.8.7").size()); } } From 91d1608cd45fd778dcdb4bd48be94e3608d230e5 Mon Sep 17 00:00:00 2001 From: xBis7 Date: Wed, 26 Oct 2022 20:18:55 +0300 Subject: [PATCH 05/18] checkstyle errors fixed --- .../org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java | 1 - .../java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java | 1 - 2 
files changed, 2 deletions(-) diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java index 96e08cf29fc3..268c0b27309c 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hdds.scm.ha.SCMContext; import org.apache.hadoop.hdds.scm.node.states.NodeNotFoundException; import org.apache.hadoop.hdds.server.events.EventPublisher; -import org.apache.hadoop.hdfs.DFSConfigKeys; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java index bd3eb81779b5..923d83960af8 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java @@ -33,7 +33,6 @@ import java.util.concurrent.TimeoutException; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdds.DFSConfigKeysLegacy; import org.apache.hadoop.hdds.HddsConfigKeys; import org.apache.hadoop.hdds.client.RatisReplicationConfig; import org.apache.hadoop.hdds.client.ReplicationConfig; From dc6a42158e5b5eeb910de55e0dad46bdc51979f4 Mon Sep 17 00:00:00 2001 From: xBis7 Date: Mon, 31 Oct 2022 17:58:13 +0200 Subject: [PATCH 06/18] test node register with updated ip or hostname --- .../apache/hadoop/hdds/ratis/RatisHelper.java | 23 ++++-------- .../hadoop/hdds/scm/node/SCMNodeManager.java | 17 +++++---- .../hdds/scm/node/TestSCMNodeManager.java | 35 ++++++++++++++++--- 3 files changed, 45 insertions(+), 30 deletions(-) diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java index be6076a9183b..92be63940888 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java @@ -29,9 +29,9 @@ import java.util.function.BiFunction; import java.util.stream.Collectors; +import com.google.common.base.Strings; import org.apache.hadoop.hdds.StringUtils; import org.apache.hadoop.hdds.conf.ConfigurationSource; -import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.protocol.DatanodeDetails.Port; import org.apache.hadoop.hdds.ratis.conf.RatisClientConfig; @@ -40,7 +40,6 @@ import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.security.x509.SecurityConfig; -import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.ratis.RaftConfigKeys; import org.apache.ratis.client.RaftClient; import org.apache.ratis.client.RaftClientConfigKeys; @@ -67,8 +66,6 @@ public final class RatisHelper { private static final Logger LOG = LoggerFactory.getLogger(RatisHelper.class); - private static final OzoneConfiguration CONF = new OzoneConfiguration(); - // Prefix for Ratis Server GRPC and Ratis client conf. 
public static final String HDDS_DATANODE_RATIS_PREFIX_KEY = "hdds.ratis"; @@ -101,18 +98,16 @@ public static UUID toDatanodeId(RaftProtos.RaftPeerProto peerId) { } private static String toRaftPeerAddress(DatanodeDetails id, Port.Name port) { - if (datanodeUseHostName()) { - final String address = - id.getHostName() + ":" + id.getPort(port).getValue(); + final String address; + if (Strings.isNullOrEmpty(id.getIpAddress())) { + address = id.getHostName() + ":" + id.getPort(port).getValue(); LOG.debug("Datanode is using hostname for raft peer address: {}", address); - return address; } else { - final String address = - id.getIpAddress() + ":" + id.getPort(port).getValue(); + address = id.getIpAddress() + ":" + id.getPort(port).getValue(); LOG.debug("Datanode is using IP for raft peer address: {}", address); - return address; } + return address; } public static RaftPeerId toRaftPeerId(DatanodeDetails id) { @@ -384,12 +379,6 @@ public static Long getMinReplicatedIndex( .min(Long::compareTo).orElse(null); } - private static boolean datanodeUseHostName() { - return CONF.getBoolean( - DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME, - DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); - } - private static Class getClass(String name, Class xface) { try { diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java index 2e29ec5fc9b0..e4481878c881 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java @@ -369,13 +369,10 @@ public RegisteredCommand register( String ipAddress = datanodeDetails.getIpAddress(); String hostName = datanodeDetails.getHostName(); datanodeDetails.setNetworkName(datanodeDetails.getUuidString()); - - // Firstly, use ip address - networkLocation = nodeResolve(ipAddress); - - // If null, 
use hostname - if (networkLocation == null) { + if (Strings.isNullOrEmpty(ipAddress)) { networkLocation = nodeResolve(hostName); + } else { + networkLocation = nodeResolve(ipAddress); } if (networkLocation != null) { datanodeDetails.setNetworkLocation(networkLocation); @@ -458,9 +455,11 @@ public RegisteredCommand register( */ private synchronized void addEntryToDnsToUuidMap( String dnsName, String uuid) { - dnsToUuidMap - .computeIfAbsent(dnsName, any -> ConcurrentHashMap.newKeySet()) - .add(uuid); + if (!Strings.isNullOrEmpty(dnsName)) { + dnsToUuidMap + .computeIfAbsent(dnsName, any -> ConcurrentHashMap.newKeySet()) + .add(uuid); + } } private synchronized void removeEntryFromDnsToUuidMap(String dnsName) { diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java index 923d83960af8..edd1996847c1 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java @@ -1936,7 +1936,36 @@ public void testGetNodesByAddress() * Test node register with updated IP and host name. */ @Test - public void testScmRegisterNodeWithUpdatedIpAndHostname() + public void testNodeWithUpdatedIpAndHostname() + throws IOException, InterruptedException, AuthenticationException { + String updatedIpAddress = "2.3.4.5"; + String updatedHostName = "host2"; + testScmRegisterNodeWithUpdatedIpAndHostname( + updatedIpAddress, updatedHostName); + } + + /** + * Test node register with no IP and updated host name. + */ + @Test + public void testNodeWithNoIpAndUpdatedHostname() + throws IOException, InterruptedException, AuthenticationException { + String updatedHostName = "host2"; + testScmRegisterNodeWithUpdatedIpAndHostname(null, updatedHostName); + } + + /** + * Test node register with updated IP and no host name. 
+ */ + @Test + public void testNodeWithUpdatedIpAndNoHostname() + throws IOException, InterruptedException, AuthenticationException { + String updatedIpAddress = "2.3.4.5"; + testScmRegisterNodeWithUpdatedIpAndHostname(updatedIpAddress, null); + } + + public void testScmRegisterNodeWithUpdatedIpAndHostname( + String updatedIpAddress, String updatedHostName) throws IOException, InterruptedException, AuthenticationException { OzoneConfiguration conf = getConf(); conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 1000, @@ -1977,9 +2006,7 @@ public void testScmRegisterNodeWithUpdatedIpAndHostname() .startsWith("/rack1/ng")); assertTrue(returnedNode.getParent() != null); - // test updating ip address and host name - String updatedIpAddress = "2.3.4.5"; - String updatedHostName = "host2"; + // updating ip address and host name DatanodeDetails updatedNode = createDatanodeDetails( nodeUuid, updatedHostName, updatedIpAddress, null); nodeManager.register(updatedNode, null, null); From ba86db4f27582e78b52f8bf56e01d260bf1c6d86 Mon Sep 17 00:00:00 2001 From: xBis7 Date: Tue, 1 Nov 2022 15:02:23 +0200 Subject: [PATCH 07/18] list and usageinfo tests added in smoketest/admincli/datanode.robot --- .../main/smoketest/admincli/datanode.robot | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot index b4ab5bf25cdd..844e37fd3862 100644 --- a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot +++ b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot @@ -34,6 +34,26 @@ Filter list by UUID ${count} = Get Length ${lines} Should Be Equal As Integers ${count} 1 +Filter list by Ip address + ${uuid} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$2 }' + ${ip} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$3 }' + ${output} = 
Execute ozone admin datanode list --ip "${ip}" + Should contain ${output} Datanode: ${uuid} + ${datanodes} = Get Lines Containing String ${output} Datanode: + @{lines} = Split To Lines ${datanodes} + ${count} = Get Length ${lines} + Should Be Equal As Integers ${count} 1 + +Filter list by Hostname + ${uuid} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$2 }' + ${hostname} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$4 }' + ${output} = Execute ozone admin datanode list --hostname "${hostname}" + Should contain ${output} Datanode: ${uuid} + ${datanodes} = Get Lines Containing String ${output} Datanode: + @{lines} = Split To Lines ${datanodes} + ${count} = Get Length ${lines} + Should Be Equal As Integers ${count} 1 + Filter list by NodeOperationalState ${uuid} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$2 }' ${expected} = Execute ozone admin datanode list | grep -c 'Operational State: IN_SERVICE' @@ -54,6 +74,26 @@ Filter list by NodeState ${count} = Get Length ${lines} Should Be Equal As Integers ${count} ${expected} +Get usage info by UUID + ${uuid} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$2 }' + ${output} = Execute ozone admin datanode usageinfo --uuid "${uuid}" + Should contain ${output} Usage Information (1 Datanodes) + +Get usage info by Ip address + ${ip} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$3 }' + ${output} = Execute ozone admin datanode usageinfo --address "${ip}" + Should contain ${output} Usage Information (1 Datanodes) + +Get usage info by Hostname + ${hostname} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$4 }' + ${output} = Execute ozone admin datanode usageinfo --address "${hostname}" + Should contain ${output} Usage 
Information (1 Datanodes) + +Get usage info with invalid address + ${uuid} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$2 }' + ${output} = Execute ozone admin datanode usageinfo --address "${uuid}" + Should contain ${output} Usage Information (0 Datanodes) + Incomplete command ${output} = Execute And Ignore Error ozone admin datanode Should contain ${output} Incomplete command From aa323f9a8eb3b883a4ae4b7712246e91b824fd62 Mon Sep 17 00:00:00 2001 From: xBis7 Date: Tue, 1 Nov 2022 15:34:35 +0200 Subject: [PATCH 08/18] rebasing with master --- .../hadoop/hdds/scm/cli/ContainerOperationClient.java | 9 --------- 1 file changed, 9 deletions(-) diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java index 5ec3e7fa03f5..7c8bbe76b3cb 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java @@ -469,15 +469,6 @@ public int resetDeletedBlockRetryCount(List txIDs) throws IOException { return storageContainerLocationClient.resetDeletedBlockRetryCount(txIDs); } - /** - * Get Datanode Usage information by address or uuid. - * - * @param address datanode address String - * @param uuid datanode uuid String - * @return List of DatanodeUsageInfoProto. Each element contains info such as - * capacity, SCMused, and remaining space. 
- * @throws IOException - */ @Override public List getDatanodeUsageInfo( String address, String uuid) throws IOException { From d564fb8235a67c3836e659db85ea129922aced7b Mon Sep 17 00:00:00 2001 From: xBis7 Date: Tue, 1 Nov 2022 17:28:54 +0200 Subject: [PATCH 09/18] datanode.robot test user for ozonesecure --- hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot | 1 + 1 file changed, 1 insertion(+) diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot index 844e37fd3862..8e151e847690 100644 --- a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot +++ b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot @@ -17,6 +17,7 @@ Documentation Test ozone admin datanode command Library BuiltIn Resource ../commonlib.robot +Suite Setup Run Keyword if '${SECURITY_ENABLED}' == 'true' Kinit test user testuser testuser.keytab Test Timeout 5 minutes *** Test Cases *** From d345827cd8f8266c10f438d4426b553e908e4be4 Mon Sep 17 00:00:00 2001 From: xBis7 Date: Fri, 4 Nov 2022 15:06:25 +0200 Subject: [PATCH 10/18] RatisHelper.toRaftPeerAddress hostname as the default option --- .../java/org/apache/hadoop/hdds/ratis/RatisHelper.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java index 92be63940888..0d0382942e31 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java @@ -99,13 +99,13 @@ public static UUID toDatanodeId(RaftProtos.RaftPeerProto peerId) { private static String toRaftPeerAddress(DatanodeDetails id, Port.Name port) { final String address; - if (Strings.isNullOrEmpty(id.getIpAddress())) { - address = id.getHostName() + ":" + id.getPort(port).getValue(); - 
LOG.debug("Datanode is using hostname for raft peer address: {}", - address); - } else { + if (Strings.isNullOrEmpty(id.getHostName())) { address = id.getIpAddress() + ":" + id.getPort(port).getValue(); LOG.debug("Datanode is using IP for raft peer address: {}", address); + } else { + address = id.getHostName() + ":" + id.getPort(port).getValue(); + LOG.debug("Datanode is using hostname for raft peer address: {}", + address); } return address; } From b6dfe33d5b981fd457f8a9586c55115e5c5e3be9 Mon Sep 17 00:00:00 2001 From: xBis7 Date: Fri, 4 Nov 2022 22:43:06 +0200 Subject: [PATCH 11/18] DATANODE_USE_DN_HOSTNAME --- .../apache/hadoop/hdds/ratis/RatisHelper.java | 29 ++++++++++----- .../scm/node/NodeDecommissionManager.java | 19 +++++----- .../hadoop/hdds/scm/node/SCMNodeManager.java | 11 ++++-- .../hdds/scm/node/TestSCMNodeManager.java | 35 +++---------------- 4 files changed, 42 insertions(+), 52 deletions(-) diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java index 0d0382942e31..be6076a9183b 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java @@ -29,9 +29,9 @@ import java.util.function.BiFunction; import java.util.stream.Collectors; -import com.google.common.base.Strings; import org.apache.hadoop.hdds.StringUtils; import org.apache.hadoop.hdds.conf.ConfigurationSource; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.protocol.DatanodeDetails.Port; import org.apache.hadoop.hdds.ratis.conf.RatisClientConfig; @@ -40,6 +40,7 @@ import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.security.x509.SecurityConfig; +import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.ratis.RaftConfigKeys; import 
org.apache.ratis.client.RaftClient; import org.apache.ratis.client.RaftClientConfigKeys; @@ -66,6 +67,8 @@ public final class RatisHelper { private static final Logger LOG = LoggerFactory.getLogger(RatisHelper.class); + private static final OzoneConfiguration CONF = new OzoneConfiguration(); + // Prefix for Ratis Server GRPC and Ratis client conf. public static final String HDDS_DATANODE_RATIS_PREFIX_KEY = "hdds.ratis"; @@ -98,16 +101,18 @@ public static UUID toDatanodeId(RaftProtos.RaftPeerProto peerId) { } private static String toRaftPeerAddress(DatanodeDetails id, Port.Name port) { - final String address; - if (Strings.isNullOrEmpty(id.getHostName())) { - address = id.getIpAddress() + ":" + id.getPort(port).getValue(); - LOG.debug("Datanode is using IP for raft peer address: {}", address); - } else { - address = id.getHostName() + ":" + id.getPort(port).getValue(); + if (datanodeUseHostName()) { + final String address = + id.getHostName() + ":" + id.getPort(port).getValue(); LOG.debug("Datanode is using hostname for raft peer address: {}", - address); + address); + return address; + } else { + final String address = + id.getIpAddress() + ":" + id.getPort(port).getValue(); + LOG.debug("Datanode is using IP for raft peer address: {}", address); + return address; } - return address; } public static RaftPeerId toRaftPeerId(DatanodeDetails id) { @@ -379,6 +384,12 @@ public static Long getMinReplicatedIndex( .min(Long::compareTo).orElse(null); } + private static boolean datanodeUseHostName() { + return CONF.getBoolean( + DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME, + DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); + } + private static Class getClass(String name, Class xface) { try { diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java index 268c0b27309c..1ea04cdfc3ce 100644 --- 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeDecommissionManager.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hdds.scm.ha.SCMContext; import org.apache.hadoop.hdds.scm.node.states.NodeNotFoundException; import org.apache.hadoop.hdds.server.events.EventPublisher; +import org.apache.hadoop.hdfs.DFSConfigKeys; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -56,6 +57,7 @@ public class NodeDecommissionManager { private EventPublisher eventQueue; private ReplicationManager replicationManager; private OzoneConfiguration conf; + private boolean useHostnames; private long monitorInterval; private static final Logger LOG = @@ -114,17 +116,12 @@ private List mapHostnamesToDatanodes(List hosts) + host.getRawHostname(), e); } String dnsName; - String hostName = addr.getHostName(); - String hostAddress = addr.getHostAddress(); - List found = nodeManager.getNodesByAddress(hostAddress); - - if (found.size() == 0) { - found = nodeManager.getNodesByAddress(hostName); - dnsName = hostName; + if (useHostnames) { + dnsName = addr.getHostName(); } else { - dnsName = hostAddress; + dnsName = addr.getHostAddress(); } - + List found = nodeManager.getNodesByAddress(dnsName); if (found.size() == 0) { throw new InvalidHostStringException("Host " + host.getRawHostname() + " (" + dnsName + ") is not running any datanodes registered" @@ -189,6 +186,10 @@ public NodeDecommissionManager(OzoneConfiguration config, NodeManager nm, new ThreadFactoryBuilder().setNameFormat("DatanodeAdminManager-%d") .setDaemon(true).build()); + useHostnames = conf.getBoolean( + DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME, + DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); + monitorInterval = conf.getTimeDuration( ScmConfigKeys.OZONE_SCM_DATANODE_ADMIN_MONITOR_INTERVAL, ScmConfigKeys.OZONE_SCM_DATANODE_ADMIN_MONITOR_INTERVAL_DEFAULT, diff --git 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java index e4481878c881..1ef5466d4a4f 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java @@ -120,6 +120,7 @@ public class SCMNodeManager implements NodeManager { private final SCMStorageConfig scmStorageConfig; private final NetworkTopology clusterMap; private final DNSToSwitchMapping dnsToSwitchMapping; + private final boolean useHostname; private final ConcurrentHashMap> dnsToUuidMap = new ConcurrentHashMap<>(); private final int numPipelinesPerMetadataVolume; @@ -157,6 +158,9 @@ public SCMNodeManager(OzoneConfiguration conf, this.dnsToSwitchMapping = ((newInstance instanceof CachedDNSToSwitchMapping) ? newInstance : new CachedDNSToSwitchMapping(newInstance)); + this.useHostname = conf.getBoolean( + DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME, + DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT); this.numPipelinesPerMetadataVolume = conf.getInt(ScmConfigKeys.OZONE_SCM_PIPELINE_PER_METADATA_VOLUME, ScmConfigKeys.OZONE_SCM_PIPELINE_PER_METADATA_VOLUME_DEFAULT); @@ -360,8 +364,9 @@ public RegisteredCommand register( InetAddress dnAddress = Server.getRemoteIp(); if (dnAddress != null) { // Mostly called inside an RPC, update ip - datanodeDetails.setHostName(dnAddress.getHostName()); - + if (!useHostname) { + datanodeDetails.setHostName(dnAddress.getHostName()); + } datanodeDetails.setIpAddress(dnAddress.getHostAddress()); } @@ -369,7 +374,7 @@ public RegisteredCommand register( String ipAddress = datanodeDetails.getIpAddress(); String hostName = datanodeDetails.getHostName(); datanodeDetails.setNetworkName(datanodeDetails.getUuidString()); - if (Strings.isNullOrEmpty(ipAddress)) { + if (useHostname) { networkLocation = 
nodeResolve(hostName); } else { networkLocation = nodeResolve(ipAddress); diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java index edd1996847c1..923d83960af8 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java @@ -1936,36 +1936,7 @@ public void testGetNodesByAddress() * Test node register with updated IP and host name. */ @Test - public void testNodeWithUpdatedIpAndHostname() - throws IOException, InterruptedException, AuthenticationException { - String updatedIpAddress = "2.3.4.5"; - String updatedHostName = "host2"; - testScmRegisterNodeWithUpdatedIpAndHostname( - updatedIpAddress, updatedHostName); - } - - /** - * Test node register with no IP and updated host name. - */ - @Test - public void testNodeWithNoIpAndUpdatedHostname() - throws IOException, InterruptedException, AuthenticationException { - String updatedHostName = "host2"; - testScmRegisterNodeWithUpdatedIpAndHostname(null, updatedHostName); - } - - /** - * Test node register with updated IP and no host name. 
- */ - @Test - public void testNodeWithUpdatedIpAndNoHostname() - throws IOException, InterruptedException, AuthenticationException { - String updatedIpAddress = "2.3.4.5"; - testScmRegisterNodeWithUpdatedIpAndHostname(updatedIpAddress, null); - } - - public void testScmRegisterNodeWithUpdatedIpAndHostname( - String updatedIpAddress, String updatedHostName) + public void testScmRegisterNodeWithUpdatedIpAndHostname() throws IOException, InterruptedException, AuthenticationException { OzoneConfiguration conf = getConf(); conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 1000, @@ -2006,7 +1977,9 @@ public void testScmRegisterNodeWithUpdatedIpAndHostname( .startsWith("/rack1/ng")); assertTrue(returnedNode.getParent() != null); - // updating ip address and host name + // test updating ip address and host name + String updatedIpAddress = "2.3.4.5"; + String updatedHostName = "host2"; DatanodeDetails updatedNode = createDatanodeDetails( nodeUuid, updatedHostName, updatedIpAddress, null); nodeManager.register(updatedNode, null, null); From a494d53fd99401d9063ae88df8aa7024596a2292 Mon Sep 17 00:00:00 2001 From: "Doroszlai, Attila" Date: Tue, 21 Nov 2023 19:08:52 +0100 Subject: [PATCH 12/18] run testScmRegisterNodeWithNetworkTopology with both USE_DN_HOSTNAME=true/false --- .../apache/hadoop/hdds/scm/node/TestSCMNodeManager.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java index 909498a4379b..d13cd8952ae9 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodeManager.java @@ -1839,12 +1839,15 @@ public void testScmRegisterNodeWith4LayerNetworkTopology() } } - @Test - public void testScmRegisterNodeWithNetworkTopology() 
+ @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testScmRegisterNodeWithNetworkTopology(boolean useHostname) throws IOException, InterruptedException, AuthenticationException { OzoneConfiguration conf = getConf(); conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 1000, MILLISECONDS); + conf.setBoolean(DFSConfigKeysLegacy.DFS_DATANODE_USE_DN_HOSTNAME, + useHostname); // create table mapping file String[] hostNames = {"host1", "host2", "host3", "host4"}; From 52bafdc290e0e51f9aa0ca9194ca71a2e0ff8bfc Mon Sep 17 00:00:00 2001 From: "Doroszlai, Attila" Date: Tue, 21 Nov 2023 19:42:12 +0100 Subject: [PATCH 13/18] reduce `ozone admin datanode list` invocations in robot test --- .../src/main/smoketest/admincli/datanode.robot | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot index 5a5a7b235181..00d047cd97fd 100644 --- a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot +++ b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot @@ -37,8 +37,8 @@ Filter list by UUID Should Be Equal As Integers ${count} 1 Filter list by Ip address - ${uuid} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$2 }' - ${ip} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$3 }' + ${uuid} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$2 }' + ${ip} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$3 }' ${output} = Execute ozone admin datanode list --ip "${ip}" Should contain ${output} Datanode: ${uuid} ${datanodes} = Get Lines Containing String ${output} Datanode: @@ -47,8 +47,8 @@ Filter list by Ip address Should Be Equal As Integers ${count} 1 Filter list by Hostname - ${uuid} = Execute ozone admin datanode list | grep '^Datanode:' 
| head -1 | awk '{ print \$2 }' - ${hostname} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$4 }' + ${uuid} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$2 }' + ${hostname} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$4 }' ${output} = Execute ozone admin datanode list --hostname "${hostname}" Should contain ${output} Datanode: ${uuid} ${datanodes} = Get Lines Containing String ${output} Datanode: @@ -77,22 +77,22 @@ Filter list by NodeState Should Be Equal As Integers ${count} ${expected} Get usage info by UUID - ${uuid} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$2 }' + ${uuid} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$2 }' ${output} = Execute ozone admin datanode usageinfo --uuid "${uuid}" Should contain ${output} Usage Information (1 Datanodes) Get usage info by Ip address - ${ip} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$3 }' + ${ip} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$3 }' ${output} = Execute ozone admin datanode usageinfo --address "${ip}" Should contain ${output} Usage Information (1 Datanodes) Get usage info by Hostname - ${hostname} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$4 }' + ${hostname} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$4 }' ${output} = Execute ozone admin datanode usageinfo --address "${hostname}" Should contain ${output} Usage Information (1 Datanodes) Get usage info with invalid address - ${uuid} = Execute ozone admin datanode list | grep '^Datanode:' | head -1 | awk '{ print \$2 }' + ${uuid} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$2 }' 
${output} = Execute ozone admin datanode usageinfo --address "${uuid}" Should contain ${output} Usage Information (0 Datanodes) From 96976d596d9c58658739994292ce5e21bf8b2ffe Mon Sep 17 00:00:00 2001 From: "Doroszlai, Attila" Date: Tue, 21 Nov 2023 20:00:19 +0100 Subject: [PATCH 14/18] fix checkstyle --- .../org/apache/hadoop/hdds/scm/node/SCMNodeManager.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java index 300c227e6837..c504cd6296a8 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java @@ -392,6 +392,7 @@ public RegisteredCommand register( datanodeDetails.setNetworkLocation(networkLocation); } + final UUID uuid = datanodeDetails.getUuid(); if (!isNodeRegistered(datanodeDetails)) { try { clusterMap.add(datanodeDetails); @@ -399,8 +400,8 @@ public RegisteredCommand register( // Check that datanode in nodeStateManager has topology parent set DatanodeDetails dn = nodeStateManager.getNode(datanodeDetails); Preconditions.checkState(dn.getParent() != null); - addToDnsToUuidMap(ipAddress, datanodeDetails.getUuid()); - addToDnsToUuidMap(hostName, datanodeDetails.getUuid()); + addToDnsToUuidMap(ipAddress, uuid); + addToDnsToUuidMap(hostName, uuid); // Updating Node Report, as registration is successful processNodeReport(datanodeDetails, nodeReport); LOG.info("Registered Data node : {}", datanodeDetails.toDebugString()); @@ -430,8 +431,8 @@ public RegisteredCommand register( String oldIpAddress = datanodeInfo.getIpAddress(); String oldHostName = datanodeInfo.getHostName(); - updateDnsToUuidMap(oldIpAddress, ipAddress, datanodeDetails.getUuid()); - updateDnsToUuidMap(oldHostName, hostName, datanodeDetails.getUuid()); + 
updateDnsToUuidMap(oldIpAddress, ipAddress, uuid); + updateDnsToUuidMap(oldHostName, hostName, uuid); nodeStateManager.updateNode(datanodeDetails, layoutInfo); DatanodeDetails dn = nodeStateManager.getNode(datanodeDetails); From 062312cc204ad32228fd96771ecd829ef624a586 Mon Sep 17 00:00:00 2001 From: "Doroszlai, Attila" Date: Tue, 21 Nov 2023 20:04:37 +0100 Subject: [PATCH 15/18] reuse ipAddress/hostName variables --- .../hadoop/hdds/scm/node/SCMNodeManager.java | 21 +++++++------------ 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java index c504cd6296a8..10295cd5e2d9 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java @@ -82,6 +82,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; @@ -379,15 +380,10 @@ public RegisteredCommand register( datanodeDetails.setIpAddress(dnAddress.getHostAddress()); } - String networkLocation; - String ipAddress = datanodeDetails.getIpAddress(); - String hostName = datanodeDetails.getHostName(); + final String ipAddress = datanodeDetails.getIpAddress(); + final String hostName = datanodeDetails.getHostName(); datanodeDetails.setNetworkName(datanodeDetails.getUuidString()); - if (useHostname) { - networkLocation = nodeResolve(hostName); - } else { - networkLocation = nodeResolve(ipAddress); - } + String networkLocation = nodeResolve(useHostname ? 
hostName : ipAddress); if (networkLocation != null) { datanodeDetails.setNetworkLocation(networkLocation); } @@ -420,17 +416,16 @@ public RegisteredCommand register( try { final DatanodeInfo datanodeInfo = nodeStateManager.getNode(datanodeDetails); - if (!datanodeInfo.getIpAddress().equals(datanodeDetails.getIpAddress()) - || !datanodeInfo.getHostName() - .equals(datanodeDetails.getHostName())) { + final String oldIpAddress = datanodeInfo.getIpAddress(); + final String oldHostName = datanodeInfo.getHostName(); + if (!Objects.equals(oldIpAddress, ipAddress) + || !Objects.equals(oldHostName, hostName)) { LOG.info("Updating data node {} from {} to {}", datanodeDetails.getUuidString(), datanodeInfo, datanodeDetails); clusterMap.update(datanodeInfo, datanodeDetails); - String oldIpAddress = datanodeInfo.getIpAddress(); - String oldHostName = datanodeInfo.getHostName(); updateDnsToUuidMap(oldIpAddress, ipAddress, uuid); updateDnsToUuidMap(oldHostName, hostName, uuid); From 1e6f984598667db3ac33f2e308651d38c058d025 Mon Sep 17 00:00:00 2001 From: "Doroszlai, Attila" Date: Tue, 21 Nov 2023 20:07:08 +0100 Subject: [PATCH 16/18] slightly more uniform log messages --- .../org/apache/hadoop/hdds/scm/node/SCMNodeManager.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java index 10295cd5e2d9..02165f6a7b75 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java @@ -400,11 +400,11 @@ public RegisteredCommand register( addToDnsToUuidMap(hostName, uuid); // Updating Node Report, as registration is successful processNodeReport(datanodeDetails, nodeReport); - LOG.info("Registered Data node : {}", datanodeDetails.toDebugString()); + 
LOG.info("Registered datanode: {}", datanodeDetails.toDebugString()); scmNodeEventPublisher.fireEvent(SCMEvents.NEW_NODE, datanodeDetails); } catch (NodeAlreadyExistsException e) { if (LOG.isTraceEnabled()) { - LOG.trace("Datanode is already registered. Datanode: {}", + LOG.trace("Datanode is already registered: {}", datanodeDetails); } } catch (NodeNotFoundException e) { @@ -420,7 +420,7 @@ public RegisteredCommand register( final String oldHostName = datanodeInfo.getHostName(); if (!Objects.equals(oldIpAddress, ipAddress) || !Objects.equals(oldHostName, hostName)) { - LOG.info("Updating data node {} from {} to {}", + LOG.info("Updating datanode {} from {} to {}", datanodeDetails.getUuidString(), datanodeInfo, datanodeDetails); @@ -433,7 +433,7 @@ public RegisteredCommand register( DatanodeDetails dn = nodeStateManager.getNode(datanodeDetails); Preconditions.checkState(dn.getParent() != null); processNodeReport(datanodeDetails, nodeReport); - LOG.info("Updated Datanode to: {}", dn); + LOG.info("Updated datanode to: {}", dn); scmNodeEventPublisher .fireEvent(SCMEvents.NODE_ADDRESS_UPDATE, dn); } From a0d91da8988278a1dfc0e293d12724f838739a37 Mon Sep 17 00:00:00 2001 From: "Doroszlai, Attila" Date: Tue, 21 Nov 2023 20:26:16 +0100 Subject: [PATCH 17/18] reduce duplication in robot test --- .../main/smoketest/admincli/datanode.robot | 39 +++++++------------ 1 file changed, 14 insertions(+), 25 deletions(-) diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot index 00d047cd97fd..b4ee5b952906 100644 --- a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot +++ b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot @@ -20,6 +20,15 @@ Resource ../commonlib.robot Suite Setup Run Keyword if '${SECURITY_ENABLED}' == 'true' Kinit test user testuser testuser.keytab Test Timeout 5 minutes +*** Keywords *** +Assert Output + [arguments] ${output} ${expected} ${uuid} + Should 
contain ${output} Datanode: ${uuid} + ${datanodes} = Get Lines Containing String ${output} Datanode: + @{lines} = Split To Lines ${datanodes} + ${count} = Get Length ${lines} + Should Be Equal As Integers ${count} ${expected} + *** Test Cases *** List datanodes Execute ozone admin datanode list > datanode.list @@ -30,51 +39,31 @@ List datanodes Filter list by UUID ${uuid} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$2 }' ${output} = Execute ozone admin datanode list --id "${uuid}" - Should contain ${output} Datanode: ${uuid} - ${datanodes} = Get Lines Containing String ${output} Datanode: - @{lines} = Split To Lines ${datanodes} - ${count} = Get Length ${lines} - Should Be Equal As Integers ${count} 1 + Assert Output ${output} 1 ${uuid} Filter list by Ip address ${uuid} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$2 }' ${ip} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$3 }' ${output} = Execute ozone admin datanode list --ip "${ip}" - Should contain ${output} Datanode: ${uuid} - ${datanodes} = Get Lines Containing String ${output} Datanode: - @{lines} = Split To Lines ${datanodes} - ${count} = Get Length ${lines} - Should Be Equal As Integers ${count} 1 + Assert Output ${output} 1 ${uuid} Filter list by Hostname ${uuid} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$2 }' ${hostname} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$3 }' | awk -F '[/]' '{ print \$4 }' ${output} = Execute ozone admin datanode list --hostname "${hostname}" - Should contain ${output} Datanode: ${uuid} - ${datanodes} = Get Lines Containing String ${output} Datanode: - @{lines} = Split To Lines ${datanodes} - ${count} = Get Length ${lines} - Should Be Equal As Integers ${count} 1 + Assert Output ${output} 1 ${uuid} Filter list by NodeOperationalState ${uuid} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$2 }' ${expected} = 
Execute grep -c 'Operational State: IN_SERVICE' datanode.list ${output} = Execute ozone admin datanode list --operational-state IN_SERVICE - Should contain ${output} Datanode: ${uuid} - ${datanodes} = Get Lines Containing String ${output} Datanode: - @{lines} = Split To Lines ${datanodes} - ${count} = Get Length ${lines} - Should Be Equal As Integers ${count} ${expected} + Assert Output ${output} ${expected} ${uuid} Filter list by NodeState ${uuid} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$2 }' ${expected} = Execute grep -c 'Health State: HEALTHY' datanode.list ${output} = Execute ozone admin datanode list --node-state HEALTHY - Should contain ${output} Datanode: ${uuid} - ${datanodes} = Get Lines Containing String ${output} Datanode: - @{lines} = Split To Lines ${datanodes} - ${count} = Get Length ${lines} - Should Be Equal As Integers ${count} ${expected} + Assert Output ${output} ${expected} ${uuid} Get usage info by UUID ${uuid} = Execute grep '^Datanode:' datanode.list | head -1 | awk '{ print \$2 }' From dc5983cf01fb433647e86620ada431868fb5b5ea Mon Sep 17 00:00:00 2001 From: "Doroszlai, Attila" Date: Mon, 27 Nov 2023 11:59:03 +0100 Subject: [PATCH 18/18] Update IP and hostname in the same method call --- .../hadoop/hdds/scm/node/SCMNodeManager.java | 73 ++++++++++--------- 1 file changed, 40 insertions(+), 33 deletions(-) diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java index 02165f6a7b75..3103d5a7d4a7 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java @@ -396,8 +396,7 @@ public RegisteredCommand register( // Check that datanode in nodeStateManager has topology parent set DatanodeDetails dn = nodeStateManager.getNode(datanodeDetails); 
Preconditions.checkState(dn.getParent() != null); - addToDnsToUuidMap(ipAddress, uuid); - addToDnsToUuidMap(hostName, uuid); + addToDnsToUuidMap(uuid, ipAddress, hostName); // Updating Node Report, as registration is successful processNodeReport(datanodeDetails, nodeReport); LOG.info("Registered datanode: {}", datanodeDetails.toDebugString()); @@ -414,28 +413,20 @@ public RegisteredCommand register( } else { // Update datanode if it is registered but the ip or hostname changes try { - final DatanodeInfo datanodeInfo = - nodeStateManager.getNode(datanodeDetails); - final String oldIpAddress = datanodeInfo.getIpAddress(); - final String oldHostName = datanodeInfo.getHostName(); - if (!Objects.equals(oldIpAddress, ipAddress) - || !Objects.equals(oldHostName, hostName)) { + final DatanodeInfo oldNode = nodeStateManager.getNode(datanodeDetails); + if (updateDnsToUuidMap(oldNode.getHostName(), oldNode.getIpAddress(), + hostName, ipAddress, uuid)) { LOG.info("Updating datanode {} from {} to {}", datanodeDetails.getUuidString(), - datanodeInfo, + oldNode, datanodeDetails); - clusterMap.update(datanodeInfo, datanodeDetails); - - updateDnsToUuidMap(oldIpAddress, ipAddress, uuid); - updateDnsToUuidMap(oldHostName, hostName, uuid); - + clusterMap.update(oldNode, datanodeDetails); nodeStateManager.updateNode(datanodeDetails, layoutInfo); DatanodeDetails dn = nodeStateManager.getNode(datanodeDetails); Preconditions.checkState(dn.getParent() != null); processNodeReport(datanodeDetails, nodeReport); LOG.info("Updated datanode to: {}", dn); - scmNodeEventPublisher - .fireEvent(SCMEvents.NODE_ADDRESS_UPDATE, dn); + scmNodeEventPublisher.fireEvent(SCMEvents.NODE_ADDRESS_UPDATE, dn); } } catch (NodeNotFoundException e) { LOG.error("Cannot find datanode {} from nodeStateManager", @@ -452,33 +443,49 @@ public RegisteredCommand register( /** * Add an entry to the dnsToUuidMap, which maps hostname / IP to the DNs * running on that host. 
As each address can have many DNs running on it, - * this is a one to many mapping. + * and each host can have multiple addresses, + * this is a many to many mapping. * - * @param addr the hostname or IP of the node * @param uuid the UUID of the registered node. + * @param addresses hostname and/or IP of the node */ - private synchronized void addToDnsToUuidMap(String addr, UUID uuid) { - if (!Strings.isNullOrEmpty(addr)) { - dnsToUuidMap.computeIfAbsent(addr, k -> ConcurrentHashMap.newKeySet()) - .add(uuid); + private synchronized void addToDnsToUuidMap(UUID uuid, String... addresses) { + for (String addr : addresses) { + if (!Strings.isNullOrEmpty(addr)) { + dnsToUuidMap.computeIfAbsent(addr, k -> ConcurrentHashMap.newKeySet()) + .add(uuid); + } } } - private synchronized void removeFromDnsToUuidMap(String addr, UUID uuid) { - Set dnSet = dnsToUuidMap.get(addr); - if (dnSet != null && dnSet.remove(uuid) && dnSet.isEmpty()) { - dnsToUuidMap.remove(addr); + private synchronized void removeFromDnsToUuidMap(UUID uuid, String address) { + if (address != null) { + Set dnSet = dnsToUuidMap.get(address); + if (dnSet != null && dnSet.remove(uuid) && dnSet.isEmpty()) { + dnsToUuidMap.remove(address); + } } } - private synchronized void updateDnsToUuidMap( - String oldDnsName, String newDnsName, UUID uuid) { - Preconditions.checkNotNull(oldDnsName, "old address == null"); - Preconditions.checkNotNull(newDnsName, "new address == null"); - if (!oldDnsName.equals(newDnsName)) { - removeFromDnsToUuidMap(oldDnsName, uuid); - addToDnsToUuidMap(newDnsName, uuid); + private boolean updateDnsToUuidMap( + String oldHostName, String oldIpAddress, + String newHostName, String newIpAddress, + UUID uuid) { + final boolean ipChanged = !Objects.equals(oldIpAddress, newIpAddress); + final boolean hostNameChanged = !Objects.equals(oldHostName, newHostName); + if (ipChanged || hostNameChanged) { + synchronized (this) { + if (ipChanged) { + removeFromDnsToUuidMap(uuid, oldIpAddress); + 
addToDnsToUuidMap(uuid, newIpAddress); + } + if (hostNameChanged) { + removeFromDnsToUuidMap(uuid, oldHostName); + addToDnsToUuidMap(uuid, newHostName); + } + } } + return ipChanged || hostNameChanged; } /**