diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java
index 1c324ac8ff59..6a19a6d50e4c 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java
@@ -86,8 +86,7 @@ public static Codec getCodec() {
   /**
    * DataNode's unique identifier in the cluster.
    */
-  private final UUID uuid;
-  private final StringWithByteString uuidString;
+  private final DatanodeID id;
   private final String threadNamePrefix;
   private StringWithByteString ipAddress;
   private StringWithByteString hostName;
@@ -103,9 +102,8 @@ public static Codec getCodec() {
 
   private DatanodeDetails(Builder b) {
     super(b.hostName, b.networkLocation, NetConstants.NODE_COST_DEFAULT);
-    uuid = b.id;
-    uuidString = StringWithByteString.valueOf(uuid.toString());
-    threadNamePrefix = HddsUtils.threadNamePrefix(uuidString);
+    id = b.id;
+    threadNamePrefix = HddsUtils.threadNamePrefix(id.toString());
     ipAddress = b.ipAddress;
     hostName = b.hostName;
     ports = b.ports;
@@ -129,9 +127,8 @@ public DatanodeDetails(DatanodeDetails datanodeDetails) {
     super(datanodeDetails.getHostNameAsByteString(), datanodeDetails.getNetworkLocationAsByteString(),
         datanodeDetails.getParent(), datanodeDetails.getLevel(),
         datanodeDetails.getCost());
-    this.uuid = datanodeDetails.uuid;
-    this.uuidString = datanodeDetails.uuidString;
-    threadNamePrefix = HddsUtils.threadNamePrefix(uuidString);
+    this.id = datanodeDetails.id;
+    threadNamePrefix = HddsUtils.threadNamePrefix(id.toString());
     this.ipAddress = datanodeDetails.ipAddress;
     this.hostName = datanodeDetails.hostName;
     this.ports = datanodeDetails.ports;
@@ -148,13 +145,19 @@ public DatanodeDetails(DatanodeDetails datanodeDetails) {
     this.currentVersion = datanodeDetails.getCurrentVersion();
   }
 
+  public DatanodeID getID() {
+    return id;
+  }
+
   /**
    * Returns the DataNode UUID.
    *
    * @return UUID of DataNode
    */
+  // TODO: Remove this in follow-up Jira (HDDS-12015)
+  @Deprecated
   public UUID getUuid() {
-    return uuid;
+    return id.getUuid();
   }
 
   /**
@@ -163,7 +166,7 @@ public UUID getUuid() {
    * @return UUID of DataNode
    */
   public String getUuidString() {
-    return uuidString.getString();
+    return id.toString();
   }
 
   /**
@@ -392,11 +395,16 @@ public Port getStandalonePort() {
   public static DatanodeDetails.Builder newBuilder(
       HddsProtos.DatanodeDetailsProto datanodeDetailsProto) {
     DatanodeDetails.Builder builder = newBuilder();
-    if (datanodeDetailsProto.hasUuid128()) {
+
+    if (datanodeDetailsProto.hasId()) {
+      builder.setID(DatanodeID.fromProto(datanodeDetailsProto.getId()));
+      // The else parts are for backward compatibility.
+    } else if (datanodeDetailsProto.hasUuid128()) {
       HddsProtos.UUID uuid = datanodeDetailsProto.getUuid128();
-      builder.setUuid(new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits()));
+      builder.setID(DatanodeID.of(new UUID(
+          uuid.getMostSigBits(), uuid.getLeastSigBits())));
     } else if (datanodeDetailsProto.hasUuid()) {
-      builder.setUuid(UUID.fromString(datanodeDetailsProto.getUuid()));
+      builder.setID(DatanodeID.fromUuidString(datanodeDetailsProto.getUuid()));
     }
 
     if (datanodeDetailsProto.hasIpAddress()) {
@@ -504,20 +512,17 @@ public HddsProtos.DatanodeDetailsProto toProto(int clientVersion, Set<Port.Name> filterPorts) {
    * If empty, all available ports will be included.
    * @return A {@link HddsProtos.DatanodeDetailsProto.Builder} Object.
    */
-
   public HddsProtos.DatanodeDetailsProto.Builder toProtoBuilder(
       int clientVersion, Set<Port.Name> filterPorts) {
 
-    HddsProtos.UUID uuid128 = HddsProtos.UUID.newBuilder()
-        .setMostSigBits(uuid.getMostSignificantBits())
-        .setLeastSigBits(uuid.getLeastSignificantBits())
-        .build();
+    final HddsProtos.DatanodeIDProto idProto = id.toProto();
+    final HddsProtos.DatanodeDetailsProto.Builder builder =
+        HddsProtos.DatanodeDetailsProto.newBuilder();
 
-    HddsProtos.DatanodeDetailsProto.Builder builder =
-        HddsProtos.DatanodeDetailsProto.newBuilder()
-            .setUuid128(uuid128);
-
-    builder.setUuidBytes(uuidString.getBytes());
+    builder.setId(idProto);
+    // Both are deprecated.
+    builder.setUuid128(idProto.getUuid());
+    builder.setUuidBytes(id.getByteString());
 
     if (ipAddress != null) {
       builder.setIpAddressBytes(ipAddress.getBytes());
@@ -619,11 +624,11 @@ public void setCurrentVersion(int currentVersion) {
 
   @Override
   public String toString() {
-    return uuidString + "(" + hostName + "/" + ipAddress + ")";
+    return id + "(" + hostName + "/" + ipAddress + ")";
   }
 
   public String toDebugString() {
-    return uuid.toString() + "{" +
+    return id + "{" +
         "ip: " +
         ipAddress +
         ", host: " +
@@ -639,13 +644,13 @@ public String toDebugString() {
 
   @Override
   public int compareTo(DatanodeDetails that) {
-    return this.getUuid().compareTo(that.getUuid());
+    return this.id.compareTo(that.id);
   }
 
   @Override
   public boolean equals(Object obj) {
     return obj instanceof DatanodeDetails &&
-        uuid.equals(((DatanodeDetails) obj).uuid);
+        id.equals(((DatanodeDetails) obj).id);
   }
 
@@ -664,7 +669,7 @@ public boolean compareNodeValues(DatanodeDetails datanodeDetails) {
 
   @Override
   public int hashCode() {
-    return uuid.hashCode();
+    return id.hashCode();
   }
 
   /**
@@ -685,7 +690,7 @@ public String threadNamePrefix() {
    * Builder class for building DatanodeDetails.
    */
   public static final class Builder {
-    private UUID id;
+    private DatanodeID id;
     private StringWithByteString ipAddress;
     private StringWithByteString hostName;
     private StringWithByteString networkName;
@@ -716,7 +721,7 @@ private Builder() {
      * @return DatanodeDetails.Builder
      */
     public Builder setDatanodeDetails(DatanodeDetails details) {
-      this.id = details.getUuid();
+      this.id = details.id;
       this.ipAddress = details.getIpAddressAsByteString();
       this.hostName = details.getHostNameAsByteString();
       this.networkName = details.getHostNameAsByteString();
@@ -740,7 +745,12 @@ public Builder setDatanodeDetails(DatanodeDetails details) {
      * @return DatanodeDetails.Builder
      */
     public Builder setUuid(UUID uuid) {
-      this.id = uuid;
+      this.id = DatanodeID.of(uuid);
       return this;
     }
+
+    public Builder setID(DatanodeID dnId) {
+      this.id = dnId;
+      return this;
+    }
 
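Usage note (illustrative sketch, not part of the patch): the builder now accepts a DatanodeID directly, while the deprecated UUID path keeps working because setUuid(UUID) wraps the value through DatanodeID.of(uuid). The demo class below is hypothetical; it only exercises the builder, getID() and equality behaviour shown in the hunks above.

    import java.util.UUID;
    import org.apache.hadoop.hdds.protocol.DatanodeDetails;
    import org.apache.hadoop.hdds.protocol.DatanodeID;

    // Hypothetical demo class, not part of the patch.
    public class DatanodeIdBuilderSketch {
      public static void main(String[] args) {
        UUID uuid = UUID.randomUUID();
        // Old style: still compiles; the builder converts it via DatanodeID.of(uuid).
        DatanodeDetails legacy = DatanodeDetails.newBuilder().setUuid(uuid).build();
        // New style introduced by this patch.
        DatanodeDetails current = DatanodeDetails.newBuilder().setID(DatanodeID.of(uuid)).build();
        // equals(), hashCode() and compareTo() now delegate to DatanodeID.
        System.out.println(legacy.equals(current));            // true
        System.out.println(legacy.getID() == current.getID()); // true: DatanodeID.of() interns per UUID
      }
    }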
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeID.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeID.java
new file mode 100644
index 000000000000..dee5bbc0aabe
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeID.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.protocol;
+
+import com.google.protobuf.ByteString;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeIDProto;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.util.StringWithByteString;
+
+/**
+ * DatanodeID is the primary identifier of the Datanode.
+ * They are unique for every Datanode in the cluster.
+ * <p>
+ * This class is immutable and thread safe.
+ */
+public final class DatanodeID implements Comparable<DatanodeID> {
+
+  private static final ConcurrentMap<UUID, DatanodeID> CACHE = new ConcurrentHashMap<>();
+
+  private final UUID uuid;
+  private final StringWithByteString uuidByteString;
+
+  private DatanodeID(final UUID uuid) {
+    this.uuid = uuid;
+    this.uuidByteString = StringWithByteString.valueOf(uuid.toString());
+  }
+
+  // Mainly used for JSON conversion
+  public String getID() {
+    return toString();
+  }
+
+  @Override
+  public int compareTo(final DatanodeID that) {
+    return this.uuid.compareTo(that.uuid);
+  }
+
+  @Override
+  public boolean equals(final Object obj) {
+    return obj instanceof DatanodeID &&
+        uuid.equals(((DatanodeID) obj).uuid);
+  }
+
+  @Override
+  public int hashCode() {
+    return uuid.hashCode();
+  }
+
+  @Override
+  public String toString() {
+    return uuidByteString.getString();
+  }
+
+  /**
+   * This will be removed once the proto structure is refactored
+   * to remove deprecated fields.
+   */
+  @Deprecated
+  public ByteString getByteString() {
+    return uuidByteString.getBytes();
+  }
+
+  public DatanodeIDProto toProto() {
+    return DatanodeIDProto.newBuilder().setUuid(toProto(uuid)).build();
+  }
+
+  public static DatanodeID fromProto(final DatanodeIDProto proto) {
+    return of(fromProto(proto.getUuid()));
+  }
+
+  public static DatanodeID fromUuidString(final String id) {
+    return of(UUID.fromString(id));
+  }
+
+  public static DatanodeID of(final UUID id) {
+    return CACHE.computeIfAbsent(id, DatanodeID::new);
+  }
+
+  /**
+   * Returns a random DatanodeID.
+   */
+  public static DatanodeID randomID() {
+    // We don't want to add Random ID to cache.
+    return new DatanodeID(UUID.randomUUID());
+  }
+
+  private static UUID fromProto(final HddsProtos.UUID id) {
+    return new UUID(id.getMostSigBits(), id.getLeastSigBits());
+  }
+
+  private static HddsProtos.UUID toProto(final UUID id) {
+    return HddsProtos.UUID.newBuilder()
+        .setMostSigBits(id.getMostSignificantBits())
+        .setLeastSigBits(id.getLeastSignificantBits())
+        .build();
+  }
+
+  // TODO: Remove this in follow-up Jira. (HDDS-12015)
+  UUID getUuid() {
+    return uuid;
+  }
+}
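A quick sketch of the caching behaviour of the new class (illustration only; the demo class name is made up). of(), fromUuidString() and fromProto() all resolve through the shared ConcurrentMap cache, while randomID() deliberately bypasses it:

    import java.util.UUID;
    import org.apache.hadoop.hdds.protocol.DatanodeID;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeIDProto;

    // Hypothetical demo class, not part of the patch.
    public class DatanodeIdCacheSketch {
      public static void main(String[] args) {
        UUID uuid = UUID.randomUUID();

        DatanodeID a = DatanodeID.of(uuid);
        DatanodeID b = DatanodeID.fromUuidString(uuid.toString());
        System.out.println(a == b);                            // true: same cached instance

        // Proto round-trip goes back through of(), so the cached instance is returned.
        DatanodeIDProto proto = a.toProto();
        System.out.println(DatanodeID.fromProto(proto) == a);  // true

        // randomID() skips the cache on purpose; a later of()/fromUuidString() lookup
        // creates a separate (but equal) cached instance.
        DatanodeID r = DatanodeID.randomID();
        DatanodeID cached = DatanodeID.fromUuidString(r.toString());
        System.out.println(r == cached);                       // false
        System.out.println(r.equals(cached));                  // true
      }
    }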
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
index a6980e232b17..b45f435387b3 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
@@ -25,7 +25,6 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.UUID;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -39,6 +38,7 @@
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.conf.ReconfigurationHandler;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.DatanodeID;
 import org.apache.hadoop.hdds.protocol.SecretKeyProtocol;
 import org.apache.hadoop.hdds.protocolPB.SCMSecurityProtocolClientSideTranslatorPB;
 import org.apache.hadoop.hdds.scm.ha.SCMHAUtils;
@@ -439,7 +439,7 @@ private DatanodeDetails initializeDatanodeDetails()
     } else {
       // There is no datanode.id file, this might be the first time datanode
       // is started.
-      details = DatanodeDetails.newBuilder().setUuid(UUID.randomUUID()).build();
+      details = DatanodeDetails.newBuilder().setID(DatanodeID.randomID()).build();
       details.setInitialVersion(getInitialVersion());
     }
     // Current version is always overridden to the latest
diff --git a/hadoop-hdds/interface-client/src/main/proto/hdds.proto b/hadoop-hdds/interface-client/src/main/proto/hdds.proto
index 1fc5884e24fa..28c8a713492a 100644
--- a/hadoop-hdds/interface-client/src/main/proto/hdds.proto
+++ b/hadoop-hdds/interface-client/src/main/proto/hdds.proto
@@ -33,8 +33,12 @@ message UUID {
     required int64 leastSigBits = 2;
 }
 
+message DatanodeIDProto {
+    required UUID uuid = 1;
+}
+
 message DatanodeDetailsProto {
-    // deprecated, please use uuid128 instead
+    // deprecated, please use DatanodeIDProto instead
     optional string uuid = 1;  // UUID assigned to the Datanode.
     required string ipAddress = 2;     // IP address
     required string hostName = 3;      // hostname
@@ -49,6 +53,8 @@ message DatanodeDetailsProto {
     // TODO(runzhiwang): when uuid is gone, specify 1 as the index of uuid128 and mark as required
     optional UUID uuid128 = 100; // UUID with 128 bits assigned to the Datanode.
     optional uint32 level = 101;
+    // TODO: Replace UUID with DatanodeID and make it required
+    optional DatanodeIDProto id = 102;
 }
 
 /**
@@ -123,14 +129,20 @@ message Pipeline {
     optional ReplicationType type = 3 [default = STAND_ALONE];
     optional ReplicationFactor factor = 4 [default = ONE];
     required PipelineID id = 5;
+    // TODO: Deprecate this and replace with leaderDatanodeID
     optional string leaderID = 6;
     repeated uint32 memberOrders = 7;
    optional uint64 creationTimeStamp = 8;
+    // TODO: Deprecate this and replace with suggestedLeaderDatanodeID
     optional UUID suggestedLeaderID = 9;
     repeated uint32 memberReplicaIndexes = 10;
     optional ECReplicationConfig ecReplicationConfig = 11;
-    // TODO(runzhiwang): when leaderID is gone, specify 6 as the index of leaderID128
+
+    // TODO: Replace UUID with DatanodeIDProto
     optional UUID leaderID128 = 100;
+
+    optional DatanodeIDProto leaderDatanodeID = 101;
+    optional DatanodeIDProto suggestedLeaderDatanodeID = 102;
 }
 
 message KeyValue {
@@ -324,6 +336,7 @@ enum ScmOps {
 }
 
 message ExcludeListProto {
+    // TODO: Replace with DatanodeID
     repeated string datanodes = 1;
     // Replace int64 with ContainerID message
     repeated int64 containerIds = 2;
diff --git a/hadoop-hdds/interface-server/src/main/proto/ScmServerDatanodeHeartbeatProtocol.proto b/hadoop-hdds/interface-server/src/main/proto/ScmServerDatanodeHeartbeatProtocol.proto
index 2994073c0240..648cf77883dd 100644
--- a/hadoop-hdds/interface-server/src/main/proto/ScmServerDatanodeHeartbeatProtocol.proto
+++ b/hadoop-hdds/interface-server/src/main/proto/ScmServerDatanodeHeartbeatProtocol.proto
@@ -150,6 +150,7 @@ message CommandQueueReportProto {
 /**
  * A group of commands for the datanode to execute
  */
 message SCMHeartbeatResponseProto {
+  // TODO: change this to DatanodeID
   required string datanodeUUID = 1;
   repeated SCMCommandProto commands = 2;
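Wire-compatibility sketch for the proto changes (assumptions flagged inline, not part of the patch): a proto written by an old client that only fills uuid128 still resolves to the same DatanodeID on a new reader, because DatanodeDetails.newBuilder(proto) prefers the new id field and falls back to uuid128 and then uuid. The demo class is hypothetical and assumes ipAddress and hostName are the only required fields, as shown in DatanodeDetailsProto above.

    import java.util.UUID;
    import org.apache.hadoop.hdds.protocol.DatanodeDetails;
    import org.apache.hadoop.hdds.protocol.DatanodeID;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;

    // Hypothetical demo class, not part of the patch.
    public class DatanodeIdWireCompatSketch {
      public static void main(String[] args) {
        UUID uuid = UUID.randomUUID();
        // What a pre-DatanodeIDProto writer puts on the wire: uuid128, but no id field.
        HddsProtos.DatanodeDetailsProto legacyProto =
            HddsProtos.DatanodeDetailsProto.newBuilder()
                .setUuid128(HddsProtos.UUID.newBuilder()
                    .setMostSigBits(uuid.getMostSignificantBits())
                    .setLeastSigBits(uuid.getLeastSignificantBits()))
                .setIpAddress("127.0.0.1")  // required field (assumption: no other required fields)
                .setHostName("localhost")   // required field
                .build();

        DatanodeDetails decoded = DatanodeDetails.newBuilder(legacyProto).build();
        System.out.println(decoded.getID() == DatanodeID.of(uuid));  // true: uuid128 fallback path
      }
    }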