diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java
index 290dd1d4f95f..c0b1ddbd1dd9 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java
@@ -26,13 +26,17 @@
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.hadoop.hdds.annotation.InterfaceAudience;
 import org.apache.hadoop.hdds.annotation.InterfaceStability;
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 
 import static org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type.S3AUTHINFO;
 
+
 /**
  * The token identifier for Ozone Master.
  */
@@ -77,6 +81,55 @@ public Text getKind() {
     return KIND_NAME;
   }
 
+  /** Instead of relying on proto serialization, this
+   * provides explicit serialization for OzoneTokenIdentifier.
+   * @return byte[]
+   */
+  public byte[] toUniqueSerializedKey() {
+    DataOutputBuffer buf = new DataOutputBuffer();
+    try {
+      super.write(buf);
+      WritableUtils.writeVInt(buf, getTokenType().getNumber());
+      // Set s3 specific fields.
+      if (getTokenType().equals(S3AUTHINFO)) {
+        WritableUtils.writeString(buf, getAwsAccessId());
+        WritableUtils.writeString(buf, getSignature());
+        WritableUtils.writeString(buf, getStrToSign());
+      } else {
+        WritableUtils.writeString(buf, getOmCertSerialId());
+        WritableUtils.writeString(buf, getOmServiceId());
+      }
+    } catch (java.io.IOException e) {
+      throw new IllegalArgumentException(
+          "Can't encode the raw data", e);
+    }
+    return buf.getData();
+  }
+
+  /** Instead of relying on proto deserialization, this
+   * provides explicit deserialization for OzoneTokenIdentifier.
+   * @return OzoneTokenIdentifier
+   */
+  public OzoneTokenIdentifier fromUniqueSerializedKey(byte[] rawData)
+      throws IOException {
+    DataInputBuffer in = new DataInputBuffer();
+    in.reset(rawData, rawData.length);
+    super.readFields(in);
+    int type = WritableUtils.readVInt(in);
+    // Set s3 specific fields.
+    if (type == S3AUTHINFO.getNumber()) {
+      this.tokenType = Type.S3AUTHINFO;
+      setAwsAccessId(WritableUtils.readString(in));
+      setSignature(WritableUtils.readString(in));
+      setStrToSign(WritableUtils.readString(in));
+    } else {
+      this.tokenType = Type.DELEGATION_TOKEN;
+      setOmCertSerialId(WritableUtils.readString(in));
+      setOmServiceId(WritableUtils.readString(in));
+    }
+    return this;
+  }
+
   /**
    * Overrides default implementation to write using Protobuf.
    *
@@ -92,7 +145,6 @@ public void write(DataOutput out) throws IOException {
         .setRealUser(getRealUser().toString())
         .setRenewer(getRenewer().toString())
         .setIssueDate(getIssueDate())
-        .setMaxDate(getMaxDate())
         .setSequenceNumber(getSequenceNumber())
         .setMasterKeyId(getMasterKeyId());
 
@@ -332,4 +384,4 @@ public String toString() {
         .append(", omServiceId=").append(getOmServiceId());
     return buffer.toString();
   }
-}
\ No newline at end of file
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
index 22656d887b66..592cae3a366c 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
@@ -19,10 +19,12 @@
 
 import com.google.common.base.Preconditions;
 import com.google.protobuf.InvalidProtocolBufferException;
+
 import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
 import org.apache.hadoop.hdds.utils.db.Codec;
 
 import java.io.IOException;
+import java.nio.BufferUnderflowException;
 
 /**
  * Codec to encode TokenIdentifierCodec as byte array.
@@ -33,7 +35,7 @@ public class TokenIdentifierCodec implements Codec<OzoneTokenIdentifier> {
   public byte[] toPersistedFormat(OzoneTokenIdentifier object) {
     Preconditions
         .checkNotNull(object, "Null object can't be converted to byte array.");
-    return object.getBytes();
+    return object.toUniqueSerializedKey();
   }
 
   @Override
@@ -42,8 +44,16 @@ public OzoneTokenIdentifier fromPersistedFormat(byte[] rawData)
     Preconditions.checkNotNull(rawData,
         "Null byte array can't converted to real object.");
     try {
-      return OzoneTokenIdentifier.readProtoBuf(rawData);
-    } catch (InvalidProtocolBufferException e) {
+      OzoneTokenIdentifier object = OzoneTokenIdentifier.newInstance();
+      return object.fromUniqueSerializedKey(rawData);
+    } catch (IOException ex) {
+      try {
+        return OzoneTokenIdentifier.readProtoBuf(rawData);
+      } catch (InvalidProtocolBufferException e) {
+        throw new IllegalArgumentException(
+            "Can't encode the raw data from the byte array", e);
+      }
+    } catch (BufferUnderflowException e) {
       throw new IllegalArgumentException(
           "Can't encode the the raw data from the byte array", e);
     }
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java
index 518953f91c62..391759a8df54 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java
@@ -47,6 +47,7 @@
 import org.apache.hadoop.hdds.conf.ConfigurationSource;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ozone.om.codec.TokenIdentifierCodec;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.security.ssl.TestSSLFactory;
 import org.apache.hadoop.security.token.Token;
@@ -327,4 +328,22 @@ public void testTokenSerialization() throws IOException {
     idDecode.readFields(in);
     Assert.assertEquals(idEncode, idDecode);
   }
+
+  @Test
+  public void testTokenPersistence() throws IOException {
+    OzoneTokenIdentifier idWrite = getIdentifierInst();
+    idWrite.setOmServiceId("defaultServiceId");
+
+    byte[] oldIdBytes = idWrite.getBytes();
+    TokenIdentifierCodec idCodec = new TokenIdentifierCodec();
+
+    OzoneTokenIdentifier idRead = null;
+    try {
+      idRead = idCodec.fromPersistedFormat(oldIdBytes);
+    } catch (IOException ex) {
+      Assert.fail("Should not fail to load old token format");
+    }
+    Assert.assertEquals("Deserialize Serialized Token should equal.",
+        idWrite, idRead);
+  }
 }
\ No newline at end of file