org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java
@@ -26,13 +26,17 @@
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.hadoop.hdds.annotation.InterfaceAudience;
import org.apache.hadoop.hdds.annotation.InterfaceStability;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;

import static org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type.S3AUTHINFO;


/**
* The token identifier for Ozone Master.
*/
@@ -77,6 +81,55 @@ public Text getKind() {
return KIND_NAME;
}

/**
* Instead of relying on proto serialization, this provides explicit
* serialization for OzoneTokenIdentifier.
* @return the identifier serialized as a byte array
*/
public byte[] toUniqueSerializedKey() {
DataOutputBuffer buf = new DataOutputBuffer();
try {
super.write(buf);
WritableUtils.writeVInt(buf, getTokenType().getNumber());
// Set s3 specific fields.
if (getTokenType().equals(S3AUTHINFO)) {
WritableUtils.writeString(buf, getAwsAccessId());
WritableUtils.writeString(buf, getSignature());
WritableUtils.writeString(buf, getStrToSign());
} else {
WritableUtils.writeString(buf, getOmCertSerialId());
WritableUtils.writeString(buf, getOmServiceId());
}
} catch (IOException e) {
throw new IllegalArgumentException(
"Can't encode the raw data", e);
}
// Note: DataOutputBuffer#getData() returns the whole backing array,
// valid only up to getLength(); readers consume the stream contents.
return buf.getData();
}

/**
* Instead of relying on proto deserialization, this provides explicit
* deserialization for OzoneTokenIdentifier.
* @return this identifier, populated from the raw bytes
*/
public OzoneTokenIdentifier fromUniqueSerializedKey(byte[] rawData)
throws IOException {
DataInputBuffer in = new DataInputBuffer();
in.reset(rawData, rawData.length);
super.readFields(in);
int type = WritableUtils.readVInt(in);
// Set s3 specific fields.
if (type == S3AUTHINFO.getNumber()) {
this.tokenType = Type.S3AUTHINFO;
setAwsAccessId(WritableUtils.readString(in));
setSignature(WritableUtils.readString(in));
setStrToSign(WritableUtils.readString(in));
} else {
this.tokenType = Type.DELEGATION_TOKEN;
setOmCertSerialId(WritableUtils.readString(in));
setOmServiceId(WritableUtils.readString(in));
}
return this;
}

/**
* Overrides default implementation to write using Protobuf.
*
@@ -92,7 +145,6 @@ public void write(DataOutput out) throws IOException {
.setRealUser(getRealUser().toString())
.setRenewer(getRenewer().toString())
.setIssueDate(getIssueDate())
.setMaxDate(getMaxDate())
.setSequenceNumber(getSequenceNumber())
.setMasterKeyId(getMasterKeyId());

@@ -332,4 +384,4 @@ public String toString() {
.append(", omServiceId=").append(getOmServiceId());
return buffer.toString();
}
}
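
For orientation, here is a minimal round-trip sketch for the two new methods above. It is illustrative only, not code from this PR: it uses just the members already visible in this diff (newInstance, the OM setters, and the two serialization methods), and it assumes a freshly created identifier defaults to the DELEGATION_TOKEN type.

// Hedged sketch, not part of this PR: round-trip an identifier through
// the new explicit serialization. Assumes tokenType defaults to
// DELEGATION_TOKEN for a fresh instance.
static OzoneTokenIdentifier roundTrip() throws IOException {
  OzoneTokenIdentifier id = OzoneTokenIdentifier.newInstance();
  id.setOmCertSerialId("certSerial-1");  // illustrative value
  id.setOmServiceId("om-service-1");     // illustrative value

  byte[] key = id.toUniqueSerializedKey();

  // fromUniqueSerializedKey mutates and returns its receiver, so decode
  // into a fresh instance rather than reusing id.
  return OzoneTokenIdentifier.newInstance().fromUniqueSerializedKey(key);
}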
org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
@@ -19,10 +19,12 @@

import com.google.common.base.Preconditions;
import com.google.protobuf.InvalidProtocolBufferException;

import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
import org.apache.hadoop.hdds.utils.db.Codec;

import java.io.IOException;
import java.nio.BufferUnderflowException;

/**
* Codec to encode OzoneTokenIdentifier as byte array.
@@ -33,7 +35,7 @@ public class TokenIdentifierCodec implements Codec<OzoneTokenIdentifier> {
public byte[] toPersistedFormat(OzoneTokenIdentifier object) {
Preconditions
.checkNotNull(object, "Null object can't be converted to byte array.");
return object.getBytes();
return object.toUniqueSerializedKey();
}

@Override
@@ -42,8 +44,16 @@ public OzoneTokenIdentifier fromPersistedFormat(byte[] rawData)
Preconditions.checkNotNull(rawData,
"Null byte array can't converted to real object.");
try {
return OzoneTokenIdentifier.readProtoBuf(rawData);
} catch (InvalidProtocolBufferException e) {
OzoneTokenIdentifier object = OzoneTokenIdentifier.newInstance();
return object.fromUniqueSerializedKey(rawData);
} catch (IOException ex) {
try {
return OzoneTokenIdentifier.readProtoBuf(rawData);
} catch (InvalidProtocolBufferException e) {
throw new IllegalArgumentException(
"Can't encode the the raw data from the byte array", e);
}
} catch (BufferUnderflowException e) {
throw new IllegalArgumentException(
"Can't encode the the raw data from the byte array", e);
}
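The decode path above is deliberately two-tiered: the codec first tries the new explicit layout and falls back to protobuf on IOException, so tokens persisted before this change remain readable while new writes use toUniqueSerializedKey. A condensed usage sketch, illustrative only and using just the methods shown in this diff:

// Hedged sketch, not part of this PR: persist and restore a token
// through the codec.
static OzoneTokenIdentifier persistAndRestore(OzoneTokenIdentifier id)
    throws IOException {
  TokenIdentifierCodec codec = new TokenIdentifierCodec();
  // Writes the new explicit Writable layout.
  byte[] persisted = codec.toPersistedFormat(id);
  // Accepts either the new layout or bytes from the old protobuf-based
  // getBytes(), thanks to the fallback above.
  return codec.fromPersistedFormat(persisted);
}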
TestOzoneTokenIdentifier.java
@@ -47,6 +47,7 @@
import org.apache.hadoop.hdds.conf.ConfigurationSource;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ozone.om.codec.TokenIdentifierCodec;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
import org.apache.hadoop.security.ssl.TestSSLFactory;
import org.apache.hadoop.security.token.Token;
@@ -327,4 +328,4 @@ public void testTokenSerialization() throws IOException {
idDecode.readFields(in);
Assert.assertEquals(idEncode, idDecode);
}

@Test
public void testTokenPersistence() throws IOException {
OzoneTokenIdentifier idWrite = getIdentifierInst();
idWrite.setOmServiceId("defaultServiceId");

byte[] oldIdBytes = idWrite.getBytes();
TokenIdentifierCodec idCodec = new TokenIdentifierCodec();

OzoneTokenIdentifier idRead = null;
try {
idRead = idCodec.fromPersistedFormat(oldIdBytes);
} catch (IOException ex) {
Assert.fail("Should not fail to load old token format");
}
Assert.assertEquals("Deserialize Serialized Token should equal.",
idWrite, idRead);
}
}
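
A natural companion to testTokenPersistence, not included in this PR, would assert that new-format bytes round-trip through the codec as well. A hypothetical sketch, reusing the existing getIdentifierInst() helper:

// Hypothetical companion test (not in this PR): new-format bytes from
// toUniqueSerializedKey should also decode through the codec.
@Test
public void testNewTokenFormatPersistence() throws IOException {
  OzoneTokenIdentifier idWrite = getIdentifierInst();
  idWrite.setOmServiceId("defaultServiceId");

  // New-format bytes, as toPersistedFormat would now produce them.
  byte[] newIdBytes = idWrite.toUniqueSerializedKey();

  TokenIdentifierCodec idCodec = new TokenIdentifierCodec();
  OzoneTokenIdentifier idRead = idCodec.fromPersistedFormat(newIdBytes);
  Assert.assertEquals("Round-tripped token should equal the original.",
      idWrite, idRead);
}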