You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ozone.apache.org by xy...@apache.org on 2020/07/16 20:58:57 UTC

[hadoop-ozone] branch ozone-0.6.0 updated: HDDS-3926. OM Token Identifier table should use in-house serialization. (#1182)

This is an automated email from the ASF dual-hosted git repository.

xyao pushed a commit to branch ozone-0.6.0
in repository https://gitbox.apache.org/repos/asf/hadoop-ozone.git


The following commit(s) were added to refs/heads/ozone-0.6.0 by this push:
     new ca9b368  HDDS-3926. OM Token Identifier table should use in-house serialization. (#1182)
ca9b368 is described below

commit ca9b368175bdb06ccb1ebd2bd93e6f33bfffc3f2
Author: prashantpogde <pr...@gmail.com>
AuthorDate: Thu Jul 16 13:56:29 2020 -0700

    HDDS-3926. OM Token Identifier table should use in-house serialization. (#1182)
    
    
    (cherry picked from commit 7e37f7b0aecda2aa8c6f4e3eccf8d6ac20e9f69e)
---
 .../ozone/security/OzoneTokenIdentifier.java       | 56 +++++++++++++++++++++-
 .../ozone/om/codec/TokenIdentifierCodec.java       | 16 +++++--
 .../ozone/security/TestOzoneTokenIdentifier.java   | 19 ++++++++
 3 files changed, 86 insertions(+), 5 deletions(-)

diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java
index 290dd1d..c0b1ddb 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java
@@ -26,13 +26,17 @@ import java.util.Arrays;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.hadoop.hdds.annotation.InterfaceAudience;
 import org.apache.hadoop.hdds.annotation.InterfaceStability;
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 
 import static org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type.S3AUTHINFO;
 
+
 /**
  * The token identifier for Ozone Master.
  */
@@ -77,6 +81,55 @@ public class OzoneTokenIdentifier extends
     return KIND_NAME;
   }
 
+  /** Instead of relying on proto serialization, this
+   *  provides explicit serialization for OzoneTokenIdentifier.
+   * @return byte[]
+   */
+  public byte[] toUniqueSerializedKey() {
+    DataOutputBuffer buf = new DataOutputBuffer();
+    try {
+      super.write(buf);
+      WritableUtils.writeVInt(buf, getTokenType().getNumber());
+      // Set s3 specific fields.
+      if (getTokenType().equals(S3AUTHINFO)) {
+        WritableUtils.writeString(buf, getAwsAccessId());
+        WritableUtils.writeString(buf, getSignature());
+        WritableUtils.writeString(buf, getStrToSign());
+      } else {
+        WritableUtils.writeString(buf, getOmCertSerialId());
+        WritableUtils.writeString(buf, getOmServiceId());
+      }
+    } catch (java.io.IOException e) {
+      throw new IllegalArgumentException(
+          "Can't encode the the raw data ", e);
+    }
+    return buf.getData();
+  }
+
+  /** Instead of relying on proto deserialization, this
+   *  provides explicit deserialization for OzoneTokenIdentifier.
+   * @return OzoneTokenIdentifier
+   */
+  public OzoneTokenIdentifier fromUniqueSerializedKey(byte[] rawData)
+      throws IOException {
+    DataInputBuffer in = new DataInputBuffer();
+    in.reset(rawData, rawData.length);
+    super.readFields(in);
+    int type = WritableUtils.readVInt(in);
+    // Set s3 specific fields.
+    if (type == S3AUTHINFO.getNumber()) {
+      this.tokenType = Type.S3AUTHINFO;
+      setAwsAccessId(WritableUtils.readString(in));
+      setSignature(WritableUtils.readString(in));
+      setStrToSign(WritableUtils.readString(in));
+    } else {
+      this.tokenType = Type.DELEGATION_TOKEN;
+      setOmCertSerialId(WritableUtils.readString(in));
+      setOmServiceId(WritableUtils.readString(in));
+    }
+    return this;
+  }
+
   /**
    * Overrides default implementation to write using Protobuf.
    *
@@ -92,7 +145,6 @@ public class OzoneTokenIdentifier extends
         .setRealUser(getRealUser().toString())
         .setRenewer(getRenewer().toString())
         .setIssueDate(getIssueDate())
-        .setMaxDate(getMaxDate())
         .setSequenceNumber(getSequenceNumber())
         .setMasterKeyId(getMasterKeyId());
 
@@ -332,4 +384,4 @@ public class OzoneTokenIdentifier extends
         .append(", omServiceId=").append(getOmServiceId());
     return buffer.toString();
   }
-}
\ No newline at end of file
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
index 22656d8..592cae3 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
@@ -19,10 +19,12 @@ package org.apache.hadoop.ozone.om.codec;
 
 import com.google.common.base.Preconditions;
 import com.google.protobuf.InvalidProtocolBufferException;
+
 import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
 import org.apache.hadoop.hdds.utils.db.Codec;
 
 import java.io.IOException;
+import java.nio.BufferUnderflowException;
 
 /**
  * Codec to encode TokenIdentifierCodec as byte array.
@@ -33,7 +35,7 @@ public class TokenIdentifierCodec implements Codec<OzoneTokenIdentifier> {
   public byte[] toPersistedFormat(OzoneTokenIdentifier object) {
     Preconditions
         .checkNotNull(object, "Null object can't be converted to byte array.");
-    return object.getBytes();
+    return object.toUniqueSerializedKey();
   }
 
   @Override
@@ -42,8 +44,16 @@ public class TokenIdentifierCodec implements Codec<OzoneTokenIdentifier> {
     Preconditions.checkNotNull(rawData,
         "Null byte array can't converted to real object.");
     try {
-      return OzoneTokenIdentifier.readProtoBuf(rawData);
-    } catch (InvalidProtocolBufferException e) {
+      OzoneTokenIdentifier object = OzoneTokenIdentifier.newInstance();
+      return object.fromUniqueSerializedKey(rawData);
+    } catch (IOException ex) {
+      try {
+        return OzoneTokenIdentifier.readProtoBuf(rawData);
+      } catch (InvalidProtocolBufferException e) {
+        throw new IllegalArgumentException(
+            "Can't encode the the raw data from the byte array", e);
+      }
+    } catch (BufferUnderflowException e) {
       throw new IllegalArgumentException(
           "Can't encode the the raw data from the byte array", e);
     }
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java
index 518953f..391759a 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdds.conf.ConfigurationSource;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ozone.om.codec.TokenIdentifierCodec;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.security.ssl.TestSSLFactory;
 import org.apache.hadoop.security.token.Token;
@@ -327,4 +328,22 @@ public class TestOzoneTokenIdentifier {
     idDecode.readFields(in);
     Assert.assertEquals(idEncode, idDecode);
   }
+
+  @Test
+  public void testTokenPersistence() throws IOException {
+    OzoneTokenIdentifier idWrite = getIdentifierInst();
+    idWrite.setOmServiceId("defaultServiceId");
+
+    byte[] oldIdBytes = idWrite.getBytes();
+    TokenIdentifierCodec idCodec = new TokenIdentifierCodec();
+
+    OzoneTokenIdentifier idRead = null;
+    try {
+      idRead =  idCodec.fromPersistedFormat(oldIdBytes);
+    } catch (IOException ex) {
+      Assert.fail("Should not fail to load old token format");
+    }
+    Assert.assertEquals("Deserialize Serialized Token should equal.",
+        idWrite, idRead);
+  }
 }
\ No newline at end of file


---------------------------------------------------------------------
To unsubscribe, e-mail: ozone-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: ozone-commits-help@hadoop.apache.org