Posted to common-commits@hadoop.apache.org by cl...@apache.org on 2019/08/16 17:59:32 UTC

[hadoop] 04/04: HDFS-14611. Move handshake secret field from Token to BlockAccessToken. Contributed by Chen Liang.

This is an automated email from the ASF dual-hosted git repository.

cliang pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/hadoop.git

commit d8a6098a9684762f8a47802e518f2cdfada41d58
Author: Chen Liang <cl...@apache.org>
AuthorDate: Thu Jul 11 13:23:25 2019 -0700

    HDFS-14611. Move handshake secret field from Token to BlockAccessToken. Contributed by Chen Liang.
---
 .../org/apache/hadoop/security/token/Token.java    | 29 +++--------
 .../hadoop-common/src/main/proto/Security.proto    |  1 -
 .../datatransfer/sasl/SaslDataTransferClient.java  | 55 +++++++++++++-------
 .../hadoop/hdfs/protocolPB/PBHelperClient.java     | 12 ++---
 .../security/token/block/BlockTokenIdentifier.java | 32 ++++++++++++
 .../hadoop-hdfs-client/src/main/proto/hdfs.proto   |  1 +
 .../datatransfer/sasl/SaslDataTransferServer.java  | 28 +---------
 .../token/block/BlockTokenSecretManager.java       | 59 ++++++++++++----------
 .../hdfs/server/blockmanagement/BlockManager.java  |  8 ++-
 .../hdfs/server/namenode/NameNodeRpcServer.java    | 47 +----------------
 .../hadoop/hdfs/TestBlockTokenWrappingQOP.java     | 43 +++-------------
 11 files changed, 131 insertions(+), 184 deletions(-)
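
At a glance: before this change the DataNode handshake secret rode on Token itself, as an extra field in Token's Writable format and in the generic TokenProto, which altered the wire format of every token type in Hadoop. The patch scopes it to HDFS instead: the secret becomes a handshakeMsg field on BlockTokenIdentifier (serialized through BlockTokenSecretProto), and the NameNode wraps the established QOP at token-generation time in BlockTokenSecretManager rather than post-processing each LocatedBlock in NameNodeRpcServer. A minimal sketch of the resulting client-side read path, assuming the NameNode has the send-QOP feature enabled (see the BlockManager hunk below); the helper name is illustrative:

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.hdfs.protocol.LocatedBlock;
    import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
    import org.apache.hadoop.security.token.Token;

    // Hypothetical helper: extract the QOP the NameNode wrapped into a
    // block access token, or null when the field is absent (older NN, or
    // the feature is disabled).
    static String wrappedQop(LocatedBlock lb) throws IOException {
      Token<BlockTokenIdentifier> token = lb.getBlockToken();
      BlockTokenIdentifier id = token.decodeIdentifier(); // null if kind unknown
      if (id == null || id.getHandshakeMsg() == null
          || id.getHandshakeMsg().length == 0) {
        return null;
      }
      return new String(id.getHandshakeMsg(), StandardCharsets.UTF_8);
    }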

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
index c8a10cc..6df62fa 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
@@ -56,7 +56,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
   private Text kind;
   private Text service;
   private TokenRenewer renewer;
-  private byte[] dnHandshakeSecret;
 
   /**
    * Construct a token given a token identifier and a secret manager for the
@@ -69,7 +68,14 @@ public class Token<T extends TokenIdentifier> implements Writable {
     identifier = id.getBytes();
     kind = id.getKind();
     service = new Text();
-    dnHandshakeSecret = new byte[0];
+  }
+
+  public void setID(byte[] bytes) {
+    identifier = bytes;
+  }
+
+  public void setPassword(byte[] newPassword) {
+    password = newPassword;
   }
 
   /**
@@ -84,7 +90,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     this.password = (password == null)? new byte[0] : password;
     this.kind = (kind == null)? new Text() : kind;
     this.service = (service == null)? new Text() : service;
-    this.dnHandshakeSecret = new byte[0];
   }
 
   /**
@@ -95,7 +100,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     password = new byte[0];
     kind = new Text();
     service = new Text();
-    dnHandshakeSecret = new byte[0];
   }
 
   /**
@@ -107,7 +111,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     this.password = other.password.clone();
     this.kind = new Text(other.kind);
     this.service = new Text(other.service);
-    this.dnHandshakeSecret = other.dnHandshakeSecret.clone();
   }
 
   public Token<T> copyToken() {
@@ -123,7 +126,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     this.password = tokenPB.getPassword().toByteArray();
     this.kind = new Text(tokenPB.getKindBytes().toByteArray());
     this.service = new Text(tokenPB.getServiceBytes().toByteArray());
-    this.dnHandshakeSecret = new byte[0];
   }
 
   /**
@@ -149,14 +151,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     return identifier;
   }
 
-  public byte[] getDnHandshakeSecret() {
-    return dnHandshakeSecret;
-  }
-
-  public void setDNHandshakeSecret(byte[] secret) {
-    this.dnHandshakeSecret = secret;
-  }
-
   private static Class<? extends TokenIdentifier>
       getClassForIdentifier(Text kind) {
     Class<? extends TokenIdentifier> cls = null;
@@ -351,11 +345,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     in.readFully(password);
     kind.readFields(in);
     service.readFields(in);
-    len = WritableUtils.readVInt(in);
-    if (dnHandshakeSecret == null || dnHandshakeSecret.length != len) {
-      dnHandshakeSecret = new byte[len];
-    }
-    in.readFully(dnHandshakeSecret);
   }
 
   @Override
@@ -366,8 +355,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     out.write(password);
     kind.write(out);
     service.write(out);
-    WritableUtils.writeVInt(out, dnHandshakeSecret.length);
-    out.write(dnHandshakeSecret);
   }
 
   /**
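
Token is back to the stock four-field wire format (identifier, password, kind, service). The new setID()/setPassword() mutators exist so that SaslDataTransferClient can rewrite the identifier bytes and re-derive a matching password, since the password is an HMAC computed over the identifier with the block key. A sketch of that invariant (the updateToken() hunk further down does exactly this):

    import javax.crypto.SecretKey;
    import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
    import org.apache.hadoop.security.token.SecretManager;
    import org.apache.hadoop.security.token.Token;

    // Sketch: identifier and password must change together; a token whose
    // password was not recomputed over the new identifier bytes will fail
    // the DataNode's HMAC check.
    static void resign(Token<BlockTokenIdentifier> token,
        BlockTokenIdentifier id, SecretKey key) {
      byte[] idBytes = id.getBytes();
      token.setPassword(SecretManager.createPassword(idBytes, key));
      token.setID(idBytes);
    }
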
diff --git a/hadoop-common-project/hadoop-common/src/main/proto/Security.proto b/hadoop-common-project/hadoop-common/src/main/proto/Security.proto
index 4cf4520..037a878 100644
--- a/hadoop-common-project/hadoop-common/src/main/proto/Security.proto
+++ b/hadoop-common-project/hadoop-common/src/main/proto/Security.proto
@@ -36,7 +36,6 @@ message TokenProto {
   required bytes password = 2;
   required string kind = 3;
   required string service = 4;
-  optional bytes handshakeSecret = 5;
 }
 
 message CredentialsKVProto {
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java
index 8d1c7f6..0adb41e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java
@@ -329,9 +329,7 @@ public class SaslDataTransferClient {
     if (secretKey != null) {
       LOG.debug("DataNode overwriting downstream QOP" +
           saslProps.get(Sasl.QOP));
-      byte[] newSecret =  SecretManager.createPassword(saslProps.get(Sasl.QOP)
-          .getBytes(Charsets.UTF_8), secretKey);
-      accessToken.setDNHandshakeSecret(newSecret);
+      updateToken(accessToken, secretKey, saslProps);
     }
 
     LOG.debug("Client using encryption algorithm {}",
@@ -447,9 +445,7 @@ public class SaslDataTransferClient {
       }
       LOG.debug("DataNode overwriting downstream QOP " +
           saslProps.get(Sasl.QOP));
-      byte[] newSecret = SecretManager.createPassword(
-          saslProps.get(Sasl.QOP).getBytes(Charsets.UTF_8), secretKey);
-      accessToken.setDNHandshakeSecret(newSecret);
+      updateToken(accessToken, secretKey, saslProps);
     }
     targetQOP = saslProps.get(Sasl.QOP);
     String userName = buildUserName(accessToken);
@@ -460,6 +456,18 @@ public class SaslDataTransferClient {
         saslProps, callbackHandler, accessToken);
   }
 
+  private void updateToken(Token<BlockTokenIdentifier> accessToken,
+      SecretKey secretKey, Map<String, String> saslProps)
+      throws IOException {
+    byte[] newSecret = saslProps.get(Sasl.QOP).getBytes(Charsets.UTF_8);
+    BlockTokenIdentifier bkid = accessToken.decodeIdentifier();
+    bkid.setHandshakeMsg(newSecret);
+    byte[] bkidBytes = bkid.getBytes();
+    accessToken.setPassword(
+        SecretManager.createPassword(bkidBytes, secretKey));
+    accessToken.setID(bkidBytes);
+  }
+
   /**
    * Builds the client's user name for the general-purpose handshake, consisting
    * of the base64-encoded serialized block access token identifier.  Note that
@@ -516,20 +524,29 @@ public class SaslDataTransferClient {
     try {
       // Start of handshake - "initial response" in SASL terminology.
       // The handshake secret can be null, this happens when client is running
-      // a new version but the cluster does not have this feature. In which case
-      // there will be no encrypted secret sent from NN.
-      byte[] handshakeSecret = accessToken.getDnHandshakeSecret();
-      if (handshakeSecret == null || handshakeSecret.length == 0) {
-        LOG.debug("Handshake secret is null, sending without "
-            + "handshake secret.");
-        sendSaslMessage(out, new byte[0]);
+      // a new version but the cluster does not have this feature.
+      // In which case there will be no encrypted secret sent from NN.
+      BlockTokenIdentifier blockTokenIdentifier =
+          accessToken.decodeIdentifier();
+      if (blockTokenIdentifier != null) {
+        byte[] handshakeSecret =
+            accessToken.decodeIdentifier().getHandshakeMsg();
+        if (handshakeSecret == null || handshakeSecret.length == 0) {
+          LOG.debug("Handshake secret is null, "
+              + "sending without handshake secret.");
+          sendSaslMessage(out, new byte[0]);
+        } else {
+          LOG.debug("Sending handshake secret.");
+          BlockTokenIdentifier identifier = new BlockTokenIdentifier();
+          identifier.readFields(new DataInputStream(
+              new ByteArrayInputStream(accessToken.getIdentifier())));
+          String bpid = identifier.getBlockPoolId();
+          sendSaslMessageHandshakeSecret(out, new byte[0],
+              handshakeSecret, bpid);
+        }
       } else {
-        LOG.debug("Sending handshake secret.");
-        BlockTokenIdentifier identifier = new BlockTokenIdentifier();
-        identifier.readFields(new DataInputStream(
-            new ByteArrayInputStream(accessToken.getIdentifier())));
-        String bpid = identifier.getBlockPoolId();
-        sendSaslMessageHandshakeSecret(out, new byte[0], handshakeSecret, bpid);
+        LOG.debug("Block token id is null, sending without handshake secret.");
+        sendSaslMessage(out, new byte[0]);
       }
 
       // step 1
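
The client now sources the secret from the decoded identifier, guarding against decodeIdentifier() returning null (which happens when the token kind has no registered identifier class). An equivalent, slightly tighter form of the block above, offered as a sketch rather than a patch suggestion; it reuses the decoded identifier instead of decoding twice and re-parsing the raw bytes for the block pool id:

    BlockTokenIdentifier id = accessToken.decodeIdentifier();
    byte[] secret = (id == null) ? null : id.getHandshakeMsg();
    if (secret == null || secret.length == 0) {
      LOG.debug("No handshake secret available, sending without one.");
      sendSaslMessage(out, new byte[0]);
    } else {
      LOG.debug("Sending handshake secret.");
      sendSaslMessageHandshakeSecret(out, new byte[0], secret,
          id.getBlockPoolId());
    }
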
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
index 829d3ef..3d43c97 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
@@ -354,10 +354,6 @@ public class PBHelperClient {
         setPassword(getByteString(tok.getPassword())).
         setKindBytes(getFixedByteString(tok.getKind())).
         setServiceBytes(getFixedByteString(tok.getService()));
-    if (tok.getDnHandshakeSecret() != null) {
-      builder.setHandshakeSecret(
-          ByteString.copyFrom(tok.getDnHandshakeSecret()));
-    }
     return builder.build();
   }
 
@@ -779,6 +775,11 @@ public class PBHelperClient {
     for (String storageId : blockTokenSecret.getStorageIds()) {
       builder.addStorageIds(storageId);
     }
+
+    byte[] handshake = blockTokenSecret.getHandshakeMsg();
+    if (handshake != null && handshake.length > 0) {
+      builder.setHandshakeSecret(getByteString(handshake));
+    }
     return builder.build();
   }
 
@@ -835,9 +836,6 @@ public class PBHelperClient {
         new Token<>(blockToken.getIdentifier()
         .toByteArray(), blockToken.getPassword().toByteArray(), new Text(
         blockToken.getKind()), new Text(blockToken.getService()));
-    if (blockToken.hasHandshakeSecret()) {
-      token.setDNHandshakeSecret(blockToken.getHandshakeSecret().toByteArray());
-    }
     return token;
   }
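
With the TokenProto field gone, PBHelperClient.convert(Token) stays fully generic; the secret survives protobuf transport because it rides inside the opaque identifier bytes (BlockTokenSecretProto field 9, added in the hdfs.proto hunk below). A round-trip sketch, using the two convert() overloads visible in this file:

    import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
    import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
    import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
    import org.apache.hadoop.security.token.Token;

    // Sketch: a block token round-tripped through the generic TokenProto
    // still carries the wrapped QOP, since it lives in the identifier bytes.
    TokenProto onWire = PBHelperClient.convert(token);       // 4 fields only
    Token<BlockTokenIdentifier> back = PBHelperClient.convert(onWire);
    byte[] secret = back.decodeIdentifier().getHandshakeMsg(); // preserved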
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java
index 0635fd0..a6b6073 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hdfs.security.token.block;
 import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
+import java.io.EOFException;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.EnumSet;
@@ -55,6 +56,7 @@ public class BlockTokenIdentifier extends TokenIdentifier {
   private StorageType[] storageTypes;
   private String[] storageIds;
   private boolean useProto;
+  private byte[] handshakeMsg;
 
   private byte [] cache;
 
@@ -76,6 +78,7 @@ public class BlockTokenIdentifier extends TokenIdentifier {
     this.storageIds = Optional.ofNullable(storageIds)
                               .orElse(new String[0]);
     this.useProto = useProto;
+    this.handshakeMsg = new byte[0];
   }
 
   @Override
@@ -134,6 +137,14 @@ public class BlockTokenIdentifier extends TokenIdentifier {
     return storageIds;
   }
 
+  public byte[] getHandshakeMsg() {
+    return handshakeMsg;
+  }
+
+  public void setHandshakeMsg(byte[] bytes) {
+    handshakeMsg = bytes;
+  }
+
   @Override
   public String toString() {
     return "block_token_identifier (expiryDate=" + this.getExpiryDate()
@@ -241,6 +252,16 @@ public class BlockTokenIdentifier extends TokenIdentifier {
     storageIds = readStorageIds;
 
     useProto = false;
+
+    try {
+      int handshakeMsgLen = WritableUtils.readVInt(in);
+      if (handshakeMsgLen != 0) {
+        handshakeMsg = new byte[handshakeMsgLen];
+        in.readFully(handshakeMsg);
+      }
+    } catch (EOFException eof) {
+
+    }
   }
 
   @VisibleForTesting
@@ -271,6 +292,13 @@ public class BlockTokenIdentifier extends TokenIdentifier {
     storageIds = blockTokenSecretProto.getStorageIdsList().stream()
         .toArray(String[]::new);
     useProto = true;
+
+    if(blockTokenSecretProto.hasHandshakeSecret()) {
+      handshakeMsg = blockTokenSecretProto
+          .getHandshakeSecret().toByteArray();
+    } else {
+      handshakeMsg = new byte[0];
+    }
   }
 
   @Override
@@ -301,6 +329,10 @@ public class BlockTokenIdentifier extends TokenIdentifier {
     for (String id: storageIds) {
       WritableUtils.writeString(out, id);
     }
+    if (handshakeMsg != null && handshakeMsg.length > 0) {
+      WritableUtils.writeVInt(out, handshakeMsg.length);
+      out.write(handshakeMsg);
+    }
   }
 
   @VisibleForTesting
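
The legacy Writable path gains an optional trailing field: writers append the vint-prefixed blob only when it is non-empty, and the reader treats EOFException as "field absent" (hence the deliberately empty catch), so identifiers serialized by older writers still parse. The pattern in isolation, as a minimal sketch:

    import java.io.DataInput;
    import java.io.EOFException;
    import java.io.IOException;
    import org.apache.hadoop.io.WritableUtils;

    // Minimal sketch of the optional-trailing-field pattern used above:
    // readers treat EOF as "field absent" so data written by older
    // serializers, which ends before this field, still parses cleanly.
    static byte[] readOptionalTrailingBlob(DataInput in) throws IOException {
      try {
        int len = WritableUtils.readVInt(in);
        byte[] blob = new byte[len];
        in.readFully(blob);
        return blob;
      } catch (EOFException eof) {
        return new byte[0]; // pre-upgrade serialization: nothing follows
      }
    }
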
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
index 441b9d6..53bdf5e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
@@ -671,4 +671,5 @@ message BlockTokenSecretProto {
   repeated AccessModeProto modes = 6;
   repeated StorageTypeProto storageTypes = 7;
   repeated string storageIds = 8;
+  optional bytes handshakeSecret = 9;
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
index d162d9e..ae17761 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
@@ -28,11 +28,9 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
-import javax.crypto.SecretKey;
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
 import javax.security.auth.callback.NameCallback;
@@ -52,15 +50,12 @@ import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
 import org.apache.hadoop.hdfs.protocol.datatransfer.InvalidEncryptionKeyException;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto.DataTransferEncryptorStatus;
-import org.apache.hadoop.hdfs.security.token.block.BlockKey;
 import org.apache.hadoop.hdfs.security.token.block.BlockPoolTokenSecretManager;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.server.datanode.DNConf;
 import org.apache.hadoop.security.SaslPropertiesResolver;
-import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.SecretManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -348,21 +343,6 @@ public class SaslDataTransferServer {
     return identifier;
   }
 
-  private String examineSecret(byte[] secret, String bpid) {
-    BlockKey blockKey = blockPoolTokenSecretManager.get(bpid).getCurrentKey();
-    SecretKey secretKey = blockKey.getKey();
-    for (SaslRpcServer.QualityOfProtection qop :
-        SaslRpcServer.QualityOfProtection.values()) {
-      String qopString = qop.getSaslQop();
-      byte[] data = qopString.getBytes(Charsets.UTF_8);
-      byte[] encryptedData = SecretManager.createPassword(data, secretKey);
-      if (Arrays.equals(encryptedData, secret)) {
-        return qopString;
-      }
-    }
-    return null;
-  }
-
   @VisibleForTesting
   public String getNegotiatedQOP() {
     return negotiatedQOP;
@@ -399,12 +379,8 @@ public class SaslDataTransferServer {
       if (secret != null || bpid != null) {
         // sanity check, if one is null, the other must also not be null
         assert(secret != null && bpid != null);
-        String qop = examineSecret(secret, bpid);
-        if (qop != null) {
-          saslProps.put(Sasl.QOP, qop);
-        } else {
-          LOG.error("Unable to match secret to a QOP!");
-        }
+        String qop = new String(secret, Charsets.UTF_8);
+        saslProps.put(Sasl.QOP, qop);
       }
       SaslParticipant sasl = SaslParticipant.createServerSaslParticipant(
           saslProps, callbackHandler);
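
Server-side, the semantics change: the received secret is no longer an encrypted blob to be matched against each QOP candidate (the deleted examineSecret()), but the QOP string itself, readable directly because its integrity is already covered by the token's HMAC password over the identifier. A hypothetical hardening step, not part of this patch, would whitelist the decoded string before it reaches saslProps:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;
    import com.google.common.base.Charsets;

    // Hypothetical defensive check (not in the patch): restrict the wrapped
    // value to the three SASL QOP strings before handing it to the SASL
    // layer. Authenticity still comes from the token's HMAC password check.
    private static final Set<String> VALID_QOPS =
        new HashSet<>(Arrays.asList("auth", "auth-int", "auth-conf"));

    static String toQop(byte[] secret) {
      String qop = new String(secret, Charsets.UTF_8);
      return VALID_QOPS.contains(qop) ? qop : null;
    }
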
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
index 757cfb0..57f84ea 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hdfs.security.token.block;
 
+import com.google.common.base.Charsets;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
@@ -29,6 +30,7 @@ import java.util.Iterator;
 import java.util.Map;
 
 import org.apache.commons.lang3.ArrayUtils;
+import org.apache.hadoop.ipc.Server;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -94,6 +96,8 @@ public class BlockTokenSecretManager extends
 
   private final boolean useProto;
 
+  private final boolean shouldWrapQOP;
+
   private final SecureRandom nonceGenerator = new SecureRandom();
 
   /**
@@ -112,7 +116,25 @@ public class BlockTokenSecretManager extends
       long tokenLifetime, String blockPoolId, String encryptionAlgorithm,
       boolean useProto) {
     this(false, keyUpdateInterval, tokenLifetime, blockPoolId,
-        encryptionAlgorithm, 0, 1, useProto);
+        encryptionAlgorithm, 0, 1, useProto, false);
+  }
+
+  public BlockTokenSecretManager(long keyUpdateInterval,
+      long tokenLifetime, int nnIndex, int numNNs, String blockPoolId,
+      String encryptionAlgorithm, boolean useProto) {
+    this(keyUpdateInterval, tokenLifetime, nnIndex, numNNs,
+        blockPoolId, encryptionAlgorithm, useProto, false);
+  }
+
+  public BlockTokenSecretManager(long keyUpdateInterval,
+      long tokenLifetime, int nnIndex, int numNNs,  String blockPoolId,
+      String encryptionAlgorithm, boolean useProto, boolean shouldWrapQOP) {
+    this(true, keyUpdateInterval, tokenLifetime, blockPoolId,
+        encryptionAlgorithm, nnIndex, numNNs, useProto, shouldWrapQOP);
+    Preconditions.checkArgument(nnIndex >= 0);
+    Preconditions.checkArgument(numNNs > 0);
+    setSerialNo(new SecureRandom().nextInt());
+    generateKeys();
   }
 
   /**
@@ -125,21 +147,11 @@ public class BlockTokenSecretManager extends
    * @param encryptionAlgorithm encryption algorithm to use
    * @param numNNs number of namenodes possible
    * @param useProto should we use new protobuf style tokens
+   * @param shouldWrapQOP should wrap QOP in the block access token
    */
-  public BlockTokenSecretManager(long keyUpdateInterval,
-      long tokenLifetime, int nnIndex, int numNNs,  String blockPoolId,
-      String encryptionAlgorithm, boolean useProto) {
-    this(true, keyUpdateInterval, tokenLifetime, blockPoolId,
-        encryptionAlgorithm, nnIndex, numNNs, useProto);
-    Preconditions.checkArgument(nnIndex >= 0);
-    Preconditions.checkArgument(numNNs > 0);
-    setSerialNo(new SecureRandom().nextInt());
-    generateKeys();
-  }
-
   private BlockTokenSecretManager(boolean isMaster, long keyUpdateInterval,
       long tokenLifetime, String blockPoolId, String encryptionAlgorithm,
-      int nnIndex, int numNNs, boolean useProto) {
+      int nnIndex, int numNNs, boolean useProto, boolean shouldWrapQOP) {
     this.nnIndex = nnIndex;
     this.isMaster = isMaster;
     this.keyUpdateInterval = keyUpdateInterval;
@@ -148,6 +160,7 @@ public class BlockTokenSecretManager extends
     this.blockPoolId = blockPoolId;
     this.encryptionAlgorithm = encryptionAlgorithm;
     this.useProto = useProto;
+    this.shouldWrapQOP = shouldWrapQOP;
     this.timer = new Timer();
     generateKeys();
   }
@@ -277,10 +290,16 @@ public class BlockTokenSecretManager extends
   /** Generate a block token for a specified user */
   public Token<BlockTokenIdentifier> generateToken(String userId,
       ExtendedBlock block, EnumSet<BlockTokenIdentifier.AccessMode> modes,
-      StorageType[] storageTypes, String[] storageIds) throws IOException {
+      StorageType[] storageTypes, String[] storageIds) {
     BlockTokenIdentifier id = new BlockTokenIdentifier(userId, block
         .getBlockPoolId(), block.getBlockId(), modes, storageTypes,
         storageIds, useProto);
+    if (shouldWrapQOP) {
+      String qop = Server.getEstablishedQOP();
+      if (qop != null) {
+        id.setHandshakeMsg(qop.getBytes(Charsets.UTF_8));
+      }
+    }
     return new Token<BlockTokenIdentifier>(id, this);
   }
 
@@ -523,18 +542,6 @@ public class BlockTokenSecretManager extends
     return createPassword(nonce, key.getKey());
   }
 
-  /**
-   * Encrypt the given message with the current block key, using the current
-   * block key.
-   *
-   * @param message the message to be encrypted.
-   * @return the secret created by encrypting the given message.
-   */
-  public byte[] secretGen(byte[] message) {
-    return createPassword(message, currentKey.getKey());
-  }
-
-  @VisibleForTesting
   public BlockKey getCurrentKey() {
     return currentKey;
   }
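
generateToken() now stamps the caller's negotiated RPC QOP into the identifier when shouldWrapQOP is set; Server.getEstablishedQOP() reads it from the current RPC call's context, so the token records the QOP the NameNode actually accepted from this client. Constructing a master-mode manager with wrapping enabled looks roughly like this (values are illustrative, not defaults; BlockManager passes null for the pool id and sets it later):

    import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;

    // Sketch: the last argument is the new shouldWrapQOP flag.
    BlockTokenSecretManager btsm = new BlockTokenSecretManager(
        10 * 60 * 1000L,   // key update interval: 10 min
        10 * 60 * 1000L,   // token lifetime: 10 min
        0, 1,              // nnIndex, numNNs (single NameNode)
        null,              // blockPoolId, filled in via setBlockPoolId()
        "HmacSHA1",        // encryption algorithm
        true,              // useProto: write protobuf-style tokens
        true);             // shouldWrapQOP: stamp established QOP into tokens
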
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index 9a6f1d1..50f6454 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
+import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
 import static org.apache.hadoop.hdfs.protocol.BlockType.CONTIGUOUS;
 import static org.apache.hadoop.hdfs.protocol.BlockType.STRIPED;
 import static org.apache.hadoop.util.ExitUtil.terminate;
@@ -624,6 +625,9 @@ public class BlockManager implements BlockStatsMXBean {
         DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_PROTOBUF_ENABLE,
         DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_PROTOBUF_ENABLE_DEFAULT);
 
+    boolean shouldWrapQOP = conf.getBoolean(
+        DFS_NAMENODE_SEND_QOP_ENABLED, DFS_NAMENODE_SEND_QOP_ENABLED_DEFAULT);
+
     if (isHaEnabled) {
       // figure out which index we are of the nns
       Collection<String> nnIds = DFSUtilClient.getNameNodeIds(conf, nsId);
@@ -637,11 +641,11 @@ public class BlockManager implements BlockStatsMXBean {
       }
       return new BlockTokenSecretManager(updateMin * 60 * 1000L,
           lifetimeMin * 60 * 1000L, nnIndex, nnIds.size(), null,
-          encryptionAlgorithm, shouldWriteProtobufToken);
+          encryptionAlgorithm, shouldWriteProtobufToken, shouldWrapQOP);
     } else {
       return new BlockTokenSecretManager(updateMin*60*1000L,
           lifetimeMin*60*1000L, 0, 1, null, encryptionAlgorithm,
-          shouldWriteProtobufToken);
+          shouldWriteProtobufToken, shouldWrapQOP);
     }
   }
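
This is the flag that drives shouldWrapQOP above. Assuming the key resolves to dfs.namenode.send.qop.enabled with a default of false on this branch, enabling it is a one-liner:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;

    // Sketch: turn on QOP wrapping for block tokens minted by this NameNode.
    Configuration conf = new Configuration();
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_SEND_QOP_ENABLED, true);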
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
index 747bb92..44f397b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
@@ -27,13 +27,10 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIFELINE_HANDLER
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_HANDLER_COUNT_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_HANDLER_COUNT_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_AUXILIARY_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SEND_QOP_ENABLED;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SEND_QOP_ENABLED_DEFAULT;
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.MAX_PATH_DEPTH;
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.MAX_PATH_LENGTH;
 import static org.apache.hadoop.util.Time.now;
 
-import com.google.common.base.Charsets;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.InetSocketAddress;
@@ -147,8 +144,6 @@ import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolServerSideTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.ReconfigurationProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.ReconfigurationProtocolServerSideTranslatorPB;
-import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
 import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
@@ -269,8 +264,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
 
   private final String defaultECPolicyName;
 
-  private final boolean shouldSendQOP;
-
   public NameNodeRpcServer(Configuration conf, NameNode nn)
       throws IOException {
     this.nn = nn;
@@ -553,8 +546,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
         this.clientRpcServer.addAuxiliaryListener(auxiliaryPort);
       }
     }
-    this.shouldSendQOP = conf.getBoolean(
-        DFS_NAMENODE_SEND_QOP_ENABLED, DFS_NAMENODE_SEND_QOP_ENABLED_DEFAULT);
   }
 
   /** Allow access to the lifeline RPC server for testing */
@@ -762,11 +753,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
     metrics.incrGetBlockLocations();
     LocatedBlocks locatedBlocks =
         namesystem.getBlockLocations(getClientMachine(), src, offset, length);
-    if (shouldSendQOP) {
-      for (LocatedBlock lb : locatedBlocks.getLocatedBlocks()) {
-        wrapEstablishedQOP(lb, getEstablishedClientQOP());
-      }
-    }
     return locatedBlocks;
   }
   
@@ -840,9 +826,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
       RetryCache.setState(cacheEntry, success, info);
     }
     metrics.incrFilesAppended();
-    if (shouldSendQOP) {
-      wrapEstablishedQOP(info.getLastBlock(), getEstablishedClientQOP());
-    }
     return info;
   }
 
@@ -911,9 +894,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
     if (locatedBlock != null) {
       metrics.incrAddBlockOps();
     }
-    if (shouldSendQOP) {
-      wrapEstablishedQOP(locatedBlock, getEstablishedClientQOP());
-    }
     return locatedBlock;
   }
 
@@ -947,9 +927,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
     LocatedBlock locatedBlock = namesystem.getAdditionalDatanode(src, fileId,
         blk, existings, existingStorageIDs, excludeSet, numAdditionalNodes,
         clientName);
-    if (shouldSendQOP) {
-      wrapEstablishedQOP(locatedBlock, getEstablishedClientQOP());
-    }
     return locatedBlock;
   }
   /**
@@ -1877,7 +1854,7 @@ public class NameNodeRpcServer implements NamenodeProtocols {
    *
    * @return the established QOP of this client.
    */
-  private static String getEstablishedClientQOP() {
+  public static String getEstablishedClientQOP() {
     return Server.getEstablishedQOP();
   }
 
@@ -2631,26 +2608,4 @@ public class NameNodeRpcServer implements NamenodeProtocols {
     }
     return namesystem.getBlockManager().getSPSManager().getNextPathId();
   }
-
-
-  /**
-   * Wrapping the QOP information into the LocatedBlock instance.
-   * The wrapped QOP will be used by DataNode, i.e. DataNode will simply use
-   * this QOP to accept client calls, because this this QOP is viewed
-   * as the QOP that NameNode has accepted.
-   *
-   * @param locatedBlock the LocatedBlock instance
-   * @param qop the QOP to wrap in
-   * @throws RuntimeException
-   */
-  private void wrapEstablishedQOP(LocatedBlock locatedBlock, String qop) {
-    if (qop == null || locatedBlock == null) {
-      return;
-    }
-    BlockTokenSecretManager btsm = namesystem.getBlockManager()
-        .getBlockTokenSecretManager();
-    Token<BlockTokenIdentifier> token = locatedBlock.getBlockToken();
-    byte[] secret = btsm.secretGen(qop.getBytes(Charsets.UTF_8));
-    token.setDNHandshakeSecret(secret);
-  }
 }
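
The per-RPC wrapEstablishedQOP() post-processing (and the shouldSendQOP flag) disappear because token creation itself now captures the caller's QOP, so every path that mints a block token (getBlockLocations, append, addBlock, getAdditionalDatanode) inherits the behavior with no special casing. Roughly, the deleted logic collapses to this fragment inside generateToken(), shown in the BlockTokenSecretManager hunk above:

    // Executed once at token creation instead of once per LocatedBlock
    // per RPC; qop is null when the caller negotiated no QOP.
    String qop = Server.getEstablishedQOP();
    if (shouldWrapQOP && qop != null) {
      id.setHandshakeMsg(qop.getBytes(Charsets.UTF_8));
    }
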
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockTokenWrappingQOP.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockTokenWrappingQOP.java
index ea7ab97..94b80e6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockTokenWrappingQOP.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockTokenWrappingQOP.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumSet;
-import javax.crypto.Mac;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.CreateFlag;
@@ -32,7 +31,6 @@ import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferTestCase;
-import org.apache.hadoop.hdfs.security.token.block.BlockKey;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.security.TestPermission;
 import org.junit.After;
@@ -55,7 +53,6 @@ public class TestBlockTokenWrappingQOP extends SaslDataTransferTestCase {
 
   private HdfsConfiguration conf;
   private MiniDFSCluster cluster;
-  private String encryptionAlgorithm;
   private DistributedFileSystem dfs;
 
   private String configKey;
@@ -84,7 +81,6 @@ public class TestBlockTokenWrappingQOP extends SaslDataTransferTestCase {
     conf.setBoolean(DFS_NAMENODE_SEND_QOP_ENABLED, true);
     conf.set(HADOOP_RPC_PROTECTION, this.configKey);
     cluster = null;
-    encryptionAlgorithm = "HmacSHA1";
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
     cluster.waitActive();
   }
@@ -109,12 +105,8 @@ public class TestBlockTokenWrappingQOP extends SaslDataTransferTestCase {
 
     LocatedBlock lb = client.namenode.addBlock(src, clientName, null, null,
         HdfsConstants.GRANDFATHER_INODE_ID, null, null);
-    byte[] secret = lb.getBlockToken().getDnHandshakeSecret();
-    BlockKey currentKey = cluster.getNamesystem().getBlockManager()
-        .getBlockTokenSecretManager().getCurrentKey();
-    String decrypted = decryptMessage(secret, currentKey,
-        encryptionAlgorithm);
-    assertEquals(this.qopValue, decrypted);
+    byte[] secret = lb.getBlockToken().decodeIdentifier().getHandshakeMsg();
+    assertEquals(this.qopValue, new String(secret));
   }
 
   @Test
@@ -137,12 +129,8 @@ public class TestBlockTokenWrappingQOP extends SaslDataTransferTestCase {
         new EnumSetWritable<>(EnumSet.of(CreateFlag.APPEND)));
 
     byte[] secret = lastBlock.getLastBlock().getBlockToken()
-        .getDnHandshakeSecret();
-    BlockKey currentKey = cluster.getNamesystem().getBlockManager()
-        .getBlockTokenSecretManager().getCurrentKey();
-    String decrypted = decryptMessage(secret, currentKey,
-        encryptionAlgorithm);
-    assertEquals(this.qopValue, decrypted);
+        .decodeIdentifier().getHandshakeMsg();
+    assertEquals(this.qopValue, new String(secret));
   }
 
   @Test
@@ -164,27 +152,10 @@ public class TestBlockTokenWrappingQOP extends SaslDataTransferTestCase {
 
     assertTrue(lbs.getLocatedBlocks().size() > 0);
 
-    BlockKey currentKey = cluster.getNamesystem().getBlockManager()
-        .getBlockTokenSecretManager().getCurrentKey();
     for (LocatedBlock lb : lbs.getLocatedBlocks()) {
-      byte[] secret = lb.getBlockToken().getDnHandshakeSecret();
-      String decrypted = decryptMessage(secret, currentKey,
-          encryptionAlgorithm);
-      assertEquals(this.qopValue, decrypted);
+      byte[] secret = lb.getBlockToken()
+          .decodeIdentifier().getHandshakeMsg();
+      assertEquals(this.qopValue, new String(secret));
     }
   }
-
-  private String decryptMessage(byte[] secret, BlockKey key,
-      String algorithm) throws Exception {
-    String[] qops = {"auth", "auth-conf", "auth-int"};
-    Mac mac = Mac.getInstance(algorithm);
-    mac.init(key.getKey());
-    for (String qop : qops) {
-      byte[] encrypted = mac.doFinal(qop.getBytes());
-      if (Arrays.equals(encrypted, secret)) {
-        return qop;
-      }
-    }
-    return null;
-  }
 }
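
One note on the assertions above: new String(secret) decodes with the platform default charset, which is safe here because SASL QOP strings are pure ASCII, but an explicit charset states the intent, e.g.:

    import java.nio.charset.StandardCharsets;

    // Equivalent assertion with the charset spelled out:
    assertEquals(this.qopValue, new String(secret, StandardCharsets.UTF_8));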

