You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2020/01/16 17:58:06 UTC
[hadoop] branch trunk updated: HADOOP-16621. [pb-upgrade] Remove
Protobuf classes from signatures of Public APIs. Contributed by Vinayakumar
B. (#1803)
This is an automated email from the ASF dual-hosted git repository.
vinayakumarb pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/trunk by this push:
new edbbc03 HADOOP-16621. [pb-upgrade] Remove Protobuf classes from signatures of Public APIs. Contributed by Vinayakumar B. (#1803)
edbbc03 is described below
commit edbbc03ce7d479f1b84d9209021e9d2822909cfe
Author: Vinayakumar B <vi...@apache.org>
AuthorDate: Thu Jan 16 23:27:50 2020 +0530
HADOOP-16621. [pb-upgrade] Remove Protobuf classes from signatures of Public APIs. Contributed by Vinayakumar B. (#1803)
---
.../dev-support/findbugsExcludeFile.xml | 6 ++
.../java/org/apache/hadoop/ipc/ProtobufHelper.java | 69 ++++++++++++++++++++++
.../org/apache/hadoop/security/Credentials.java | 5 +-
.../org/apache/hadoop/security/token/Token.java | 28 ---------
.../dev-support/findbugsExcludeFile.xml | 5 --
.../hadoop/hdfs/protocolPB/PBHelperClient.java | 45 ++------------
6 files changed, 84 insertions(+), 74 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
index 802197e..cf5c387 100644
--- a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
@@ -460,4 +460,10 @@
<Method name="save"/>
<Bug pattern="OBL_UNSATISFIED_OBLIGATION"/>
</Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.ipc.ProtobufHelper" />
+ <Method name="getFixedByteString" />
+ <Bug pattern="AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION" />
+ </Match>
</FindBugsFilter>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
index e30f28a..105628f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
@@ -18,9 +18,15 @@
package org.apache.hadoop.ipc;
import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import com.google.protobuf.ByteString;
import com.google.protobuf.ServiceException;
/**
@@ -46,4 +52,67 @@ public class ProtobufHelper {
}
return e instanceof IOException ? (IOException) e : new IOException(se);
}
+
+
+ /**
+ * Map used to cache fixed strings to ByteStrings. Since there is no
+ * automatic expiration policy, only use this for strings from a fixed, small
+ * set.
+ * <p/>
+ * This map should not be accessed directly. Use the getFixedByteString
+ * methods instead.
+ */
+ private final static ConcurrentHashMap<Object, ByteString>
+ FIXED_BYTESTRING_CACHE = new ConcurrentHashMap<>();
+
+ /**
+ * Get the ByteString for frequently used fixed and small set strings.
+ * @param key the Text key to look up
+ * @return the cached ByteString for {@code key}
+ */
+ public static ByteString getFixedByteString(Text key) {
+ ByteString value = FIXED_BYTESTRING_CACHE.get(key);
+ if (value == null) {
+ value = ByteString.copyFromUtf8(key.toString());
+ FIXED_BYTESTRING_CACHE.put(new Text(key.copyBytes()), value);
+ }
+ return value;
+ }
+
+ /**
+ * Get the ByteString for frequently used fixed and small set strings.
+ * @param key the String key to look up
+ * @return the cached ByteString for {@code key}
+ */
+ public static ByteString getFixedByteString(String key) {
+ ByteString value = FIXED_BYTESTRING_CACHE.get(key);
+ if (value == null) {
+ value = ByteString.copyFromUtf8(key);
+ FIXED_BYTESTRING_CACHE.put(key, value);
+ }
+ return value;
+ }
+
+ public static ByteString getByteString(byte[] bytes) {
+ // return singleton to reduce object allocation
+ return (bytes.length == 0) ? ByteString.EMPTY : ByteString.copyFrom(bytes);
+ }
+
+ public static Token<? extends TokenIdentifier> tokenFromProto(
+ TokenProto tokenProto) {
+ Token<? extends TokenIdentifier> token = new Token<>(
+ tokenProto.getIdentifier().toByteArray(),
+ tokenProto.getPassword().toByteArray(), new Text(tokenProto.getKind()),
+ new Text(tokenProto.getService()));
+ return token;
+ }
+
+ public static TokenProto protoFromToken(Token<?> tok) {
+ TokenProto.Builder builder = TokenProto.newBuilder().
+ setIdentifier(getByteString(tok.getIdentifier())).
+ setPassword(getByteString(tok.getPassword())).
+ setKindBytes(getFixedByteString(tok.getKind())).
+ setServiceBytes(getFixedByteString(tok.getService()));
+ return builder.build();
+ }
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
index 37cf021..de30b18 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.security.proto.SecurityProtos.CredentialsKVProto;
@@ -368,7 +369,7 @@ public class Credentials implements Writable {
CredentialsKVProto.Builder kv = CredentialsKVProto.newBuilder().
setAliasBytes(ByteString.copyFrom(
e.getKey().getBytes(), 0, e.getKey().getLength())).
- setToken(e.getValue().toTokenProto());
+ setToken(ProtobufHelper.protoFromToken(e.getValue()));
storage.addTokens(kv.build());
}
@@ -390,7 +391,7 @@ public class Credentials implements Writable {
CredentialsProto storage = CredentialsProto.parseDelimitedFrom((DataInputStream)in);
for (CredentialsKVProto kv : storage.getTokensList()) {
addToken(new Text(kv.getAliasBytes().toByteArray()),
- (Token<? extends TokenIdentifier>) new Token(kv.getToken()));
+ ProtobufHelper.tokenFromProto(kv.getToken()));
}
for (CredentialsKVProto kv : storage.getSecretsList()) {
addSecretKey(new Text(kv.getAliasBytes().toByteArray()),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
index 487dd46..4f0f6fc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.security.token;
import com.google.common.collect.Maps;
-import com.google.protobuf.ByteString;
import com.google.common.primitives.Bytes;
import org.apache.commons.codec.binary.Base64;
@@ -28,7 +27,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.*;
-import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -118,32 +116,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
}
/**
- * Construct a Token from a TokenProto.
- * @param tokenPB the TokenProto object
- */
- public Token(TokenProto tokenPB) {
- this.identifier = tokenPB.getIdentifier().toByteArray();
- this.password = tokenPB.getPassword().toByteArray();
- this.kind = new Text(tokenPB.getKindBytes().toByteArray());
- this.service = new Text(tokenPB.getServiceBytes().toByteArray());
- }
-
- /**
- * Construct a TokenProto from this Token instance.
- * @return a new TokenProto object holding copies of data in this instance
- */
- public TokenProto toTokenProto() {
- return TokenProto.newBuilder().
- setIdentifier(ByteString.copyFrom(this.getIdentifier())).
- setPassword(ByteString.copyFrom(this.getPassword())).
- setKindBytes(ByteString.copyFrom(
- this.getKind().getBytes(), 0, this.getKind().getLength())).
- setServiceBytes(ByteString.copyFrom(
- this.getService().getBytes(), 0, this.getService().getLength())).
- build();
- }
-
- /**
* Get the token identifier's byte representation.
* @return the token identifier's byte representation
*/
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
index 3d85a13..278d01d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
@@ -92,10 +92,5 @@
<Method name="getSymlinkInBytes" />
<Bug pattern="EI_EXPOSE_REP" />
</Match>
- <Match>
- <Class name="org.apache.hadoop.hdfs.protocolPB.PBHelperClient" />
- <Method name="getFixedByteString" />
- <Bug pattern="AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION" />
- </Match>
</FindBugsFilter>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
index 57b2f92..c439b40 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
@@ -27,7 +27,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
import com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder;
@@ -207,6 +206,7 @@ import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.SlotId;
import org.apache.hadoop.io.EnumSetWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.erasurecode.ECSchema;
+import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.ChunkedArrayList;
@@ -232,33 +232,8 @@ public class PBHelperClient {
private static final FsAction[] FSACTION_VALUES =
FsAction.values();
- /**
- * Map used to cache fixed strings to ByteStrings. Since there is no
- * automatic expiration policy, only use this for strings from a fixed, small
- * set.
- * <p/>
- * This map should not be accessed directly. Used the getFixedByteString
- * methods instead.
- */
- private static ConcurrentHashMap<Object, ByteString> fixedByteStringCache =
- new ConcurrentHashMap<>();
-
- private static ByteString getFixedByteString(Text key) {
- ByteString value = fixedByteStringCache.get(key);
- if (value == null) {
- value = ByteString.copyFromUtf8(key.toString());
- fixedByteStringCache.put(new Text(key.copyBytes()), value);
- }
- return value;
- }
-
private static ByteString getFixedByteString(String key) {
- ByteString value = fixedByteStringCache.get(key);
- if (value == null) {
- value = ByteString.copyFromUtf8(key);
- fixedByteStringCache.put(key, value);
- }
- return value;
+ return ProtobufHelper.getFixedByteString(key);
}
/**
@@ -281,7 +256,7 @@ public class PBHelperClient {
public static ByteString getByteString(byte[] bytes) {
// return singleton to reduce object allocation
- return (bytes.length == 0) ? ByteString.EMPTY : ByteString.copyFrom(bytes);
+ return ProtobufHelper.getByteString(bytes);
}
public static ShmId convert(ShortCircuitShmIdProto shmId) {
@@ -349,12 +324,7 @@ public class PBHelperClient {
}
public static TokenProto convert(Token<?> tok) {
- TokenProto.Builder builder = TokenProto.newBuilder().
- setIdentifier(getByteString(tok.getIdentifier())).
- setPassword(getByteString(tok.getPassword())).
- setKindBytes(getFixedByteString(tok.getKind())).
- setServiceBytes(getFixedByteString(tok.getService()));
- return builder.build();
+ return ProtobufHelper.protoFromToken(tok);
}
public static ShortCircuitShmIdProto convert(ShmId shmId) {
@@ -832,11 +802,8 @@ public class PBHelperClient {
public static Token<BlockTokenIdentifier> convert(
TokenProto blockToken) {
- Token<BlockTokenIdentifier> token =
- new Token<>(blockToken.getIdentifier()
- .toByteArray(), blockToken.getPassword().toByteArray(), new Text(
- blockToken.getKind()), new Text(blockToken.getService()));
- return token;
+ return (Token<BlockTokenIdentifier>) ProtobufHelper
+ .tokenFromProto(blockToken);
}
// DatanodeId
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org