Posted to commits@hbase.apache.org by ap...@apache.org on 2015/08/07 18:13:59 UTC
[3/6] hbase git commit: HBASE-13825 Use ProtobufUtil#mergeFrom and ProtobufUtil#mergeDelimitedFrom in place of builder methods of same name
HBASE-13825 Use ProtobufUtil#mergeFrom and ProtobufUtil#mergeDelimitedFrom in place of builder methods of same name
Incorporates HBASE-14076
Conflicts:
hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java
hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateManager.java
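Every hunk below applies the same substitution: a direct Message.Builder#mergeFrom or #mergeDelimitedFrom call, which is bound by protobuf's default 64MB decode limit and throws InvalidProtocolBufferException, is replaced by a ProtobufUtil helper that sets the CodedInputStream size limit explicitly (to the known message size, or Integer.MAX_VALUE when unknown) and surfaces plain IOException. A minimal sketch of the pattern, mirroring the ClusterId hunk below (bytes and pblen are in scope as in that method):

    // Before: builder method, limited to protobuf's default 64MB and throwing
    // InvalidProtocolBufferException
    //   cid = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();

    // After: ProtobufUtil helper with an explicit size limit, throwing IOException
    ClusterIdProtos.ClusterId.Builder builder = ClusterIdProtos.ClusterId.newBuilder();
    ClusterIdProtos.ClusterId cid;
    try {
      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
      cid = builder.build();
    } catch (IOException e) {
      throw new DeserializationException(e);
    }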
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0ac0e934
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0ac0e934
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0ac0e934
Branch: refs/heads/branch-1.0
Commit: 0ac0e934e4891e670103b40cdae2b4dba6f0f0c2
Parents: cb4b395
Author: Andrew Purtell <ap...@apache.org>
Authored: Thu Aug 6 21:48:05 2015 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Thu Aug 6 21:48:05 2015 -0700
----------------------------------------------------------------------
.../java/org/apache/hadoop/hbase/ClusterId.java | 7 +-
.../apache/hadoop/hbase/HColumnDescriptor.java | 6 +-
.../org/apache/hadoop/hbase/HRegionInfo.java | 10 +-
.../apache/hadoop/hbase/HTableDescriptor.java | 7 +-
.../apache/hadoop/hbase/RegionTransition.java | 12 +-
.../org/apache/hadoop/hbase/client/HTable.java | 9 +-
.../hbase/ipc/MasterCoprocessorRpcChannel.java | 5 +-
.../hbase/ipc/RegionCoprocessorRpcChannel.java | 5 +-
.../ipc/RegionServerCoprocessorRpcChannel.java | 5 +-
.../apache/hadoop/hbase/ipc/RpcClientImpl.java | 5 +-
.../hadoop/hbase/protobuf/ProtobufUtil.java | 111 +++++++++++++++++-
.../replication/ReplicationPeerZKImpl.java | 7 +-
.../replication/ReplicationPeersZKImpl.java | 6 +-
.../token/AuthenticationTokenIdentifier.java | 7 +-
.../zookeeper/ZKTableStateClientSideReader.java | 9 +-
.../apache/hadoop/hbase/zookeeper/ZKUtil.java | 32 ++++--
.../org/apache/hadoop/hbase/types/PBCell.java | 2 +
.../hadoop/hbase/rest/model/CellModel.java | 3 +-
.../hadoop/hbase/rest/model/CellSetModel.java | 3 +-
.../hadoop/hbase/rest/model/ScannerModel.java | 3 +-
.../rest/model/StorageClusterStatusModel.java | 3 +-
.../hadoop/hbase/rest/model/TableInfoModel.java | 3 +-
.../hadoop/hbase/rest/model/TableListModel.java | 3 +-
.../hbase/rest/model/TableSchemaModel.java | 3 +-
.../hadoop/hbase/rest/model/VersionModel.java | 3 +-
.../org/apache/hadoop/hbase/SplitLogTask.java | 12 +-
.../org/apache/hadoop/hbase/ipc/RpcServer.java | 10 +-
.../hbase/mapreduce/MutationSerialization.java | 6 +-
.../hbase/mapreduce/ResultSerialization.java | 4 +-
.../hadoop/hbase/master/MasterRpcServices.java | 5 +-
.../hadoop/hbase/master/TableLockManager.java | 10 +-
.../hadoop/hbase/regionserver/HRegion.java | 5 +-
.../hbase/regionserver/HRegionServer.java | 6 +-
.../regionserver/wal/ProtobufLogReader.java | 11 +-
.../security/access/AccessControlLists.java | 21 ++--
.../hbase/security/access/AccessController.java | 10 +-
.../HbaseObjectWritableFor96Migration.java | 3 +-
.../security/visibility/VisibilityUtils.java | 18 ++-
.../org/apache/hadoop/hbase/util/FSUtils.java | 8 +-
.../hbase/zookeeper/LoadBalancerTracker.java | 8 +-
.../hbase/zookeeper/RegionServerTracker.java | 2 +-
.../hbase/zookeeper/ZKTableStateManager.java | 8 +-
.../hbase/mapreduce/TestSerialization.java | 115 +++++++++++++++++++
43 files changed, 391 insertions(+), 140 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
index 5be224c..9056d3e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
@@ -18,13 +18,13 @@
package org.apache.hadoop.hbase;
-import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos;
import org.apache.hadoop.hbase.util.Bytes;
+import java.io.IOException;
import java.util.UUID;
/**
@@ -66,8 +66,9 @@ public class ClusterId {
ClusterIdProtos.ClusterId.Builder builder = ClusterIdProtos.ClusterId.newBuilder();
ClusterIdProtos.ClusterId cid = null;
try {
- cid = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+ cid = builder.build();
+ } catch (IOException e) {
throw new DeserializationException(e);
}
return convert(cid);
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 401e0da..d67528a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.io.WritableComparable;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.util.ByteStringer;
-import com.google.protobuf.InvalidProtocolBufferException;
/**
* An HColumnDescriptor contains information about a column family such as the
@@ -1352,8 +1351,9 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
ColumnFamilySchema cfs = null;
try {
- cfs = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+ cfs = builder.build();
+ } catch (IOException e) {
throw new DeserializationException(e);
}
return convert(cfs);
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
index ca0f540..c53ecce 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
@@ -47,8 +47,6 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.hadoop.io.DataInputBuffer;
-import com.google.protobuf.InvalidProtocolBufferException;
-
/**
* Information about a region. A region is a range of keys in the whole keyspace of a table, an
* identifier (a timestamp) for differentiating between subset ranges (after region split)
@@ -1100,11 +1098,11 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
if (ProtobufUtil.isPBMagicPrefix(bytes, offset, len)) {
int pblen = ProtobufUtil.lengthOfPBMagic();
try {
- HBaseProtos.RegionInfo ri =
- HBaseProtos.RegionInfo.newBuilder().
- mergeFrom(bytes, pblen + offset, len - pblen).build();
+ HBaseProtos.RegionInfo.Builder builder = HBaseProtos.RegionInfo.newBuilder();
+ ProtobufUtil.mergeFrom(builder, bytes, pblen + offset, len - pblen);
+ HBaseProtos.RegionInfo ri = builder.build();
return convert(ri);
- } catch (InvalidProtocolBufferException e) {
+ } catch (IOException e) {
throw new DeserializationException(e);
}
} else {
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index 9dfb374..6de99f2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -55,8 +55,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.WritableComparable;
-import com.google.protobuf.InvalidProtocolBufferException;
-
/**
* HTableDescriptor contains the details about an HBase table such as the descriptors of
* all the column families, is the table a catalog table, <code> -ROOT- </code> or
@@ -1487,8 +1485,9 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
TableSchema.Builder builder = TableSchema.newBuilder();
TableSchema ts;
try {
- ts = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+ ts = builder.build();
+ } catch (IOException e) {
throw new DeserializationException(e);
}
return convert(ts);
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
index c74e11f..f367718 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
@@ -17,8 +17,9 @@
*/
package org.apache.hadoop.hbase;
+import java.io.IOException;
+
import org.apache.hadoop.hbase.util.ByteStringer;
-import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -121,10 +122,11 @@ public class RegionTransition {
ProtobufUtil.expectPBMagicPrefix(data);
try {
int prefixLen = ProtobufUtil.lengthOfPBMagic();
- ZooKeeperProtos.RegionTransition rt = ZooKeeperProtos.RegionTransition.newBuilder().
- mergeFrom(data, prefixLen, data.length - prefixLen).build();
- return new RegionTransition(rt);
- } catch (InvalidProtocolBufferException e) {
+ ZooKeeperProtos.RegionTransition.Builder builder =
+ ZooKeeperProtos.RegionTransition.newBuilder();
+ ProtobufUtil.mergeFrom(builder, data, prefixLen, data.length - prefixLen);
+ return new RegionTransition(builder.build());
+ } catch (IOException e) {
throw new DeserializationException(e);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index 9039b9a..418183e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -79,7 +79,6 @@ import org.apache.hadoop.hbase.util.Threads;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.Descriptors;
-import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
@@ -1904,10 +1903,10 @@ public class HTable implements HTableInterface, RegionLocator {
", value=" + serviceResult.getValue().getValue());
}
try {
- callback.update(region, row,
- (R) responsePrototype.newBuilderForType().mergeFrom(
- serviceResult.getValue().getValue()).build());
- } catch (InvalidProtocolBufferException e) {
+ Message.Builder builder = responsePrototype.newBuilderForType();
+ ProtobufUtil.mergeFrom(builder, serviceResult.getValue().getValue());
+ callback.update(region, row, (R) builder.build());
+ } catch (IOException e) {
LOG.error("Unexpected response type from endpoint " + methodDescriptor.getFullName(),
e);
callbackErrorExceptions.add(e);
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
index 118a895..b3d6b41 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
@@ -68,8 +68,9 @@ public class MasterCoprocessorRpcChannel extends CoprocessorRpcChannel{
CoprocessorServiceResponse result = ProtobufUtil.execService(connection.getMaster(), call);
Message response = null;
if (result.getValue().hasValue()) {
- response = responsePrototype.newBuilderForType()
- .mergeFrom(result.getValue().getValue()).build();
+ Message.Builder builder = responsePrototype.newBuilderForType();
+ ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
+ response = builder.build();
} else {
response = responsePrototype.getDefaultInstanceForType();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
index cabe5e6..c984797 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
@@ -96,8 +96,9 @@ public class RegionCoprocessorRpcChannel extends CoprocessorRpcChannel{
.callWithRetries(callable, operationTimeout);
Message response = null;
if (result.getValue().hasValue()) {
- response = responsePrototype.newBuilderForType()
- .mergeFrom(result.getValue().getValue()).build();
+ Message.Builder builder = responsePrototype.newBuilderForType();
+ ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
+ response = builder.build();
} else {
response = responsePrototype.getDefaultInstanceForType();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
index 76d7f9f..1d5e215 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
@@ -62,8 +62,9 @@ public class RegionServerCoprocessorRpcChannel extends CoprocessorRpcChannel {
ProtobufUtil.execRegionServerService(connection.getClient(serverName), call);
Message response = null;
if (result.getValue().hasValue()) {
- response =
- responsePrototype.newBuilderForType().mergeFrom(result.getValue().getValue()).build();
+ Message.Builder builder = responsePrototype.newBuilderForType();
+ ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
+ response = builder.build();
} else {
response = responsePrototype.getDefaultInstanceForType();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
index 4b70452..1913027 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
@@ -23,6 +23,7 @@ import com.google.protobuf.Descriptors.MethodDescriptor;
import com.google.protobuf.Message;
import com.google.protobuf.Message.Builder;
import com.google.protobuf.RpcCallback;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.exceptions.ConnectionClosingException;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader;
@@ -66,6 +68,7 @@ import org.apache.htrace.TraceScope;
import javax.net.SocketFactory;
import javax.security.sasl.SaslException;
+
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.Closeable;
@@ -950,7 +953,7 @@ public class RpcClientImpl extends AbstractRpcClient {
Message value = null;
if (call.responseDefaultType != null) {
Builder builder = call.responseDefaultType.newBuilderForType();
- builder.mergeDelimitedFrom(in);
+ ProtobufUtil.mergeDelimitedFrom(builder, in);
value = builder.build();
}
CellScanner cellBlockScanner = null;
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 56d9a64..db7b074 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -22,6 +22,7 @@ import static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpeci
import java.io.ByteArrayOutputStream;
import java.io.IOException;
+import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@@ -66,6 +67,7 @@ import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.io.LimitInputStream;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
@@ -146,6 +148,7 @@ import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
+import com.google.protobuf.CodedInputStream;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
@@ -2730,8 +2733,9 @@ public final class ProtobufUtil {
ClientProtos.CellVisibility.Builder builder = ClientProtos.CellVisibility.newBuilder();
ClientProtos.CellVisibility proto = null;
try {
- proto = builder.mergeFrom(protoBytes).build();
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(builder, protoBytes);
+ proto = builder.build();
+ } catch (IOException e) {
throw new DeserializationException(e);
}
return toCellVisibility(proto);
@@ -2772,8 +2776,9 @@ public final class ProtobufUtil {
ClientProtos.Authorizations.Builder builder = ClientProtos.Authorizations.newBuilder();
ClientProtos.Authorizations proto = null;
try {
- proto = builder.mergeFrom(protoBytes).build();
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(builder, protoBytes);
+ proto = builder.build();
+ } catch (IOException e) {
throw new DeserializationException(e);
}
return toAuthorizations(proto);
@@ -2832,6 +2837,104 @@ public final class ProtobufUtil {
return result;
}
+ /**
+ * This version of protobuf's mergeDelimitedFrom avoids the hard-coded 64MB limit for decoding
+ * buffers
+ * @param builder current message builder
+ * @param in InputStream with delimited protobuf data
+ * @throws IOException
+ */
+ public static void mergeDelimitedFrom(Message.Builder builder, InputStream in)
+ throws IOException {
+ // This used to be builder.mergeDelimitedFrom(in);
+ // but is replaced to allow us to bump the protobuf size limit.
+ final int firstByte = in.read();
+ if (firstByte != -1) {
+ final int size = CodedInputStream.readRawVarint32(firstByte, in);
+ final InputStream limitedInput = new LimitInputStream(in, size);
+ final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
+ codedInput.setSizeLimit(size);
+ builder.mergeFrom(codedInput);
+ codedInput.checkLastTagWas(0);
+ }
+ }
+
+ /**
+ * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+ * buffers where the message size is known
+ * @param builder current message builder
+ * @param in InputStream containing protobuf data
+ * @param size known size of protobuf data
+ * @throws IOException
+ */
+ public static void mergeFrom(Message.Builder builder, InputStream in, int size)
+ throws IOException {
+ final CodedInputStream codedInput = CodedInputStream.newInstance(in);
+ codedInput.setSizeLimit(size);
+ builder.mergeFrom(codedInput);
+ codedInput.checkLastTagWas(0);
+ }
+
+ /**
+ * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+ * buffers where the message size is not known
+ * @param builder current message builder
+ * @param in InputStream containing protobuf data
+ * @throws IOException
+ */
+ public static void mergeFrom(Message.Builder builder, InputStream in)
+ throws IOException {
+ final CodedInputStream codedInput = CodedInputStream.newInstance(in);
+ codedInput.setSizeLimit(Integer.MAX_VALUE);
+ builder.mergeFrom(codedInput);
+ codedInput.checkLastTagWas(0);
+ }
+
+ /**
+ * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+ * buffers when working with ByteStrings
+ * @param builder current message builder
+ * @param bs ByteString containing the protobuf data
+ * @throws IOException
+ */
+ public static void mergeFrom(Message.Builder builder, ByteString bs) throws IOException {
+ final CodedInputStream codedInput = bs.newCodedInput();
+ codedInput.setSizeLimit(bs.size());
+ builder.mergeFrom(codedInput);
+ codedInput.checkLastTagWas(0);
+ }
+
+ /**
+ * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+ * buffers when working with byte arrays
+ * @param builder current message builder
+ * @param b byte array
+ * @throws IOException
+ */
+ public static void mergeFrom(Message.Builder builder, byte[] b) throws IOException {
+ final CodedInputStream codedInput = CodedInputStream.newInstance(b);
+ codedInput.setSizeLimit(b.length);
+ builder.mergeFrom(codedInput);
+ codedInput.checkLastTagWas(0);
+ }
+
+ /**
+ * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+ * buffers when working with byte arrays
+ * @param builder current message builder
+ * @param b byte array
+ * @param offset offset into the byte array where the protobuf data starts
+ * @param length length of the protobuf data
+ * @throws IOException
+ */
+ public static void mergeFrom(Message.Builder builder, byte[] b, int offset, int length)
+ throws IOException {
+ final CodedInputStream codedInput = CodedInputStream.newInstance(b, offset, length);
+ codedInput.setSizeLimit(length);
+ builder.mergeFrom(codedInput);
+ codedInput.checkLastTagWas(0);
+ }
+
public static ReplicationLoadSink toReplicationLoadSink(
ClusterStatusProtos.ReplicationLoadSink cls) {
return new ReplicationLoadSink(cls.getAgeOfLastAppliedOp(), cls.getTimeStampsOfLastAppliedOp());
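Callers that previously used Message#parseDelimitedFrom switch to the new mergeDelimitedFrom helper in the same way; a short sketch matching the MutationSerialization change later in this diff (in is the deserializer's InputStream):

    ClientProtos.MutationProto.Builder builder = ClientProtos.MutationProto.newBuilder();
    ProtobufUtil.mergeDelimitedFrom(builder, in);
    ClientProtos.MutationProto proto = builder.build();
    Mutation mutation = ProtobufUtil.toMutation(proto);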
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
index d20ab44..e1ff078 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
@@ -41,8 +41,6 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.NodeExistsException;
-import com.google.protobuf.InvalidProtocolBufferException;
-
@InterfaceAudience.Private
public class ReplicationPeerZKImpl implements ReplicationPeer, Abortable, Closeable {
private static final Log LOG = LogFactory.getLog(ReplicationPeerZKImpl.class);
@@ -199,9 +197,10 @@ public class ReplicationPeerZKImpl implements ReplicationPeer, Abortable, Closea
ZooKeeperProtos.ReplicationState.newBuilder();
ZooKeeperProtos.ReplicationState state;
try {
- state = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
+ ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+ state = builder.build();
return state.getState();
- } catch (InvalidProtocolBufferException e) {
+ } catch (IOException e) {
throw new DeserializationException(e);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
index de6f79e..aa2348e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
@@ -48,7 +48,6 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
import org.apache.zookeeper.KeeperException;
import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
/**
* This class provides an implementation of the ReplicationPeers interface using Zookeeper. The
@@ -492,8 +491,9 @@ public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements Re
ZooKeeperProtos.ReplicationPeer.newBuilder();
ZooKeeperProtos.ReplicationPeer peer;
try {
- peer = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+ peer = builder.build();
+ } catch (IOException e) {
throw new DeserializationException(e);
}
return convert(peer);
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
index 0fb6969..4299003 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.token;
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
@@ -136,8 +137,10 @@ public class AuthenticationTokenIdentifier extends TokenIdentifier {
int len = in.readInt();
byte[] inBytes = new byte[len];
in.readFully(inBytes);
- AuthenticationProtos.TokenIdentifier identifier =
- AuthenticationProtos.TokenIdentifier.newBuilder().mergeFrom(inBytes).build();
+ AuthenticationProtos.TokenIdentifier.Builder builder =
+ AuthenticationProtos.TokenIdentifier.newBuilder();
+ ProtobufUtil.mergeFrom(builder, inBytes);
+ AuthenticationProtos.TokenIdentifier identifier = builder.build();
// sanity check on type
if (!identifier.hasKind() ||
identifier.getKind() != AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateClientSideReader.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateClientSideReader.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateClientSideReader.java
index d12231b..03579ff 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateClientSideReader.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateClientSideReader.java
@@ -19,8 +19,6 @@
*/
package org.apache.hadoop.hbase.zookeeper;
-import com.google.protobuf.InvalidProtocolBufferException;
-
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -28,6 +26,7 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.zookeeper.KeeperException;
+import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -189,9 +188,9 @@ public class ZKTableStateClientSideReader {
ProtobufUtil.expectPBMagicPrefix(data);
ZooKeeperProtos.Table.Builder builder = ZooKeeperProtos.Table.newBuilder();
int magicLen = ProtobufUtil.lengthOfPBMagic();
- ZooKeeperProtos.Table t = builder.mergeFrom(data, magicLen, data.length - magicLen).build();
- return t.getState();
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(builder, data, magicLen, data.length - magicLen);
+ return builder.getState();
+ } catch (IOException e) {
KeeperException ke = new KeeperException.DataInconsistencyException();
ke.initCause(e);
throw ke;
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index abf1873..3402b18 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -1893,12 +1893,14 @@ public class ZKUtil {
}
// parse the data of the above peer znode.
try {
- String clusterKey = ZooKeeperProtos.ReplicationPeer.newBuilder().
- mergeFrom(data, pblen, data.length - pblen).getClusterkey();
- sb.append("\n").append(znodeToProcess).append(": ").append(clusterKey);
- // add the peer-state.
- appendPeerState(zkw, znodeToProcess, sb);
- } catch (InvalidProtocolBufferException ipbe) {
+ ZooKeeperProtos.ReplicationPeer.Builder builder =
+ ZooKeeperProtos.ReplicationPeer.newBuilder();
+ ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+ String clusterKey = builder.getClusterkey();
+ sb.append("\n").append(znodeToProcess).append(": ").append(clusterKey);
+ // add the peer-state.
+ appendPeerState(zkw, znodeToProcess, sb);
+ } catch (IOException ipbe) {
LOG.warn("Got Exception while parsing peer: " + znodeToProcess, ipbe);
}
}
@@ -1916,8 +1918,12 @@ public class ZKUtil {
byte[] peerStateData;
try {
peerStateData = ZKUtil.getData(zkw, peerStateZnode);
- sb.append(ZooKeeperProtos.ReplicationState.newBuilder()
- .mergeFrom(peerStateData, pblen, peerStateData.length - pblen).getState().name());
+ ZooKeeperProtos.ReplicationState.Builder builder =
+ ZooKeeperProtos.ReplicationState.newBuilder();
+ ProtobufUtil.mergeFrom(builder, peerStateData, pblen, peerStateData.length - pblen);
+ sb.append(builder.getState().name());
+ } catch (IOException ipbe) {
+ LOG.warn("Got Exception while parsing peer: " + znodeToProcess, ipbe);
} catch (InterruptedException e) {
zkw.interruptedException(e);
return;
@@ -2138,8 +2144,9 @@ public class ZKUtil {
ZooKeeperProtos.ReplicationHLogPosition.newBuilder();
ZooKeeperProtos.ReplicationHLogPosition position;
try {
- position = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+ position = builder.build();
+ } catch (IOException e) {
throw new DeserializationException(e);
}
return position.getPosition();
@@ -2194,8 +2201,9 @@ public class ZKUtil {
int pblen = ProtobufUtil.lengthOfPBMagic();
RegionStoreSequenceIds storeIds = null;
try {
- storeIds = regionSequenceIdsBuilder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(regionSequenceIdsBuilder, bytes, pblen, bytes.length - pblen);
+ storeIds = regionSequenceIdsBuilder.build();
+ } catch (IOException e) {
throw new DeserializationException(e);
}
return storeIds;
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
index 96ecf28..c063aa9 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
@@ -37,6 +37,7 @@ public class PBCell extends PBType<CellProtos.Cell> {
public int skip(PositionedByteRange src) {
CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
CodedInputStream is = inputStreamFromByteRange(src);
+ is.setSizeLimit(src.getLength());
try {
builder.mergeFrom(is);
int consumed = is.getTotalBytesRead();
@@ -51,6 +52,7 @@ public class PBCell extends PBType<CellProtos.Cell> {
public CellProtos.Cell decode(PositionedByteRange src) {
CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
CodedInputStream is = inputStreamFromByteRange(src);
+ is.setSizeLimit(src.getLength());
try {
CellProtos.Cell ret = builder.mergeFrom(is).build();
src.setPosition(src.getPosition() + is.getTotalBytesRead());
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
index 7c0c11f..214d574 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
import org.codehaus.jackson.annotate.JsonProperty;
@@ -198,7 +199,7 @@ public class CellModel implements ProtobufMessageHandler, Serializable {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
Cell.Builder builder = Cell.newBuilder();
- builder.mergeFrom(message);
+ ProtobufUtil.mergeFrom(builder, message);
setColumn(builder.getColumn().toByteArray());
setValue(builder.getData().toByteArray());
if (builder.hasTimestamp()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java
index 094da36..3f76f24 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java
@@ -32,6 +32,7 @@ import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
@@ -133,7 +134,7 @@ public class CellSetModel implements Serializable, ProtobufMessageHandler {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
CellSet.Builder builder = CellSet.newBuilder();
- builder.mergeFrom(message);
+ ProtobufUtil.mergeFrom(builder, message);
for (CellSet.Row row: builder.getRowsList()) {
RowModel rowModel = new RowModel(row.getKey().toByteArray());
for (Cell cell: row.getValuesList()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
index 2ffdd4f..f9eaa22 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
@@ -67,6 +67,7 @@ import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.TimestampsFilter;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
@@ -809,7 +810,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
Scanner.Builder builder = Scanner.newBuilder();
- builder.mergeFrom(message);
+ ProtobufUtil.mergeFrom(builder, message);
if (builder.hasStartRow()) {
startRow = builder.getStartRow().toByteArray();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
index 3b044e7..06660a7 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
@@ -31,6 +31,7 @@ import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
import org.apache.hadoop.hbase.util.Bytes;
@@ -748,7 +749,7 @@ public class StorageClusterStatusModel
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
StorageClusterStatus.Builder builder = StorageClusterStatus.newBuilder();
- builder.mergeFrom(message);
+ ProtobufUtil.mergeFrom(builder, message);
if (builder.hasRegions()) {
regions = builder.getRegions();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
index 700e766..d796f8b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
@@ -30,6 +30,7 @@ import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo;
@@ -146,7 +147,7 @@ public class TableInfoModel implements Serializable, ProtobufMessageHandler {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
TableInfo.Builder builder = TableInfo.newBuilder();
- builder.mergeFrom(message);
+ ProtobufUtil.mergeFrom(builder, message);
setName(builder.getName());
for (TableInfo.Region region: builder.getRegionsList()) {
add(new TableRegionModel(builder.getName(), region.getId(),
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
index 2ed4e80..37d5461 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
@@ -28,6 +28,7 @@ import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList;
@@ -104,7 +105,7 @@ public class TableListModel implements Serializable, ProtobufMessageHandler {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
TableList.Builder builder = TableList.newBuilder();
- builder.mergeFrom(message);
+ ProtobufUtil.mergeFrom(builder, message);
for (String table: builder.getNameList()) {
this.add(new TableModel(table));
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
index d843e79..52e40b8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema;
@@ -309,7 +310,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
TableSchema.Builder builder = TableSchema.newBuilder();
- builder.mergeFrom(message);
+ ProtobufUtil.mergeFrom(builder, message);
this.setName(builder.getName());
for (TableSchema.Attribute attr: builder.getAttrsList()) {
this.addAttribute(attr.getName(), attr.getValue());
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
index 8b08279..ef131d3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
@@ -27,6 +27,7 @@ import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.RESTServlet;
import org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version;
@@ -188,7 +189,7 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
Version.Builder builder = Version.newBuilder();
- builder.mergeFrom(message);
+ ProtobufUtil.mergeFrom(builder, message);
if (builder.hasRestVersion()) {
restVersion = builder.getRestVersion();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogTask.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogTask.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogTask.java
index abb0b0a..1feb417 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogTask.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogTask.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase;
+import java.io.IOException;
+
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -25,8 +27,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.InvalidProtocolBufferException;
-
/**
* State of a WAL log split during distributed splitting. State is kept up in zookeeper.
* Encapsulates protobuf serialization/deserialization so we don't leak generated pb outside of
@@ -160,10 +160,10 @@ public class SplitLogTask {
ProtobufUtil.expectPBMagicPrefix(data);
try {
int prefixLen = ProtobufUtil.lengthOfPBMagic();
- ZooKeeperProtos.SplitLogTask slt = ZooKeeperProtos.SplitLogTask.newBuilder().
- mergeFrom(data, prefixLen, data.length - prefixLen).build();
- return new SplitLogTask(slt);
- } catch (InvalidProtocolBufferException e) {
+ ZooKeeperProtos.SplitLogTask.Builder builder = ZooKeeperProtos.SplitLogTask.newBuilder();
+ ProtobufUtil.mergeFrom(builder, data, prefixLen, data.length - prefixLen);
+ return new SplitLogTask(builder.build());
+ } catch (IOException e) {
throw new DeserializationException(Bytes.toStringBinary(data, 0, 64), e);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index 55b3aef..7ed940e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -126,7 +126,6 @@ import com.google.protobuf.BlockingService;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Descriptors.MethodDescriptor;
import com.google.protobuf.Message;
-import com.google.protobuf.Message.Builder;
import com.google.protobuf.ServiceException;
import com.google.protobuf.TextFormat;
@@ -1702,7 +1701,9 @@ public class RpcServer implements RpcServerInterface {
CodedInputStream cis = CodedInputStream.newInstance(buf, offset, buf.length);
int headerSize = cis.readRawVarint32();
offset = cis.getTotalBytesRead();
- RequestHeader header = RequestHeader.newBuilder().mergeFrom(buf, offset, headerSize).build();
+ Message.Builder builder = RequestHeader.newBuilder();
+ ProtobufUtil.mergeFrom(builder, buf, offset, headerSize);
+ RequestHeader header = (RequestHeader) builder.build();
offset += headerSize;
int id = header.getCallId();
if (LOG.isTraceEnabled()) {
@@ -1729,13 +1730,14 @@ public class RpcServer implements RpcServerInterface {
if (header.hasRequestParam() && header.getRequestParam()) {
md = this.service.getDescriptorForType().findMethodByName(header.getMethodName());
if (md == null) throw new UnsupportedOperationException(header.getMethodName());
- Builder builder = this.service.getRequestPrototype(md).newBuilderForType();
+ builder = this.service.getRequestPrototype(md).newBuilderForType();
// To read the varint, I need an inputstream; might as well be a CIS.
cis = CodedInputStream.newInstance(buf, offset, buf.length);
int paramSize = cis.readRawVarint32();
offset += cis.getTotalBytesRead();
if (builder != null) {
- param = builder.mergeFrom(buf, offset, paramSize).build();
+ ProtobufUtil.mergeFrom(builder, buf, offset, paramSize);
+ param = builder.build();
}
offset += paramSize;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
index b15b513..4d200e8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.Serialization;
@@ -57,7 +57,9 @@ public class MutationSerialization implements Serialization<Mutation> {
@Override
public Mutation deserialize(Mutation mutation) throws IOException {
- MutationProto proto = MutationProto.parseDelimitedFrom(in);
+ ClientProtos.MutationProto.Builder builder = ClientProtos.MutationProto.newBuilder();
+ ProtobufUtil.mergeDelimitedFrom(builder, in);
+ ClientProtos.MutationProto proto = builder.build();
return ProtobufUtil.toMutation(proto);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
index ebd3664..19b12c5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
@@ -125,7 +125,9 @@ public class ResultSerialization extends Configured implements Serialization<Res
@Override
public Result deserialize(Result mutation) throws IOException {
- ClientProtos.Result proto = ClientProtos.Result.parseDelimitedFrom(in);
+ ClientProtos.Result.Builder builder = ClientProtos.Result.newBuilder();
+ ProtobufUtil.mergeDelimitedFrom(builder, in);
+ ClientProtos.Result proto = builder.build();
return ProtobufUtil.toResult(proto);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 0ddf0e4..6c2230d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -588,8 +588,9 @@ public class MasterRpcServices extends RSRpcServices
}
//invoke the method
- Message execRequest = service.getRequestPrototype(methodDesc).newBuilderForType()
- .mergeFrom(call.getRequest()).build();
+ Message.Builder builderForType = service.getRequestPrototype(methodDesc).newBuilderForType();
+ ProtobufUtil.mergeFrom(builderForType, call.getRequest());
+ Message execRequest = builderForType.build();
final Message.Builder responseBuilder =
service.getResponsePrototype(methodDesc).newBuilderForType();
service.callMethod(methodDesc, execController, execRequest, new RpcCallback<Message>() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java
index cfaeb98..d2abfd9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java
@@ -42,8 +42,6 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.hbase.zookeeper.lock.ZKInterProcessReadWriteLock;
import org.apache.zookeeper.KeeperException;
-import com.google.protobuf.InvalidProtocolBufferException;
-
/**
* A manager for distributed table level locks.
*/
@@ -214,10 +212,10 @@ public abstract class TableLockManager {
return null;
}
try {
- ZooKeeperProtos.TableLock data = ZooKeeperProtos.TableLock.newBuilder().mergeFrom(
- bytes, pblen, bytes.length - pblen).build();
- return data;
- } catch (InvalidProtocolBufferException ex) {
+ ZooKeeperProtos.TableLock.Builder builder = ZooKeeperProtos.TableLock.newBuilder();
+ ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+ return builder.build();
+ } catch (IOException ex) {
LOG.warn("Exception in deserialization", ex);
}
return null;
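
The byte-array call sites follow the same shape: skip the PB magic prefix, merge the remainder with an explicit offset and length, and catch IOException rather than InvalidProtocolBufferException, which is why the protobuf import can be dropped above. The offset/length signature is from the diff; the body below is an illustrative sketch, not the canonical ProtobufUtil code.

import java.io.IOException;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Message;

public final class MergeFromBytesSketch {
  // Sketch only: merge a message that sits at an offset inside a larger array,
  // e.g. a znode payload after the PB magic prefix.
  public static void mergeFrom(Message.Builder builder, byte[] b, int offset, int length)
      throws IOException {
    CodedInputStream coded = CodedInputStream.newInstance(b, offset, length);
    coded.setSizeLimit(length); // never smaller than the slice we were handed
    builder.mergeFrom(coded);
    coded.checkLastTagWas(0);
  }
}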
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index d0e5881..f1f21e2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -6176,8 +6176,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver { //
" in region "+Bytes.toStringBinary(getRegionName()));
}
- Message request = service.getRequestPrototype(methodDesc).newBuilderForType()
- .mergeFrom(call.getRequest()).build();
+ Message.Builder builder = service.getRequestPrototype(methodDesc).newBuilderForType();
+ ProtobufUtil.mergeFrom(builder, call.getRequest());
+ Message request = builder.build();
if (coprocessorHost != null) {
request = coprocessorHost.preEndpointInvocation(service, methodName, request);
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index f702642..7c8b42b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -3102,9 +3102,9 @@ public class HRegionServer extends HasThread implements
throw new UnknownProtocolException(service.getClass(), "Unknown method " + methodName
+ " called on service " + serviceName);
}
- Message request =
- service.getRequestPrototype(methodDesc).newBuilderForType().mergeFrom(call.getRequest())
- .build();
+ Message.Builder builderForType = service.getRequestPrototype(methodDesc).newBuilderForType();
+ ProtobufUtil.mergeFrom(builderForType, call.getRequest());
+ Message request = builderForType.build();
final Message.Builder responseBuilder =
service.getResponsePrototype(methodDesc).newBuilderForType();
service.callMethod(methodDesc, controller, request, new RpcCallback<Message>() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index 285f69b..3aba71a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.regionserver.wal;
import java.io.EOFException;
import java.io.IOException;
-import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
@@ -35,8 +34,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.io.LimitInputStream;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;
@@ -45,7 +45,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import com.google.protobuf.CodedInputStream;
-import com.google.protobuf.InvalidProtocolBufferException;
/**
* A Protobuf based WAL has the following structure:
@@ -317,9 +316,9 @@ public class ProtobufLogReader extends ReaderBase {
"inputStream.available()= " + this.inputStream.available() + ", " +
"entry size= " + size);
}
- final InputStream limitedInput = new LimitInputStream(this.inputStream, size);
- builder.mergeFrom(limitedInput);
- } catch (InvalidProtocolBufferException ipbe) {
+ ProtobufUtil.mergeFrom(builder, new LimitInputStream(this.inputStream, size),
+ (int)size);
+ } catch (IOException ipbe) {
throw (EOFException) new EOFException("Invalid PB, EOF? Ignoring; originalPosition=" +
originalPosition + ", currentPosition=" + this.inputStream.getPos() +
", messageSize=" + size + ", currentAvailable=" + available).initCause(ipbe);
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index a2648e9..5d102cb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -71,7 +71,6 @@ import org.apache.hadoop.io.Text;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
-import com.google.protobuf.InvalidProtocolBufferException;
/**
* Maintains lists of permission grants to users and groups to allow for
@@ -595,11 +594,11 @@ public class AccessControlLists {
if (ProtobufUtil.isPBMagicPrefix(data)) {
int pblen = ProtobufUtil.lengthOfPBMagic();
try {
- AccessControlProtos.UsersAndPermissions perms =
- AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
- data, pblen, data.length - pblen).build();
- return ProtobufUtil.toUserTablePermissions(perms);
- } catch (InvalidProtocolBufferException e) {
+ AccessControlProtos.UsersAndPermissions.Builder builder =
+ AccessControlProtos.UsersAndPermissions.newBuilder();
+ ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+ return ProtobufUtil.toUserTablePermissions(builder.build());
+ } catch (IOException e) {
throw new DeserializationException(e);
}
} else {
@@ -694,9 +693,13 @@ public class AccessControlLists {
Tag tag = tagsIterator.next();
if (tag.getType() == ACL_TAG_TYPE) {
// Deserialize the table permissions from the KV
- ListMultimap<String,Permission> kvPerms = ProtobufUtil.toUsersAndPermissions(
- AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
- tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()).build());
+ // TODO: This can be improved. Don't build UsersAndPermissions just to unpack it again,
+ // use the builder
+ AccessControlProtos.UsersAndPermissions.Builder builder =
+ AccessControlProtos.UsersAndPermissions.newBuilder();
+ ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength());
+ ListMultimap<String,Permission> kvPerms =
+ ProtobufUtil.toUsersAndPermissions(builder.build());
// Are there permissions for this user?
List<Permission> userPerms = kvPerms.get(user.getShortName());
if (userPerms != null) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index e3ab641..477d9f6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -1905,9 +1905,13 @@ public class AccessController extends BaseMasterAndRegionObserver
tags.add(tag);
} else {
// Merge the perms from the older ACL into the current permission set
- ListMultimap<String,Permission> kvPerms = ProtobufUtil.toUsersAndPermissions(
- AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
- tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()).build());
+ // TODO: The efficiency of this can be improved. Don't build just to unpack
+ // again, use the builder
+ AccessControlProtos.UsersAndPermissions.Builder builder =
+ AccessControlProtos.UsersAndPermissions.newBuilder();
+ ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength());
+ ListMultimap<String,Permission> kvPerms =
+ ProtobufUtil.toUsersAndPermissions(builder.build());
perms.putAll(kvPerms);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java
index 2d7d9c9..0a85f23 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java
@@ -689,7 +689,8 @@ class HbaseObjectWritableFor96Migration implements Writable, WritableWithSize, C
byte [] scanBytes = new byte[length];
in.readFully(scanBytes);
ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
- instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build());
+ ProtobufUtil.mergeFrom(scanProto, scanBytes);
+ instance = ProtobufUtil.toScan(scanProto.build());
} else { // Writable or Serializable
Class instanceClass = null;
int b = (byte)WritableUtils.readVInt(in);
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
index ebff5ff..f327305 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
@@ -65,8 +65,6 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
import org.apache.hadoop.util.ReflectionUtils;
-import com.google.protobuf.InvalidProtocolBufferException;
-
/**
* Utility method to support visibility
*/
@@ -165,10 +163,10 @@ public class VisibilityUtils {
if (ProtobufUtil.isPBMagicPrefix(data)) {
int pblen = ProtobufUtil.lengthOfPBMagic();
try {
- VisibilityLabelsRequest request = VisibilityLabelsRequest.newBuilder()
- .mergeFrom(data, pblen, data.length - pblen).build();
- return request.getVisLabelList();
- } catch (InvalidProtocolBufferException e) {
+ VisibilityLabelsRequest.Builder builder = VisibilityLabelsRequest.newBuilder();
+ ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+ return builder.getVisLabelList();
+ } catch (IOException e) {
throw new DeserializationException(e);
}
}
@@ -186,10 +184,10 @@ public class VisibilityUtils {
if (ProtobufUtil.isPBMagicPrefix(data)) {
int pblen = ProtobufUtil.lengthOfPBMagic();
try {
- MultiUserAuthorizations multiUserAuths = MultiUserAuthorizations.newBuilder()
- .mergeFrom(data, pblen, data.length - pblen).build();
- return multiUserAuths;
- } catch (InvalidProtocolBufferException e) {
+ MultiUserAuthorizations.Builder builder = MultiUserAuthorizations.newBuilder();
+ ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+ return builder.build();
+ } catch (IOException e) {
throw new DeserializationException(e);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 9a6576a..4f3f7ce 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -79,7 +79,6 @@ import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import com.google.common.primitives.Ints;
-import com.google.protobuf.InvalidProtocolBufferException;
/**
* Utility methods for interacting with the underlying file system.
@@ -526,11 +525,10 @@ public abstract class FSUtils {
int pblen = ProtobufUtil.lengthOfPBMagic();
FSProtos.HBaseVersionFileContent.Builder builder =
FSProtos.HBaseVersionFileContent.newBuilder();
- FSProtos.HBaseVersionFileContent fileContent;
try {
- fileContent = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
- return fileContent.getVersion();
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+ return builder.getVersion();
+ } catch (IOException e) {
// Convert
throw new DeserializationException(e);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java
index bcd7446..0a0d1ab 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase.zookeeper;
+import java.io.IOException;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -27,8 +29,6 @@ import org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.KeeperException;
-import com.google.protobuf.InvalidProtocolBufferException;
-
/**
* Tracks the load balancer state up in ZK
*/
@@ -85,8 +85,8 @@ public class LoadBalancerTracker extends ZooKeeperNodeTracker {
LoadBalancerProtos.LoadBalancerState.newBuilder();
try {
int magicLen = ProtobufUtil.lengthOfPBMagic();
- builder.mergeFrom(pbBytes, magicLen, pbBytes.length - magicLen);
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(builder, pbBytes, magicLen, pbBytes.length - magicLen);
+ } catch (IOException e) {
throw new DeserializationException(e);
}
return builder.build();
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
index b9172bc..365010f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
@@ -87,7 +87,7 @@ public class RegionServerTracker extends ZooKeeperListener {
byte[] data = ZKUtil.getData(watcher, nodePath);
if (data != null && data.length > 0 && ProtobufUtil.isPBMagicPrefix(data)) {
int magicLen = ProtobufUtil.lengthOfPBMagic();
- rsInfoBuilder.mergeFrom(data, magicLen, data.length - magicLen);
+ ProtobufUtil.mergeFrom(rsInfoBuilder, data, magicLen, data.length - magicLen);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Added tracking of RS " + nodePath);
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateManager.java
index 7c8229e..4ac153a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableStateManager.java
@@ -18,7 +18,6 @@
*/
package org.apache.hadoop.hbase.zookeeper;
-import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -30,6 +29,7 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.zookeeper.KeeperException;
+import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.Arrays;
import java.util.HashMap;
@@ -324,9 +324,9 @@ public class ZKTableStateManager implements TableStateManager {
ProtobufUtil.expectPBMagicPrefix(data);
ZooKeeperProtos.Table.Builder builder = ZooKeeperProtos.Table.newBuilder();
int magicLen = ProtobufUtil.lengthOfPBMagic();
- ZooKeeperProtos.Table t = builder.mergeFrom(data, magicLen, data.length - magicLen).build();
- return t.getState();
- } catch (InvalidProtocolBufferException e) {
+ ProtobufUtil.mergeFrom(builder, data, magicLen, data.length - magicLen);
+ return builder.getState();
+ } catch (IOException e) {
KeeperException ke = new KeeperException.DataInconsistencyException();
ke.initCause(e);
throw ke;
http://git-wip-us.apache.org/repos/asf/hbase/blob/0ac0e934/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSerialization.java
new file mode 100644
index 0000000..c90b12e
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSerialization.java
@@ -0,0 +1,115 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.mapreduce;
+
+
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.experimental.categories.Category;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Result;
+
+import org.apache.hadoop.hbase.mapreduce.MutationSerialization;
+import org.apache.hadoop.hbase.mapreduce.ResultSerialization;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.serializer.Deserializer;
+import org.apache.hadoop.io.serializer.Serializer;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+@Category(SmallTests.class)
+public class TestSerialization {
+ @Rule public TestName name = new TestName();
+ private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+ private static Configuration conf;
+ private static final byte [] row = Bytes.toBytes("row1");
+ private static final byte [] qualifier = Bytes.toBytes("qualifier1");
+ private static final byte [] family = Bytes.toBytes("family1");
+ private static final byte [] value = new byte[100 * 1024 * 1024];
+
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ conf = TEST_UTIL.getConfiguration();
+ conf.setInt("hbase.client.keyvalue.maxsize", Integer.MAX_VALUE);
+ }
+
+ @Test
+ public void testLargeMutation()
+ throws Exception {
+ Put put = new Put(row);
+ put.addColumn(family, qualifier, value);
+
+ MutationSerialization serialization = new MutationSerialization();
+ Serializer<Mutation> serializer = serialization.getSerializer(Mutation.class);
+ Deserializer<Mutation> deserializer = serialization.getDeserializer(Mutation.class);
+ ByteArrayOutputStream os = new ByteArrayOutputStream();
+ ByteArrayInputStream is = null;
+ try {
+ serializer.open(os);
+ serializer.serialize(put);
+ os.flush();
+ is = new ByteArrayInputStream(os.toByteArray());
+ deserializer.open(is);
+ deserializer.deserialize(null);
+ } catch (InvalidProtocolBufferException e) {
+ assertTrue("Got InvalidProtocolBufferException in " + name.getMethodName(),
+ e.getCause() instanceof InvalidProtocolBufferException);
+ } catch (Exception e) {
+ fail("Got an invalid exception: " + e);
+ }
+ }
+
+ @Test
+ public void testLargeResult()
+ throws Exception {
+ Result res = Result.create(new KeyValue[] {new KeyValue(row, family, qualifier, 0L, value)});
+
+ ResultSerialization serialization = new ResultSerialization();
+ Serializer<Result> serializer = serialization.getSerializer(Result.class);
+ Deserializer<Result> deserializer = serialization.getDeserializer(Result.class);
+ ByteArrayOutputStream os = new ByteArrayOutputStream();
+ ByteArrayInputStream is = null;
+ try {
+ serializer.open(os);
+ serializer.serialize(res);
+ os.flush();
+ is = new ByteArrayInputStream(os.toByteArray());
+ deserializer.open(is);
+ deserializer.deserialize(null);
+ } catch (InvalidProtocolBufferException e) {
+ assertTrue("Got InvalidProtocolBufferException in " + name.getMethodName(),
+ e.getCause() instanceof InvalidProtocolBufferException);
+ } catch (Exception e) {
+ fail("Got an invalid exception: " + e);
+ }
+ }
+}
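
The new test targets exactly the failure this patch addresses: the 100 MB value is larger than protobuf's default 64 MB parse limit, so the old parseDelimitedFrom path would reject the serialized Mutation and Result, while the ProtobufUtil-based serializations are expected to read them back; any unexpected exception fails the test. Assuming a standard checkout, the test can be run on its own with something like:

mvn test -pl hbase-server -Dtest=TestSerialization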