Posted to commits@hbase.apache.org by st...@apache.org on 2016/07/21 17:20:54 UTC
[31/39] hbase git commit: HBASE-16263 Move all to do w/ protobuf -- *.proto files and generated classes -- under hbase-protocol
http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestRpcServiceProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestRpcServiceProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestRpcServiceProtos.java
new file mode 100644
index 0000000..3fd34e9
--- /dev/null
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestRpcServiceProtos.java
@@ -0,0 +1,424 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: test_rpc_service.proto
+
+package org.apache.hadoop.hbase.ipc.protobuf.generated;
+
+public final class TestRpcServiceProtos {
+ private TestRpcServiceProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ /**
+ * Protobuf service {@code TestProtobufRpcProto}
+ *
+ * <pre>
+ **
+ * A protobuf service for use in tests
+ * </pre>
+ */
+ public static abstract class TestProtobufRpcProto
+ implements com.google.protobuf.Service {
+ protected TestProtobufRpcProto() {}
+
+ public interface Interface {
+ /**
+ * <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
+
+ /**
+ * <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
+ */
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);
+
+ /**
+ * <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
+ */
+ public abstract void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
+
+ }
+
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new TestProtobufRpcProto() {
+ @java.lang.Override
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
+ impl.ping(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done) {
+ impl.echo(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
+ impl.error(controller, request, done);
+ }
+
+ };
+ }
+
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.ping(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto)request);
+ case 1:
+ return impl.echo(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto)request);
+ case 2:
+ return impl.error(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ /**
+ * <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
+
+ /**
+ * <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
+ */
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);
+
+ /**
+ * <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
+ */
+ public abstract void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
+
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.getDescriptor().getServices().get(0);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.ping(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto>specializeCallback(
+ done));
+ return;
+ case 1:
+ this.echo(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto>specializeCallback(
+ done));
+ return;
+ case 2:
+ this.error(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto>specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.class,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.class,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance()));
+ }
+
+ public void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.class,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()));
+ }
+ }
+
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ public interface BlockingInterface {
+ public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+ }
+
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance());
+ }
+
+ }
+
+ // @@protoc_insertion_point(class_scope:TestProtobufRpcProto)
+ }
+
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\026test_rpc_service.proto\032\ntest.proto2\250\001\n" +
+ "\024TestProtobufRpcProto\022/\n\004ping\022\022.EmptyReq" +
+ "uestProto\032\023.EmptyResponseProto\022-\n\004echo\022\021" +
+ ".EchoRequestProto\032\022.EchoResponseProto\0220\n" +
+ "\005error\022\022.EmptyRequestProto\032\023.EmptyRespon" +
+ "seProtoBL\n.org.apache.hadoop.hbase.ipc.p" +
+ "rotobuf.generatedB\024TestRpcServiceProtos\210" +
+ "\001\001\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.getDescriptor(),
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
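
For orientation, below is a minimal sketch (not part of this patch; the class name TestProtobufRpcProtoSketch is hypothetical) of how the generated TestProtobufRpcProto wrapper above is typically exercised in-process: a BlockingInterface implementation is wrapped by newReflectiveBlockingService() and invoked through the service descriptor, with no real RPC channel involved. It assumes hbase-protocol (with TestProtos) and protobuf-java on the classpath.

// Hypothetical sketch, not from the commit: in-process use of the generated service wrapper.
import com.google.protobuf.BlockingService;
import com.google.protobuf.Descriptors;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto;

public class TestProtobufRpcProtoSketch {
  public static void main(String[] args) throws ServiceException {
    // In-process implementation of the generated blocking interface.
    TestProtobufRpcProto.BlockingInterface impl = new TestProtobufRpcProto.BlockingInterface() {
      public TestProtos.EmptyResponseProto ping(RpcController controller,
          TestProtos.EmptyRequestProto request) {
        return TestProtos.EmptyResponseProto.getDefaultInstance();
      }
      public TestProtos.EchoResponseProto echo(RpcController controller,
          TestProtos.EchoRequestProto request) {
        // Stub only; a real test would copy the request payload into the response.
        return TestProtos.EchoResponseProto.getDefaultInstance();
      }
      public TestProtos.EmptyResponseProto error(RpcController controller,
          TestProtos.EmptyRequestProto request) throws ServiceException {
        throw new ServiceException("error() always fails");
      }
    };

    // Wrap the implementation and dispatch ping() by its method descriptor,
    // exactly as an RPC server would after decoding an incoming request.
    BlockingService service = TestProtobufRpcProto.newReflectiveBlockingService(impl);
    Descriptors.MethodDescriptor ping =
        service.getDescriptorForType().findMethodByName("ping");
    TestProtos.EmptyResponseProto response = (TestProtos.EmptyResponseProto)
        service.callBlockingMethod(ping, null,
            TestProtos.EmptyRequestProto.getDefaultInstance());
    System.out.println("ping returned: " + response);
  }
}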
http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
index d7b8461..4e9e0b4 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
@@ -21953,7 +21953,7 @@ public final class ClientProtos {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
+ com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
@@ -21969,7 +21969,7 @@ public final class ClientProtos {
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
@@ -22926,7 +22926,7 @@ public final class ClientProtos {
* <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>
getFsTokenFieldBuilder() {
if (fsTokenBuilder_ == null) {
fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder<
@@ -22968,7 +22968,7 @@ public final class ClientProtos {
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
@@ -23397,7 +23397,7 @@ public final class ClientProtos {
public final boolean isInitialized() {
if (!hasLoaded()) {
-
+
return false;
}
return true;
@@ -23679,7 +23679,7 @@ public final class ClientProtos {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
+ com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
@@ -23695,7 +23695,7 @@ public final class ClientProtos {
getKindBytes() {
java.lang.Object ref = kind_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
kind_ = b;
@@ -23722,7 +23722,7 @@ public final class ClientProtos {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
+ com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
@@ -23738,7 +23738,7 @@ public final class ClientProtos {
getServiceBytes() {
java.lang.Object ref = service_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
service_ = b;
@@ -24196,7 +24196,7 @@ public final class ClientProtos {
getKindBytes() {
java.lang.Object ref = kind_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
kind_ = b;
@@ -24270,7 +24270,7 @@ public final class ClientProtos {
getServiceBytes() {
java.lang.Object ref = service_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
service_ = b;
@@ -24820,16 +24820,16 @@ public final class ClientProtos {
public final boolean isInitialized() {
if (!hasTableName()) {
-
+
return false;
}
if (!getTableName().isInitialized()) {
-
+
return false;
}
if (hasRegion()) {
if (!getRegion().isInitialized()) {
-
+
return false;
}
}
@@ -24959,7 +24959,7 @@ public final class ClientProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
@@ -25076,7 +25076,7 @@ public final class ClientProtos {
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
@@ -25231,7 +25231,7 @@ public final class ClientProtos {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
+ com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
@@ -25247,7 +25247,7 @@ public final class ClientProtos {
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
@@ -25507,7 +25507,7 @@ public final class ClientProtos {
public final boolean isInitialized() {
if (!hasBulkToken()) {
-
+
return false;
}
return true;
@@ -25561,7 +25561,7 @@ public final class ClientProtos {
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
@@ -25775,7 +25775,7 @@ public final class ClientProtos {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
+ com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
@@ -25791,7 +25791,7 @@ public final class ClientProtos {
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
@@ -26114,12 +26114,12 @@ public final class ClientProtos {
public final boolean isInitialized() {
if (!hasBulkToken()) {
-
+
return false;
}
if (hasRegion()) {
if (!getRegion().isInitialized()) {
-
+
return false;
}
}
@@ -26174,7 +26174,7 @@ public final class ClientProtos {
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
@@ -26323,7 +26323,7 @@ public final class ClientProtos {
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
index 29c49de..528f6e6 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
@@ -1106,7 +1106,7 @@ public final class SecureBulkLoadProtos {
* <code>required .hbase.pb.DelegationToken fs_token = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>
getFsTokenFieldBuilder() {
if (fsTokenBuilder_ == null) {
fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder<
http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java
new file mode 100644
index 0000000..4c859e1
--- /dev/null
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java
@@ -0,0 +1,731 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: CellMessage.proto
+
+package org.apache.hadoop.hbase.rest.protobuf.generated;
+
+public final class CellMessage {
+ private CellMessage() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface CellOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional bytes row = 1;
+ /**
+ * <code>optional bytes row = 1;</code>
+ *
+ * <pre>
+ * unused if Cell is in a CellSet
+ * </pre>
+ */
+ boolean hasRow();
+ /**
+ * <code>optional bytes row = 1;</code>
+ *
+ * <pre>
+ * unused if Cell is in a CellSet
+ * </pre>
+ */
+ com.google.protobuf.ByteString getRow();
+
+ // optional bytes column = 2;
+ /**
+ * <code>optional bytes column = 2;</code>
+ */
+ boolean hasColumn();
+ /**
+ * <code>optional bytes column = 2;</code>
+ */
+ com.google.protobuf.ByteString getColumn();
+
+ // optional int64 timestamp = 3;
+ /**
+ * <code>optional int64 timestamp = 3;</code>
+ */
+ boolean hasTimestamp();
+ /**
+ * <code>optional int64 timestamp = 3;</code>
+ */
+ long getTimestamp();
+
+ // optional bytes data = 4;
+ /**
+ * <code>optional bytes data = 4;</code>
+ */
+ boolean hasData();
+ /**
+ * <code>optional bytes data = 4;</code>
+ */
+ com.google.protobuf.ByteString getData();
+ }
+ /**
+ * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Cell}
+ */
+ public static final class Cell extends
+ com.google.protobuf.GeneratedMessage
+ implements CellOrBuilder {
+ // Use Cell.newBuilder() to construct.
+ private Cell(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private Cell(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final Cell defaultInstance;
+ public static Cell getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public Cell getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private Cell(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ row_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ column_ = input.readBytes();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ timestamp_ = input.readInt64();
+ break;
+ }
+ case 34: {
+ bitField0_ |= 0x00000008;
+ data_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<Cell> PARSER =
+ new com.google.protobuf.AbstractParser<Cell>() {
+ public Cell parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new Cell(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<Cell> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional bytes row = 1;
+ public static final int ROW_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString row_;
+ /**
+ * <code>optional bytes row = 1;</code>
+ *
+ * <pre>
+ * unused if Cell is in a CellSet
+ * </pre>
+ */
+ public boolean hasRow() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional bytes row = 1;</code>
+ *
+ * <pre>
+ * unused if Cell is in a CellSet
+ * </pre>
+ */
+ public com.google.protobuf.ByteString getRow() {
+ return row_;
+ }
+
+ // optional bytes column = 2;
+ public static final int COLUMN_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString column_;
+ /**
+ * <code>optional bytes column = 2;</code>
+ */
+ public boolean hasColumn() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional bytes column = 2;</code>
+ */
+ public com.google.protobuf.ByteString getColumn() {
+ return column_;
+ }
+
+ // optional int64 timestamp = 3;
+ public static final int TIMESTAMP_FIELD_NUMBER = 3;
+ private long timestamp_;
+ /**
+ * <code>optional int64 timestamp = 3;</code>
+ */
+ public boolean hasTimestamp() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional int64 timestamp = 3;</code>
+ */
+ public long getTimestamp() {
+ return timestamp_;
+ }
+
+ // optional bytes data = 4;
+ public static final int DATA_FIELD_NUMBER = 4;
+ private com.google.protobuf.ByteString data_;
+ /**
+ * <code>optional bytes data = 4;</code>
+ */
+ public boolean hasData() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional bytes data = 4;</code>
+ */
+ public com.google.protobuf.ByteString getData() {
+ return data_;
+ }
+
+ private void initFields() {
+ row_ = com.google.protobuf.ByteString.EMPTY;
+ column_ = com.google.protobuf.ByteString.EMPTY;
+ timestamp_ = 0L;
+ data_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, row_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, column_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeInt64(3, timestamp_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeBytes(4, data_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, row_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, column_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt64Size(3, timestamp_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(4, data_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Cell}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.CellOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ row_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ column_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ timestamp_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ data_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell build() {
+ org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell buildPartial() {
+ org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell result = new org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.row_ = row_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.column_ = column_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.timestamp_ = timestamp_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.data_ = data_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell) {
+ return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell other) {
+ if (other == org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.getDefaultInstance()) return this;
+ if (other.hasRow()) {
+ setRow(other.getRow());
+ }
+ if (other.hasColumn()) {
+ setColumn(other.getColumn());
+ }
+ if (other.hasTimestamp()) {
+ setTimestamp(other.getTimestamp());
+ }
+ if (other.hasData()) {
+ setData(other.getData());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional bytes row = 1;
+ private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>optional bytes row = 1;</code>
+ *
+ * <pre>
+ * unused if Cell is in a CellSet
+ * </pre>
+ */
+ public boolean hasRow() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional bytes row = 1;</code>
+ *
+ * <pre>
+ * unused if Cell is in a CellSet
+ * </pre>
+ */
+ public com.google.protobuf.ByteString getRow() {
+ return row_;
+ }
+ /**
+ * <code>optional bytes row = 1;</code>
+ *
+ * <pre>
+ * unused if Cell is in a CellSet
+ * </pre>
+ */
+ public Builder setRow(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ row_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional bytes row = 1;</code>
+ *
+ * <pre>
+ * unused if Cell is in a CellSet
+ * </pre>
+ */
+ public Builder clearRow() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ row_ = getDefaultInstance().getRow();
+ onChanged();
+ return this;
+ }
+
+ // optional bytes column = 2;
+ private com.google.protobuf.ByteString column_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>optional bytes column = 2;</code>
+ */
+ public boolean hasColumn() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional bytes column = 2;</code>
+ */
+ public com.google.protobuf.ByteString getColumn() {
+ return column_;
+ }
+ /**
+ * <code>optional bytes column = 2;</code>
+ */
+ public Builder setColumn(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ column_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional bytes column = 2;</code>
+ */
+ public Builder clearColumn() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ column_ = getDefaultInstance().getColumn();
+ onChanged();
+ return this;
+ }
+
+ // optional int64 timestamp = 3;
+ private long timestamp_ ;
+ /**
+ * <code>optional int64 timestamp = 3;</code>
+ */
+ public boolean hasTimestamp() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional int64 timestamp = 3;</code>
+ */
+ public long getTimestamp() {
+ return timestamp_;
+ }
+ /**
+ * <code>optional int64 timestamp = 3;</code>
+ */
+ public Builder setTimestamp(long value) {
+ bitField0_ |= 0x00000004;
+ timestamp_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional int64 timestamp = 3;</code>
+ */
+ public Builder clearTimestamp() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ timestamp_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional bytes data = 4;
+ private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>optional bytes data = 4;</code>
+ */
+ public boolean hasData() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional bytes data = 4;</code>
+ */
+ public com.google.protobuf.ByteString getData() {
+ return data_;
+ }
+ /**
+ * <code>optional bytes data = 4;</code>
+ */
+ public Builder setData(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000008;
+ data_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional bytes data = 4;</code>
+ */
+ public Builder clearData() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ data_ = getDefaultInstance().getData();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Cell)
+ }
+
+ static {
+ defaultInstance = new Cell(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Cell)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\021CellMessage.proto\022/org.apache.hadoop.h" +
+ "base.rest.protobuf.generated\"D\n\004Cell\022\013\n\003" +
+ "row\030\001 \001(\014\022\016\n\006column\030\002 \001(\014\022\021\n\ttimestamp\030\003" +
+ " \001(\003\022\014\n\004data\030\004 \001(\014"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor,
+ new java.lang.String[] { "Row", "Column", "Timestamp", "Data", });
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
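
As a usage note, the generated CellMessage.Cell follows the standard builder/parse pattern; a minimal sketch is below (the class name CellMessageSketch and the field values are illustrative only), assuming the hbase-protocol jar and protobuf-java are on the classpath.

// Hypothetical sketch, not from the commit: build / serialize / parse round trip for Cell.
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;

public class CellMessageSketch {
  public static void main(String[] args) throws Exception {
    // Build a Cell using the optional fields declared in CellMessage.proto.
    Cell cell = Cell.newBuilder()
        .setRow(ByteString.copyFromUtf8("row1"))
        .setColumn(ByteString.copyFromUtf8("cf:qual"))
        .setTimestamp(1469120454000L)
        .setData(ByteString.copyFromUtf8("value"))
        .build();

    // Serialize to bytes and parse back; all four fields survive the round trip.
    byte[] wire = cell.toByteArray();
    Cell copy = Cell.parseFrom(wire);
    System.out.println(copy.hasTimestamp() + " " + copy.getData().toStringUtf8());
  }
}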