Posted to commits@hbase.apache.org by st...@apache.org on 2017/01/14 05:07:26 UTC
[3/4] hbase git commit: HBASE-16744 Procedure V2 - Lock procedures to allow clients to acquire locks on tables/namespaces/regions (Matteo Bertozzi)
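For readers skimming the diff: the generated LockRequest/LockResponse messages below are what a client sends to the master to ask for a lock on a table, namespace, or set of regions, and the response carries the proc_id of the lock procedure. A minimal, hypothetical sketch (not part of this commit) of assembling a namespace lock request with the builders generated in LockServiceProtos:

    // Illustrative only; class name and literal values are made up for the example.
    import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType;

    public class LockRequestExample {
      public static LockRequest exclusiveNamespaceLock() {
        return LockRequest.newBuilder()
            .setLockType(LockType.EXCLUSIVE)      // required field; EXCLUSIVE or SHARED
            .setNamespace("default")              // lock the whole "default" namespace
            .setDescription("example exclusive namespace lock")
            .setNonceGroup(1L)                    // example nonce values for idempotent retries
            .setNonce(42L)
            .build();                             // build() fails if the required lock_type is unset
      }
    }
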
http://git-wip-us.apache.org/repos/asf/hbase/blob/4cb09a49/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockServiceProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockServiceProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockServiceProtos.java
new file mode 100644
index 0000000..577b680
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockServiceProtos.java
@@ -0,0 +1,5328 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: LockService.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class LockServiceProtos {
+ private LockServiceProtos() {}
+ public static void registerAllExtensions(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
+ }
+
+ public static void registerAllExtensions(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
+ registerAllExtensions(
+ (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
+ }
+ /**
+ * Protobuf enum {@code hbase.pb.LockType}
+ */
+ public enum LockType
+ implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * <code>EXCLUSIVE = 1;</code>
+ */
+ EXCLUSIVE(1),
+ /**
+ * <code>SHARED = 2;</code>
+ */
+ SHARED(2),
+ ;
+
+ /**
+ * <code>EXCLUSIVE = 1;</code>
+ */
+ public static final int EXCLUSIVE_VALUE = 1;
+ /**
+ * <code>SHARED = 2;</code>
+ */
+ public static final int SHARED_VALUE = 2;
+
+
+ public final int getNumber() {
+ return value;
+ }
+
+ /**
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+ @java.lang.Deprecated
+ public static LockType valueOf(int value) {
+ return forNumber(value);
+ }
+
+ public static LockType forNumber(int value) {
+ switch (value) {
+ case 1: return EXCLUSIVE;
+ case 2: return SHARED;
+ default: return null;
+ }
+ }
+
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<LockType>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
+ LockType> internalValueMap =
+ new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<LockType>() {
+ public LockType findValueByNumber(int number) {
+ return LockType.forNumber(number);
+ }
+ };
+
+ public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(ordinal());
+ }
+ public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final LockType[] VALUES = values();
+
+ public static LockType valueOf(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int value;
+
+ private LockType(int value) {
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:hbase.pb.LockType)
+ }
+
+ public interface LockRequestOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:hbase.pb.LockRequest)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+ /**
+ * <code>required .hbase.pb.LockType lock_type = 1;</code>
+ */
+ boolean hasLockType();
+ /**
+ * <code>required .hbase.pb.LockType lock_type = 1;</code>
+ */
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType();
+
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ boolean hasNamespace();
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ java.lang.String getNamespace();
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+ getNamespaceBytes();
+
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ boolean hasTableName();
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName();
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>
+ getRegionInfoList();
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ int getRegionInfoCount();
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionInfoOrBuilderList();
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
+ int index);
+
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ boolean hasDescription();
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ java.lang.String getDescription();
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+ getDescriptionBytes();
+
+ /**
+ * <code>optional uint64 nonce_group = 6 [default = 0];</code>
+ */
+ boolean hasNonceGroup();
+ /**
+ * <code>optional uint64 nonce_group = 6 [default = 0];</code>
+ */
+ long getNonceGroup();
+
+ /**
+ * <code>optional uint64 nonce = 7 [default = 0];</code>
+ */
+ boolean hasNonce();
+ /**
+ * <code>optional uint64 nonce = 7 [default = 0];</code>
+ */
+ long getNonce();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.LockRequest}
+ */
+ public static final class LockRequest extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:hbase.pb.LockRequest)
+ LockRequestOrBuilder {
+ // Use LockRequest.newBuilder() to construct.
+ private LockRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ super(builder);
+ }
+ private LockRequest() {
+ lockType_ = 1;
+ namespace_ = "";
+ regionInfo_ = java.util.Collections.emptyList();
+ description_ = "";
+ nonceGroup_ = 0L;
+ nonce_ = 0L;
+ }
+
+ @java.lang.Override
+ public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private LockRequest(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ int mutable_bitField0_ = 0;
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(1, rawValue);
+ } else {
+ bitField0_ |= 0x00000001;
+ lockType_ = rawValue;
+ }
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
+ bitField0_ |= 0x00000002;
+ namespace_ = bs;
+ break;
+ }
+ case 26: {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ subBuilder = tableName_.toBuilder();
+ }
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(tableName_);
+ tableName_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000004;
+ break;
+ }
+ case 34: {
+ if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+ regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>();
+ mutable_bitField0_ |= 0x00000008;
+ }
+ regionInfo_.add(
+ input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry));
+ break;
+ }
+ case 42: {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
+ bitField0_ |= 0x00000008;
+ description_ = bs;
+ break;
+ }
+ case 48: {
+ bitField0_ |= 0x00000010;
+ nonceGroup_ = input.readUInt64();
+ break;
+ }
+ case 56: {
+ bitField0_ |= 0x00000020;
+ nonce_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+ e).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+ regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_descriptor;
+ }
+
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.Builder.class);
+ }
+
+ private int bitField0_;
+ public static final int LOCK_TYPE_FIELD_NUMBER = 1;
+ private int lockType_;
+ /**
+ * <code>required .hbase.pb.LockType lock_type = 1;</code>
+ */
+ public boolean hasLockType() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required .hbase.pb.LockType lock_type = 1;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
+ return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
+ }
+
+ public static final int NAMESPACE_FIELD_NUMBER = 2;
+ private volatile java.lang.Object namespace_;
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ public boolean hasNamespace() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ public java.lang.String getNamespace() {
+ java.lang.Object ref = namespace_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ namespace_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+ getNamespaceBytes() {
+ java.lang.Object ref = namespace_;
+ if (ref instanceof java.lang.String) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ namespace_ = b;
+ return b;
+ } else {
+ return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ public static final int TABLE_NAME_FIELD_NUMBER = 3;
+ private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
+ return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
+ }
+
+ public static final int REGION_INFO_FIELD_NUMBER = 4;
+ private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_;
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
+ return regionInfo_;
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionInfoOrBuilderList() {
+ return regionInfo_;
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public int getRegionInfoCount() {
+ return regionInfo_.size();
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
+ return regionInfo_.get(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
+ int index) {
+ return regionInfo_.get(index);
+ }
+
+ public static final int DESCRIPTION_FIELD_NUMBER = 5;
+ private volatile java.lang.Object description_;
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ public boolean hasDescription() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ public java.lang.String getDescription() {
+ java.lang.Object ref = description_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ description_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+ getDescriptionBytes() {
+ java.lang.Object ref = description_;
+ if (ref instanceof java.lang.String) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ description_ = b;
+ return b;
+ } else {
+ return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ public static final int NONCE_GROUP_FIELD_NUMBER = 6;
+ private long nonceGroup_;
+ /**
+ * <code>optional uint64 nonce_group = 6 [default = 0];</code>
+ */
+ public boolean hasNonceGroup() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional uint64 nonce_group = 6 [default = 0];</code>
+ */
+ public long getNonceGroup() {
+ return nonceGroup_;
+ }
+
+ public static final int NONCE_FIELD_NUMBER = 7;
+ private long nonce_;
+ /**
+ * <code>optional uint64 nonce = 7 [default = 0];</code>
+ */
+ public boolean hasNonce() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * <code>optional uint64 nonce = 7 [default = 0];</code>
+ */
+ public long getNonce() {
+ return nonce_;
+ }
+
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ if (!hasLockType()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (hasTableName()) {
+ if (!getTableName().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ for (int i = 0; i < getRegionInfoCount(); i++) {
+ if (!getRegionInfo(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeEnum(1, lockType_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, namespace_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeMessage(3, getTableName());
+ }
+ for (int i = 0; i < regionInfo_.size(); i++) {
+ output.writeMessage(4, regionInfo_.get(i));
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 5, description_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeUInt64(6, nonceGroup_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ output.writeUInt64(7, nonce_);
+ }
+ unknownFields.writeTo(output);
+ }
+
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+ .computeEnumSize(1, lockType_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, namespace_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, getTableName());
+ }
+ for (int i = 0; i < regionInfo_.size(); i++) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+ .computeMessageSize(4, regionInfo_.get(i));
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(5, description_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(6, nonceGroup_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(7, nonce_);
+ }
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest) obj;
+
+ boolean result = true;
+ result = result && (hasLockType() == other.hasLockType());
+ if (hasLockType()) {
+ result = result && lockType_ == other.lockType_;
+ }
+ result = result && (hasNamespace() == other.hasNamespace());
+ if (hasNamespace()) {
+ result = result && getNamespace()
+ .equals(other.getNamespace());
+ }
+ result = result && (hasTableName() == other.hasTableName());
+ if (hasTableName()) {
+ result = result && getTableName()
+ .equals(other.getTableName());
+ }
+ result = result && getRegionInfoList()
+ .equals(other.getRegionInfoList());
+ result = result && (hasDescription() == other.hasDescription());
+ if (hasDescription()) {
+ result = result && getDescription()
+ .equals(other.getDescription());
+ }
+ result = result && (hasNonceGroup() == other.hasNonceGroup());
+ if (hasNonceGroup()) {
+ result = result && (getNonceGroup()
+ == other.getNonceGroup());
+ }
+ result = result && (hasNonce() == other.hasNonce());
+ if (hasNonce()) {
+ result = result && (getNonce()
+ == other.getNonce());
+ }
+ result = result && unknownFields.equals(other.unknownFields);
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasLockType()) {
+ hash = (37 * hash) + LOCK_TYPE_FIELD_NUMBER;
+ hash = (53 * hash) + lockType_;
+ }
+ if (hasNamespace()) {
+ hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
+ hash = (53 * hash) + getNamespace().hashCode();
+ }
+ if (hasTableName()) {
+ hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getTableName().hashCode();
+ }
+ if (getRegionInfoCount() > 0) {
+ hash = (37 * hash) + REGION_INFO_FIELD_NUMBER;
+ hash = (53 * hash) + getRegionInfoList().hashCode();
+ }
+ if (hasDescription()) {
+ hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
+ hash = (53 * hash) + getDescription().hashCode();
+ }
+ if (hasNonceGroup()) {
+ hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
+ hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
+ getNonceGroup());
+ }
+ if (hasNonce()) {
+ hash = (37 * hash) + NONCE_FIELD_NUMBER;
+ hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
+ getNonce());
+ }
+ hash = (29 * hash) + unknownFields.hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(byte[] data)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
+ byte[] data,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
+ java.io.InputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+ public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.LockRequest}
+ */
+ public static final class Builder extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:hbase.pb.LockRequest)
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequestOrBuilder {
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_descriptor;
+ }
+
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
+ getTableNameFieldBuilder();
+ getRegionInfoFieldBuilder();
+ }
+ }
+ public Builder clear() {
+ super.clear();
+ lockType_ = 1;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ namespace_ = "";
+ bitField0_ = (bitField0_ & ~0x00000002);
+ if (tableNameBuilder_ == null) {
+ tableName_ = null;
+ } else {
+ tableNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ if (regionInfoBuilder_ == null) {
+ regionInfo_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000008);
+ } else {
+ regionInfoBuilder_.clear();
+ }
+ description_ = "";
+ bitField0_ = (bitField0_ & ~0x00000010);
+ nonceGroup_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000020);
+ nonce_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000040);
+ return this;
+ }
+
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest build() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest buildPartial() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.lockType_ = lockType_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.namespace_ = namespace_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ if (tableNameBuilder_ == null) {
+ result.tableName_ = tableName_;
+ } else {
+ result.tableName_ = tableNameBuilder_.build();
+ }
+ if (regionInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
+ bitField0_ = (bitField0_ & ~0x00000008);
+ }
+ result.regionInfo_ = regionInfo_;
+ } else {
+ result.regionInfo_ = regionInfoBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.description_ = description_;
+ if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+ to_bitField0_ |= 0x00000010;
+ }
+ result.nonceGroup_ = nonceGroup_;
+ if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+ to_bitField0_ |= 0x00000020;
+ }
+ result.nonce_ = nonce_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder clone() {
+ return (Builder) super.clone();
+ }
+ public Builder setField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ Object value) {
+ return (Builder) super.setField(field, value);
+ }
+ public Builder clearField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return (Builder) super.clearField(field);
+ }
+ public Builder clearOneof(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return (Builder) super.clearOneof(oneof);
+ }
+ public Builder setRepeatedField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, Object value) {
+ return (Builder) super.setRepeatedField(field, index, value);
+ }
+ public Builder addRepeatedField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ Object value) {
+ return (Builder) super.addRepeatedField(field, value);
+ }
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest other) {
+ if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.getDefaultInstance()) return this;
+ if (other.hasLockType()) {
+ setLockType(other.getLockType());
+ }
+ if (other.hasNamespace()) {
+ bitField0_ |= 0x00000002;
+ namespace_ = other.namespace_;
+ onChanged();
+ }
+ if (other.hasTableName()) {
+ mergeTableName(other.getTableName());
+ }
+ if (regionInfoBuilder_ == null) {
+ if (!other.regionInfo_.isEmpty()) {
+ if (regionInfo_.isEmpty()) {
+ regionInfo_ = other.regionInfo_;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ } else {
+ ensureRegionInfoIsMutable();
+ regionInfo_.addAll(other.regionInfo_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.regionInfo_.isEmpty()) {
+ if (regionInfoBuilder_.isEmpty()) {
+ regionInfoBuilder_.dispose();
+ regionInfoBuilder_ = null;
+ regionInfo_ = other.regionInfo_;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ regionInfoBuilder_ =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
+ getRegionInfoFieldBuilder() : null;
+ } else {
+ regionInfoBuilder_.addAllMessages(other.regionInfo_);
+ }
+ }
+ }
+ if (other.hasDescription()) {
+ bitField0_ |= 0x00000010;
+ description_ = other.description_;
+ onChanged();
+ }
+ if (other.hasNonceGroup()) {
+ setNonceGroup(other.getNonceGroup());
+ }
+ if (other.hasNonce()) {
+ setNonce(other.getNonce());
+ }
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasLockType()) {
+ return false;
+ }
+ if (hasTableName()) {
+ if (!getTableName().isInitialized()) {
+ return false;
+ }
+ }
+ for (int i = 0; i < getRegionInfoCount(); i++) {
+ if (!getRegionInfo(i).isInitialized()) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest) e.getUnfinishedMessage();
+ throw e.unwrapIOException();
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ private int lockType_ = 1;
+ /**
+ * <code>required .hbase.pb.LockType lock_type = 1;</code>
+ */
+ public boolean hasLockType() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required .hbase.pb.LockType lock_type = 1;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
+ return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
+ }
+ /**
+ * <code>required .hbase.pb.LockType lock_type = 1;</code>
+ */
+ public Builder setLockType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ lockType_ = value.getNumber();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required .hbase.pb.LockType lock_type = 1;</code>
+ */
+ public Builder clearLockType() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ lockType_ = 1;
+ onChanged();
+ return this;
+ }
+
+ private java.lang.Object namespace_ = "";
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ public boolean hasNamespace() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ public java.lang.String getNamespace() {
+ java.lang.Object ref = namespace_;
+ if (!(ref instanceof java.lang.String)) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ namespace_ = s;
+ }
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+ getNamespaceBytes() {
+ java.lang.Object ref = namespace_;
+ if (ref instanceof String) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ namespace_ = b;
+ return b;
+ } else {
+ return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ public Builder setNamespace(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ namespace_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ public Builder clearNamespace() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ namespace_ = getDefaultInstance().getNamespace();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string namespace = 2;</code>
+ */
+ public Builder setNamespaceBytes(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ namespace_ = value;
+ onChanged();
+ return this;
+ }
+
+ private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null;
+ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
+ if (tableNameBuilder_ == null) {
+ return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
+ } else {
+ return tableNameBuilder_.getMessage();
+ }
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ tableName_ = value;
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ public Builder setTableName(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ tableName_ = builderForValue.build();
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004) &&
+ tableName_ != null &&
+ tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ =
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ } else {
+ tableName_ = value;
+ }
+ onChanged();
+ } else {
+ tableNameBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ public Builder clearTableName() {
+ if (tableNameBuilder_ == null) {
+ tableName_ = null;
+ onChanged();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return getTableNameFieldBuilder().getBuilder();
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ if (tableNameBuilder_ != null) {
+ return tableNameBuilder_.getMessageOrBuilder();
+ } else {
+ return tableName_ == null ?
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
+ }
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 3;</code>
+ */
+ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameFieldBuilder() {
+ if (tableNameBuilder_ == null) {
+ tableNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ getTableName(),
+ getParentForChildren(),
+ isClean());
+ tableName_ = null;
+ }
+ return tableNameBuilder_;
+ }
+
+ private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_ =
+ java.util.Collections.emptyList();
+ private void ensureRegionInfoIsMutable() {
+ if (!((bitField0_ & 0x00000008) == 0x00000008)) {
+ regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>(regionInfo_);
+ bitField0_ |= 0x00000008;
+ }
+ }
+
+ private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
+
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
+ if (regionInfoBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(regionInfo_);
+ } else {
+ return regionInfoBuilder_.getMessageList();
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public int getRegionInfoCount() {
+ if (regionInfoBuilder_ == null) {
+ return regionInfo_.size();
+ } else {
+ return regionInfoBuilder_.getCount();
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
+ if (regionInfoBuilder_ == null) {
+ return regionInfo_.get(index);
+ } else {
+ return regionInfoBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public Builder setRegionInfo(
+ int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
+ if (regionInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureRegionInfoIsMutable();
+ regionInfo_.set(index, value);
+ onChanged();
+ } else {
+ regionInfoBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public Builder setRegionInfo(
+ int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
+ if (regionInfoBuilder_ == null) {
+ ensureRegionInfoIsMutable();
+ regionInfo_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ regionInfoBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public Builder addRegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
+ if (regionInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureRegionInfoIsMutable();
+ regionInfo_.add(value);
+ onChanged();
+ } else {
+ regionInfoBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public Builder addRegionInfo(
+ int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
+ if (regionInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureRegionInfoIsMutable();
+ regionInfo_.add(index, value);
+ onChanged();
+ } else {
+ regionInfoBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public Builder addRegionInfo(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
+ if (regionInfoBuilder_ == null) {
+ ensureRegionInfoIsMutable();
+ regionInfo_.add(builderForValue.build());
+ onChanged();
+ } else {
+ regionInfoBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public Builder addRegionInfo(
+ int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
+ if (regionInfoBuilder_ == null) {
+ ensureRegionInfoIsMutable();
+ regionInfo_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ regionInfoBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public Builder addAllRegionInfo(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> values) {
+ if (regionInfoBuilder_ == null) {
+ ensureRegionInfoIsMutable();
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
+ values, regionInfo_);
+ onChanged();
+ } else {
+ regionInfoBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public Builder clearRegionInfo() {
+ if (regionInfoBuilder_ == null) {
+ regionInfo_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000008);
+ onChanged();
+ } else {
+ regionInfoBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public Builder removeRegionInfo(int index) {
+ if (regionInfoBuilder_ == null) {
+ ensureRegionInfoIsMutable();
+ regionInfo_.remove(index);
+ onChanged();
+ } else {
+ regionInfoBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder(
+ int index) {
+ return getRegionInfoFieldBuilder().getBuilder(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
+ int index) {
+ if (regionInfoBuilder_ == null) {
+ return regionInfo_.get(index); } else {
+ return regionInfoBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionInfoOrBuilderList() {
+ if (regionInfoBuilder_ != null) {
+ return regionInfoBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(regionInfo_);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() {
+ return getRegionInfoFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder(
+ int index) {
+ return getRegionInfoFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder>
+ getRegionInfoBuilderList() {
+ return getRegionInfoFieldBuilder().getBuilderList();
+ }
+ private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionInfoFieldBuilder() {
+ if (regionInfoBuilder_ == null) {
+ regionInfoBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
+ regionInfo_,
+ ((bitField0_ & 0x00000008) == 0x00000008),
+ getParentForChildren(),
+ isClean());
+ regionInfo_ = null;
+ }
+ return regionInfoBuilder_;
+ }
+
+ private java.lang.Object description_ = "";
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ public boolean hasDescription() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ public java.lang.String getDescription() {
+ java.lang.Object ref = description_;
+ if (!(ref instanceof java.lang.String)) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ description_ = s;
+ }
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+ getDescriptionBytes() {
+ java.lang.Object ref = description_;
+ if (ref instanceof String) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ description_ = b;
+ return b;
+ } else {
+ return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ public Builder setDescription(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000010;
+ description_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ public Builder clearDescription() {
+ bitField0_ = (bitField0_ & ~0x00000010);
+ description_ = getDefaultInstance().getDescription();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string description = 5;</code>
+ */
+ public Builder setDescriptionBytes(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000010;
+ description_ = value;
+ onChanged();
+ return this;
+ }
+
+ private long nonceGroup_ ;
+ /**
+ * <code>optional uint64 nonce_group = 6 [default = 0];</code>
+ */
+ public boolean hasNonceGroup() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * <code>optional uint64 nonce_group = 6 [default = 0];</code>
+ */
+ public long getNonceGroup() {
+ return nonceGroup_;
+ }
+ /**
+ * <code>optional uint64 nonce_group = 6 [default = 0];</code>
+ */
+ public Builder setNonceGroup(long value) {
+ bitField0_ |= 0x00000020;
+ nonceGroup_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 nonce_group = 6 [default = 0];</code>
+ */
+ public Builder clearNonceGroup() {
+ bitField0_ = (bitField0_ & ~0x00000020);
+ nonceGroup_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ private long nonce_ ;
+ /**
+ * <code>optional uint64 nonce = 7 [default = 0];</code>
+ */
+ public boolean hasNonce() {
+ return ((bitField0_ & 0x00000040) == 0x00000040);
+ }
+ /**
+ * <code>optional uint64 nonce = 7 [default = 0];</code>
+ */
+ public long getNonce() {
+ return nonce_;
+ }
+ /**
+ * <code>optional uint64 nonce = 7 [default = 0];</code>
+ */
+ public Builder setNonce(long value) {
+ bitField0_ |= 0x00000040;
+ nonce_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 nonce = 7 [default = 0];</code>
+ */
+ public Builder clearNonce() {
+ bitField0_ = (bitField0_ & ~0x00000040);
+ nonce_ = 0L;
+ onChanged();
+ return this;
+ }
+ public final Builder setUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ public final Builder mergeUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.LockRequest)
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.LockRequest)
+ private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest();
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockRequest>
+ PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<LockRequest>() {
+ public LockRequest parsePartialFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return new LockRequest(input, extensionRegistry);
+ }
+ };
+
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockRequest> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockRequest> getParserForType() {
+ return PARSER;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
+ }
+
+ }
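+ // A minimal usage sketch (illustrative, not generated output): building a
+ // LockRequest through the Builder surface shown above. The literal values
+ // are placeholders, and any required fields declared earlier in this
+ // message must also be set before build() will pass isInitialized().
+ //
+ //   LockServiceProtos.LockRequest req =
+ //       LockServiceProtos.LockRequest.newBuilder()
+ //           .setDescription("example client lock")  // optional string, field 5
+ //           .setNonceGroup(1L)                      // optional uint64, field 6
+ //           .setNonce(2L)                           // optional uint64, field 7
+ //           .build();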
+
+ public interface LockResponseOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:hbase.pb.LockResponse)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+ /**
+ * <code>required uint64 proc_id = 1;</code>
+ */
+ boolean hasProcId();
+ /**
+ * <code>required uint64 proc_id = 1;</code>
+ */
+ long getProcId();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.LockResponse}
+ */
+ public static final class LockResponse extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:hbase.pb.LockResponse)
+ LockResponseOrBuilder {
+ // Use LockResponse.newBuilder() to construct.
+ private LockResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ super(builder);
+ }
+ private LockResponse() {
+ procId_ = 0L;
+ }
+
+ @java.lang.Override
+ public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private LockResponse(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ int mutable_bitField0_ = 0;
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ procId_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+ e).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_descriptor;
+ }
+
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.Builder.class);
+ }
+
+ private int bitField0_;
+ public static final int PROC_ID_FIELD_NUMBER = 1;
+ private long procId_;
+ /**
+ * <code>required uint64 proc_id = 1;</code>
+ */
+ public boolean hasProcId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required uint64 proc_id = 1;</code>
+ */
+ public long getProcId() {
+ return procId_;
+ }
+
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ if (!hasProcId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
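+ // Note: memoizedIsInitialized caches the result (-1 = not yet computed,
+ // 0 = false, 1 = true), so repeated isInitialized() calls after the first
+ // required-field check are constant time.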
+
+ public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, procId_);
+ }
+ unknownFields.writeTo(output);
+ }
+
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, procId_);
+ }
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse) obj;
+
+ boolean result = true;
+ result = result && (hasProcId() == other.hasProcId());
+ if (hasProcId()) {
+ result = result && (getProcId()
+ == other.getProcId());
+ }
+ result = result && unknownFields.equals(other.unknownFields);
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasProcId()) {
+ hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
+ hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
+ getProcId());
+ }
+ hash = (29 * hash) + unknownFields.hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(byte[] data)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
+ byte[] data,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
+ java.io.InputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+ public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.LockResponse}
+ */
+ public static final class Builder extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:hbase.pb.LockResponse)
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponseOrBuilder {
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_descriptor;
+ }
+
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
+ }
+ }
+ public Builder clear() {
+ super.clear();
+ procId_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse build() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse buildPartial() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.procId_ = procId_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder clone() {
+ return (Builder) super.clone();
+ }
+ public Builder setField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ Object value) {
+ return (Builder) super.setField(field, value);
+ }
+ public Builder clearField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return (Builder) super.clearField(field);
+ }
+ public Builder clearOneof(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return (Builder) super.clearOneof(oneof);
+ }
+ public Builder setRepeatedField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, Object value) {
+ return (Builder) super.setRepeatedField(field, index, value);
+ }
+ public Builder addRepeatedField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ Object value) {
+ return (Builder) super.addRepeatedField(field, value);
+ }
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse other) {
+ if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance()) return this;
+ if (other.hasProcId()) {
+ setProcId(other.getProcId());
+ }
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasProcId()) {
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse) e.getUnfinishedMessage();
+ throw e.unwrapIOException();
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
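+ // Note: the parse-then-merge pattern above preserves partial state. When
+ // parsing fails mid-message, getUnfinishedMessage() recovers whatever was
+ // read, the finally block merges it into this Builder, and only then does
+ // the unwrapped IOException propagate to the caller.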
+ private int bitField0_;
+
+ private long procId_ ;
+ /**
+ * <code>required uint64 proc_id = 1;</code>
+ */
+ public boolean hasProcId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required uint64 proc_id = 1;</code>
+ */
+ public long getProcId() {
+ return procId_;
+ }
+ /**
+ * <code>required uint64 proc_id = 1;</code>
+ */
+ public Builder setProcId(long value) {
+ bitField0_ |= 0x00000001;
+ procId_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required uint64 proc_id = 1;</code>
+ */
+ public Builder clearProcId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ procId_ = 0L;
+ onChanged();
+ return this;
+ }
+ public final Builder setUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ public final Builder mergeUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.LockResponse)
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.LockResponse)
+ private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse();
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockResponse>
+ PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<LockResponse>() {
+ public LockResponse parsePartialFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return new LockResponse(input, extensionRegistry);
+ }
+ };
+
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockResponse> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockResponse> getParserForType() {
+ return PARSER;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
+ }
+
+ }
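+ // A minimal round-trip sketch (illustrative, not generated output):
+ // LockResponse carries a single required proc_id, so wire handling reduces
+ // to the static parsers above plus the accessors. rpcResponseBytes is a
+ // placeholder byte[], and toByteArray() is inherited from the message base
+ // class rather than declared in this file.
+ //
+ //   LockServiceProtos.LockResponse resp =
+ //       LockServiceProtos.LockResponse.parseFrom(rpcResponseBytes);
+ //   long procId = resp.getProcId();   // required uint64, field 1
+ //   byte[] wire = resp.toByteArray(); // re-serialize for transport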
+
+ public interface LockHeartbeatRequestOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:hbase.pb.LockHeartbeatRequest)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+ /**
+ * <code>required uint64 proc_id = 1;</code>
+ */
+ boolean hasProcId();
+ /**
+ * <code>required uint64 proc_id = 1;</code>
+ */
+ long getProcId();
+
+ /**
+ * <code>optional bool keep_alive = 2 [default = true];</code>
+ */
+ boolean hasKeepAlive();
+ /**
+ * <code>optional bool keep_alive = 2 [default = true];</code>
+ */
+ boolean getKeepAlive();
+ }
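+ // Note on the keep_alive default (illustrative): with proto2 semantics,
+ // [default = true] means getKeepAlive() returns true even when the field
+ // was never set, while hasKeepAlive() still reports false for an unset
+ // field; callers that care about explicit intent should check has* first.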
+ /**
+ * Protobuf type {@code hbase.pb.LockHeartbeatRequest}
+ */
+ public static final class LockHeartbeatRequest extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
<TRUNCATED>