You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2016/09/29 19:37:16 UTC
[02/51] [partial] hbase git commit: HBASE-16264 Figure how to deal
with endpoints and shaded pb Shade our protobufs. Do it in a manner that
makes it so we can still have in our API references to com.google.protobuf
(and in REST). The c.g.p in API is for
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java
new file mode 100644
index 0000000..9513ccb
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java
@@ -0,0 +1,12108 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: WAL.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class WALProtos {
+ private WALProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ /**
+ * Protobuf enum {@code hbase.pb.ScopeType}
+ */
+ public enum ScopeType
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * <code>REPLICATION_SCOPE_LOCAL = 0;</code>
+ */
+ REPLICATION_SCOPE_LOCAL(0, 0),
+ /**
+ * <code>REPLICATION_SCOPE_GLOBAL = 1;</code>
+ */
+ REPLICATION_SCOPE_GLOBAL(1, 1),
+ /**
+ * <code>REPLICATION_SCOPE_SERIAL = 2;</code>
+ */
+ REPLICATION_SCOPE_SERIAL(2, 2),
+ ;
+
+ /**
+ * <code>REPLICATION_SCOPE_LOCAL = 0;</code>
+ */
+ public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0;
+ /**
+ * <code>REPLICATION_SCOPE_GLOBAL = 1;</code>
+ */
+ public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1;
+ /**
+ * <code>REPLICATION_SCOPE_SERIAL = 2;</code>
+ */
+ public static final int REPLICATION_SCOPE_SERIAL_VALUE = 2;
+
+
+ public final int getNumber() { return value; }
+
+ public static ScopeType valueOf(int value) {
+ switch (value) {
+ case 0: return REPLICATION_SCOPE_LOCAL;
+ case 1: return REPLICATION_SCOPE_GLOBAL;
+ case 2: return REPLICATION_SCOPE_SERIAL;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<ScopeType>() {
+ public ScopeType findValueByNumber(int number) {
+ return ScopeType.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final ScopeType[] VALUES = values();
+
+ public static ScopeType valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private ScopeType(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:hbase.pb.ScopeType)
+ }
+
+ public interface WALHeaderOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional bool has_compression = 1;
+ /**
+ * <code>optional bool has_compression = 1;</code>
+ */
+ boolean hasHasCompression();
+ /**
+ * <code>optional bool has_compression = 1;</code>
+ */
+ boolean getHasCompression();
+
+ // optional bytes encryption_key = 2;
+ /**
+ * <code>optional bytes encryption_key = 2;</code>
+ */
+ boolean hasEncryptionKey();
+ /**
+ * <code>optional bytes encryption_key = 2;</code>
+ */
+ com.google.protobuf.ByteString getEncryptionKey();
+
+ // optional bool has_tag_compression = 3;
+ /**
+ * <code>optional bool has_tag_compression = 3;</code>
+ */
+ boolean hasHasTagCompression();
+ /**
+ * <code>optional bool has_tag_compression = 3;</code>
+ */
+ boolean getHasTagCompression();
+
+ // optional string writer_cls_name = 4;
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ boolean hasWriterClsName();
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ java.lang.String getWriterClsName();
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ com.google.protobuf.ByteString
+ getWriterClsNameBytes();
+
+ // optional string cell_codec_cls_name = 5;
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ boolean hasCellCodecClsName();
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ java.lang.String getCellCodecClsName();
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ com.google.protobuf.ByteString
+ getCellCodecClsNameBytes();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.WALHeader}
+ */
+ public static final class WALHeader extends
+ com.google.protobuf.GeneratedMessage
+ implements WALHeaderOrBuilder {
+ // Use WALHeader.newBuilder() to construct.
+ private WALHeader(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private WALHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final WALHeader defaultInstance;
+ public static WALHeader getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public WALHeader getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private WALHeader(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ hasCompression_ = input.readBool();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ encryptionKey_ = input.readBytes();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ hasTagCompression_ = input.readBool();
+ break;
+ }
+ case 34: {
+ bitField0_ |= 0x00000008;
+ writerClsName_ = input.readBytes();
+ break;
+ }
+ case 42: {
+ bitField0_ |= 0x00000010;
+ cellCodecClsName_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<WALHeader> PARSER =
+ new com.google.protobuf.AbstractParser<WALHeader>() {
+ public WALHeader parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new WALHeader(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<WALHeader> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional bool has_compression = 1;
+ public static final int HAS_COMPRESSION_FIELD_NUMBER = 1;
+ private boolean hasCompression_;
+ /**
+ * <code>optional bool has_compression = 1;</code>
+ */
+ public boolean hasHasCompression() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional bool has_compression = 1;</code>
+ */
+ public boolean getHasCompression() {
+ return hasCompression_;
+ }
+
+ // optional bytes encryption_key = 2;
+ public static final int ENCRYPTION_KEY_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString encryptionKey_;
+ /**
+ * <code>optional bytes encryption_key = 2;</code>
+ */
+ public boolean hasEncryptionKey() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional bytes encryption_key = 2;</code>
+ */
+ public com.google.protobuf.ByteString getEncryptionKey() {
+ return encryptionKey_;
+ }
+
+ // optional bool has_tag_compression = 3;
+ public static final int HAS_TAG_COMPRESSION_FIELD_NUMBER = 3;
+ private boolean hasTagCompression_;
+ /**
+ * <code>optional bool has_tag_compression = 3;</code>
+ */
+ public boolean hasHasTagCompression() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional bool has_tag_compression = 3;</code>
+ */
+ public boolean getHasTagCompression() {
+ return hasTagCompression_;
+ }
+
+ // optional string writer_cls_name = 4;
+ public static final int WRITER_CLS_NAME_FIELD_NUMBER = 4;
+ private java.lang.Object writerClsName_;
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ public boolean hasWriterClsName() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ public java.lang.String getWriterClsName() {
+ java.lang.Object ref = writerClsName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ writerClsName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ public com.google.protobuf.ByteString
+ getWriterClsNameBytes() {
+ java.lang.Object ref = writerClsName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ writerClsName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional string cell_codec_cls_name = 5;
+ public static final int CELL_CODEC_CLS_NAME_FIELD_NUMBER = 5;
+ private java.lang.Object cellCodecClsName_;
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ public boolean hasCellCodecClsName() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ public java.lang.String getCellCodecClsName() {
+ java.lang.Object ref = cellCodecClsName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ cellCodecClsName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ public com.google.protobuf.ByteString
+ getCellCodecClsNameBytes() {
+ java.lang.Object ref = cellCodecClsName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ cellCodecClsName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ hasCompression_ = false;
+ encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
+ hasTagCompression_ = false;
+ writerClsName_ = "";
+ cellCodecClsName_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBool(1, hasCompression_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, encryptionKey_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBool(3, hasTagCompression_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeBytes(4, getWriterClsNameBytes());
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeBytes(5, getCellCodecClsNameBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(1, hasCompression_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, encryptionKey_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(3, hasTagCompression_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(4, getWriterClsNameBytes());
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(5, getCellCodecClsNameBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader) obj;
+
+ boolean result = true;
+ result = result && (hasHasCompression() == other.hasHasCompression());
+ if (hasHasCompression()) {
+ result = result && (getHasCompression()
+ == other.getHasCompression());
+ }
+ result = result && (hasEncryptionKey() == other.hasEncryptionKey());
+ if (hasEncryptionKey()) {
+ result = result && getEncryptionKey()
+ .equals(other.getEncryptionKey());
+ }
+ result = result && (hasHasTagCompression() == other.hasHasTagCompression());
+ if (hasHasTagCompression()) {
+ result = result && (getHasTagCompression()
+ == other.getHasTagCompression());
+ }
+ result = result && (hasWriterClsName() == other.hasWriterClsName());
+ if (hasWriterClsName()) {
+ result = result && getWriterClsName()
+ .equals(other.getWriterClsName());
+ }
+ result = result && (hasCellCodecClsName() == other.hasCellCodecClsName());
+ if (hasCellCodecClsName()) {
+ result = result && getCellCodecClsName()
+ .equals(other.getCellCodecClsName());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasHasCompression()) {
+ hash = (37 * hash) + HAS_COMPRESSION_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getHasCompression());
+ }
+ if (hasEncryptionKey()) {
+ hash = (37 * hash) + ENCRYPTION_KEY_FIELD_NUMBER;
+ hash = (53 * hash) + getEncryptionKey().hashCode();
+ }
+ if (hasHasTagCompression()) {
+ hash = (37 * hash) + HAS_TAG_COMPRESSION_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getHasTagCompression());
+ }
+ if (hasWriterClsName()) {
+ hash = (37 * hash) + WRITER_CLS_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getWriterClsName().hashCode();
+ }
+ if (hasCellCodecClsName()) {
+ hash = (37 * hash) + CELL_CODEC_CLS_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getCellCodecClsName().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.WALHeader}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeaderOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ hasCompression_ = false;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ hasTagCompression_ = false;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ writerClsName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000008);
+ cellCodecClsName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000010);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader build() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader buildPartial() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.hasCompression_ = hasCompression_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.encryptionKey_ = encryptionKey_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.hasTagCompression_ = hasTagCompression_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.writerClsName_ = writerClsName_;
+ if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+ to_bitField0_ |= 0x00000010;
+ }
+ result.cellCodecClsName_ = cellCodecClsName_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader) {
+ return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader other) {
+ if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.getDefaultInstance()) return this;
+ if (other.hasHasCompression()) {
+ setHasCompression(other.getHasCompression());
+ }
+ if (other.hasEncryptionKey()) {
+ setEncryptionKey(other.getEncryptionKey());
+ }
+ if (other.hasHasTagCompression()) {
+ setHasTagCompression(other.getHasTagCompression());
+ }
+ if (other.hasWriterClsName()) {
+ bitField0_ |= 0x00000008;
+ writerClsName_ = other.writerClsName_;
+ onChanged();
+ }
+ if (other.hasCellCodecClsName()) {
+ bitField0_ |= 0x00000010;
+ cellCodecClsName_ = other.cellCodecClsName_;
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional bool has_compression = 1;
+ private boolean hasCompression_ ;
+ /**
+ * <code>optional bool has_compression = 1;</code>
+ */
+ public boolean hasHasCompression() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional bool has_compression = 1;</code>
+ */
+ public boolean getHasCompression() {
+ return hasCompression_;
+ }
+ /**
+ * <code>optional bool has_compression = 1;</code>
+ */
+ public Builder setHasCompression(boolean value) {
+ bitField0_ |= 0x00000001;
+ hasCompression_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional bool has_compression = 1;</code>
+ */
+ public Builder clearHasCompression() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ hasCompression_ = false;
+ onChanged();
+ return this;
+ }
+
+ // optional bytes encryption_key = 2;
+ private com.google.protobuf.ByteString encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>optional bytes encryption_key = 2;</code>
+ */
+ public boolean hasEncryptionKey() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional bytes encryption_key = 2;</code>
+ */
+ public com.google.protobuf.ByteString getEncryptionKey() {
+ return encryptionKey_;
+ }
+ /**
+ * <code>optional bytes encryption_key = 2;</code>
+ */
+ public Builder setEncryptionKey(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ encryptionKey_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional bytes encryption_key = 2;</code>
+ */
+ public Builder clearEncryptionKey() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ encryptionKey_ = getDefaultInstance().getEncryptionKey();
+ onChanged();
+ return this;
+ }
+
+ // optional bool has_tag_compression = 3;
+ private boolean hasTagCompression_ ;
+ /**
+ * <code>optional bool has_tag_compression = 3;</code>
+ */
+ public boolean hasHasTagCompression() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional bool has_tag_compression = 3;</code>
+ */
+ public boolean getHasTagCompression() {
+ return hasTagCompression_;
+ }
+ /**
+ * <code>optional bool has_tag_compression = 3;</code>
+ */
+ public Builder setHasTagCompression(boolean value) {
+ bitField0_ |= 0x00000004;
+ hasTagCompression_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional bool has_tag_compression = 3;</code>
+ */
+ public Builder clearHasTagCompression() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ hasTagCompression_ = false;
+ onChanged();
+ return this;
+ }
+
+ // optional string writer_cls_name = 4;
+ private java.lang.Object writerClsName_ = "";
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ public boolean hasWriterClsName() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ public java.lang.String getWriterClsName() {
+ java.lang.Object ref = writerClsName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ writerClsName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ public com.google.protobuf.ByteString
+ getWriterClsNameBytes() {
+ java.lang.Object ref = writerClsName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ writerClsName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ public Builder setWriterClsName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000008;
+ writerClsName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ public Builder clearWriterClsName() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ writerClsName_ = getDefaultInstance().getWriterClsName();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string writer_cls_name = 4;</code>
+ */
+ public Builder setWriterClsNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000008;
+ writerClsName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional string cell_codec_cls_name = 5;
+ private java.lang.Object cellCodecClsName_ = "";
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ public boolean hasCellCodecClsName() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ public java.lang.String getCellCodecClsName() {
+ java.lang.Object ref = cellCodecClsName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ cellCodecClsName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ public com.google.protobuf.ByteString
+ getCellCodecClsNameBytes() {
+ java.lang.Object ref = cellCodecClsName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ cellCodecClsName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ public Builder setCellCodecClsName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000010;
+ cellCodecClsName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ public Builder clearCellCodecClsName() {
+ bitField0_ = (bitField0_ & ~0x00000010);
+ cellCodecClsName_ = getDefaultInstance().getCellCodecClsName();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string cell_codec_cls_name = 5;</code>
+ */
+ public Builder setCellCodecClsNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000010;
+ cellCodecClsName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.WALHeader)
+ }
+
+ static {
+ defaultInstance = new WALHeader(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.WALHeader)
+ }
+
+ public interface WALKeyOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes encoded_region_name = 1;
+ /**
+ * <code>required bytes encoded_region_name = 1;</code>
+ */
+ boolean hasEncodedRegionName();
+ /**
+ * <code>required bytes encoded_region_name = 1;</code>
+ */
+ com.google.protobuf.ByteString getEncodedRegionName();
+
+ // required bytes table_name = 2;
+ /**
+ * <code>required bytes table_name = 2;</code>
+ */
+ boolean hasTableName();
+ /**
+ * <code>required bytes table_name = 2;</code>
+ */
+ com.google.protobuf.ByteString getTableName();
+
+ // required uint64 log_sequence_number = 3;
+ /**
+ * <code>required uint64 log_sequence_number = 3;</code>
+ */
+ boolean hasLogSequenceNumber();
+ /**
+ * <code>required uint64 log_sequence_number = 3;</code>
+ */
+ long getLogSequenceNumber();
+
+ // required uint64 write_time = 4;
+ /**
+ * <code>required uint64 write_time = 4;</code>
+ */
+ boolean hasWriteTime();
+ /**
+ * <code>required uint64 write_time = 4;</code>
+ */
+ long getWriteTime();
+
+ // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];
+ /**
+ * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
+ *
+ * <pre>
+ *
+ *This parameter is deprecated in favor of clusters which
+ *contains the list of clusters that have consumed the change.
+ *It is retained so that the log created by earlier releases (0.94)
+ *can be read by the newer releases.
+ * </pre>
+ */
+ @java.lang.Deprecated boolean hasClusterId();
+ /**
+ * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
+ *
+ * <pre>
+ *
+ *This parameter is deprecated in favor of clusters which
+ *contains the list of clusters that have consumed the change.
+ *It is retained so that the log created by earlier releases (0.94)
+ *can be read by the newer releases.
+ * </pre>
+ */
+ @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterId();
+ /**
+ * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
+ *
+ * <pre>
+ *
+ *This parameter is deprecated in favor of clusters which
+ *contains the list of clusters that have consumed the change.
+ *It is retained so that the log created by earlier releases (0.94)
+ *can be read by the newer releases.
+ * </pre>
+ */
+ @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder();
+
+ // repeated .hbase.pb.FamilyScope scopes = 6;
+ /**
+ * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
+ */
+ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope>
+ getScopesList();
+ /**
+ * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
+ */
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope getScopes(int index);
+ /**
+ * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
+ */
+ int getScopesCount();
+ /**
+ * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
+ */
+ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder>
+ getScopesOrBuilderList();
+ /**
+ * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
+ */
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
+ int index);
+
+ // optional uint32 following_kv_count = 7;
+ /**
+ * <code>optional uint32 following_kv_count = 7;</code>
+ */
+ boolean hasFollowingKvCount();
+ /**
+ * <code>optional uint32 following_kv_count = 7;</code>
+ */
+ int getFollowingKvCount();
+
+ // repeated .hbase.pb.UUID cluster_ids = 8;
+ /**
+ * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
+ *
+ * <pre>
+ *
+ *This field contains the list of clusters that have
+ *consumed the change
+ * </pre>
+ */
+ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID>
+ getClusterIdsList();
+ /**
+ * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
+ *
+ * <pre>
+ *
+ *This field contains the list of clusters that have
+ *consumed the change
+ * </pre>
+ */
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterIds(int index);
+ /**
+ * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
+ *
+ * <pre>
+ *
+ *This field contains the list of clusters that have
+ *consumed the change
+ * </pre>
+ */
+ int getClusterIdsCount();
+ /**
+ * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
+ *
+ * <pre>
+ *
+ *This field contains the list of clusters that have
+ *consumed the change
+ * </pre>
+ */
+ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>
+ getClusterIdsOrBuilderList();
+ /**
+ * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
+ *
+ * <pre>
+ *
+ *This field contains the list of clusters that have
+ *consumed the change
+ * </pre>
+ */
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
+ int index);
+
+ // optional uint64 nonceGroup = 9;
+ /**
+ * <code>optional uint64 nonceGroup = 9;</code>
+ */
+ boolean hasNonceGroup();
+ /**
+ * <code>optional uint64 nonceGroup = 9;</code>
+ */
+ long getNonceGroup();
+
+ // optional uint64 nonce = 10;
+ /**
+ * <code>optional uint64 nonce = 10;</code>
+ */
+ boolean hasNonce();
+ /**
+ * <code>optional uint64 nonce = 10;</code>
+ */
+ long getNonce();
+
+ // optional uint64 orig_sequence_number = 11;
+ /**
+ * <code>optional uint64 orig_sequence_number = 11;</code>
+ */
+ boolean hasOrigSequenceNumber();
+ /**
+ * <code>optional uint64 orig_sequence_number = 11;</code>
+ */
+ long getOrigSequenceNumber();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.WALKey}
+ *
+ * <pre>
+ *
+ * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
+ * for some KVs
+ * </pre>
+ */
+ public static final class WALKey extends
+ com.google.protobuf.GeneratedMessage
+ implements WALKeyOrBuilder {
+ // Use WALKey.newBuilder() to construct.
+ private WALKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private WALKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final WALKey defaultInstance;
+ public static WALKey getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public WALKey getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private WALKey(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ encodedRegionName_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ tableName_ = input.readBytes();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ logSequenceNumber_ = input.readUInt64();
+ break;
+ }
+ case 32: {
+ bitField0_ |= 0x00000008;
+ writeTime_ = input.readUInt64();
+ break;
+ }
+ case 42: {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ subBuilder = clusterId_.toBuilder();
+ }
+ clusterId_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(clusterId_);
+ clusterId_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000010;
+ break;
+ }
+ case 50: {
+ if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
+ scopes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope>();
+ mutable_bitField0_ |= 0x00000020;
+ }
+ scopes_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.PARSER, extensionRegistry));
+ break;
+ }
+ case 56: {
+ bitField0_ |= 0x00000020;
+ followingKvCount_ = input.readUInt32();
+ break;
+ }
+ case 66: {
+ if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
+ clusterIds_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID>();
+ mutable_bitField0_ |= 0x00000080;
+ }
+ clusterIds_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry));
+ break;
+ }
+ case 72: {
+ bitField0_ |= 0x00000040;
+ nonceGroup_ = input.readUInt64();
+ break;
+ }
+ case 80: {
+ bitField0_ |= 0x00000080;
+ nonce_ = input.readUInt64();
+ break;
+ }
+ case 88: {
+ bitField0_ |= 0x00000100;
+ origSequenceNumber_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
+ scopes_ = java.util.Collections.unmodifiableList(scopes_);
+ }
+ if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
+ clusterIds_ = java.util.Collections.unmodifiableList(clusterIds_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<WALKey> PARSER =
+ new com.google.protobuf.AbstractParser<WALKey>() {
+ public WALKey parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new WALKey(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<WALKey> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required bytes encoded_region_name = 1;
+ public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString encodedRegionName_;
+ /**
+ * <code>required bytes encoded_region_name = 1;</code>
+ */
+ public boolean hasEncodedRegionName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required bytes encoded_region_name = 1;</code>
+ */
+ public com.google.protobuf.ByteString getEncodedRegionName() {
+ return encodedRegionName_;
+ }
+
+ // required bytes table_name = 2;
+ public static final int TABLE_NAME_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString tableName_;
+ /**
+ * <code>required bytes table_name = 2;</code>
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>required bytes table_name = 2;</code>
+ */
+ public com.google.protobuf.ByteString getTableName() {
+ return tableName_;
+ }
+
+ // required uint64 log_sequence_number = 3;
+ public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 3;
+ private long logSequenceNumber_;
+ /**
+ * <code>required uint64 log_sequence_number = 3;</code>
+ */
+ public boolean hasLogSequenceNumber() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>required uint64 log_sequence_number = 3;</code>
+ */
+ public long getLogSequenceNumber() {
+ return logSequenceNumber_;
+ }
+
+ // required uint64 write_time = 4;
+ public static final int WRITE_TIME_FIELD_NUMBER = 4;
+ private long writeTime_;
+ /**
+ * <code>required uint64 write_time = 4;</code>
+ */
+ public boolean hasWriteTime() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>required uint64 write_time = 4;</code>
+ */
+ public long getWriteTime() {
+ return writeTime_;
+ }
+
+ // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];
+ public static final int CLUSTER_ID_FIELD_NUMBER = 5;
+ private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID clusterId_;
+ /**
+ * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
+ *
+ * <pre>
+ *
+ *This parameter is deprecated in favor of clusters which
+ *contains the list of clusters that have consumed the change.
+ *It is retained so that the log created by earlier releases (0.94)
+ *can be read by the newer releases.
+ * </pre>
+ */
+ @java.lang.Deprecated public boolean hasClusterId() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
+ *
+ * <pre>
+ *
+ *This parameter is deprecated in favor of clusters which
+ *contains the list of clusters that have consumed the change.
+ *It is retained so that the log created by earlier releases (0.94)
+ *can be read by the newer releases.
+ * </pre>
+ */
+ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterId() {
+ return clusterId_;
+ }
+ /**
+ * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
+ *
+ * <pre>
+ *
+ *This parameter is deprecated in favor of clusters which
+ *contains the list of clusters that have consumed the change.
+ *It is retained so that the log created by earlier releases (0.94)
+ *can be read by the newer releases.
+ * </pre>
+ */
+ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() {
+ return clusterId_;
+ }
+
+ // repeated .hbase.pb.FamilyScope scopes = 6;
+ public static final int SCOPES_FIELD_NUMBER = 6;
+ private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope> scopes_;
+ /**
+ * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope> getScopesList() {
+ return scopes_;
+ }
+ /**
+ * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder>
+ getScopesOrBuilderList() {
+ return scopes_;
+ }
+ /**
+ * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
+ */
+ public int getScopesCount() {
+ return scopes_.size();
+ }
+ /**
+ * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope getScopes(int index) {
+ return scopes_.get(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
+ int index) {
+ return scopes_.get(index);
+ }
+
+ // optional uint32 following_kv_count = 7;
+ public static final int FOLLOWING_KV_COUNT_FIELD_NUMBER = 7;
+ private int followingKvCount_;
+ /**
+ * <code>optional uint32 following_kv_count = 7;</code>
+ */
+ public boolean hasFollowingKvCount() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * <code>optional uint32 following_kv_count = 7;</code>
+ */
+ public int getFollowingKvCount() {
+ return followingKvCount_;
+ }
+
+ // repeated .hbase.pb.UUID cluster_ids = 8;
+ public static final int CLUSTER_IDS_FIELD_NUMBER = 8;
+ private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID> clusterIds_;
+ /**
+ * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
+ *
+ * <pre>
+ *
+ *This field contains the list of clusters that have
+ *consumed the change
+ * </pre>
+ */
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID> getClusterIdsList() {
+ return clusterIds_;
+ }
+ /**
+ * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
+ *
+ * <pre>
+ *
+ *This field contains the list of clusters that have
+ *consumed the change
+ * </pre>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>
+ getClusterIdsOrBuilderList() {
+ return clusterIds_;
+ }
+ /**
+ * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
+ *
+ * <pre>
+ *
+ *This field contains the list of clusters that have
+ *consumed the change
+ * </pre>
+ */
+ public int getClusterIdsCount() {
+ return clusterIds_.size();
+ }
+ /**
+ * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
+ *
+ * <pre>
+ *
+ *This field contains the list of clusters that have
+ *consumed the change
+ * </pre>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) {
+ return clusterIds_.get(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
+ *
+ * <pre>
+ *
+ *This field contains the list of clusters that have
+ *consumed the change
+ * </pre>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
+ int index) {
+ return clusterIds_.get(index);
+ }
+
+ // optional uint64 nonceGroup = 9;
+ public static final int NONCEGROUP_FIELD_NUMBER = 9;
+ private long nonceGroup_;
+ /**
+ * <code>optional uint64 nonceGroup = 9;</code>
+ */
+ public boolean hasNonceGroup() {
+ return ((bitField0_ & 0x00000040) == 0x00000040);
+ }
+ /**
+ * <code>optional uint64 nonceGroup = 9;</code>
+ */
+ public long getNonceGroup() {
+ return nonceGroup_;
+ }
+
+ // optional uint64 nonce = 10;
+ public static final int NONCE_FIELD_NUMBER = 10;
+ private long nonce_;
+ /**
+ * <code>optional uint64 nonce = 10;</code>
+ */
+ public boolean hasNonce() {
+ return ((bitField0_ & 0x00000080) == 0x00000080);
+ }
+ /**
+ * <code>optional uint64 nonce = 10;</code>
+ */
+ public long getNonce() {
+ return nonce_;
+ }
+
+ // optional uint64 orig_sequence_number = 11;
+ public static final int ORIG_SEQUENCE_NUMBER_FIELD_NUMBER = 11;
+ private long origSequenceNumber_;
+ /**
+ * <code>optional uint64 orig_sequence_number = 11;</code>
+ */
+ public boolean hasOrigSequenceNumber() {
+ return ((bitField0_ & 0x00000100) == 0x00000100);
+ }
+ /**
+ * <code>optional uint64 orig_sequence_number = 11;</code>
+ */
+ public long getOrigSequenceNumber() {
+ return origSequenceNumber_;
+ }
+
+ private void initFields() {
+ encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+ tableName_ = com.google.protobuf.ByteString.EMPTY;
+ logSequenceNumber_ = 0L;
+ writeTime_ = 0L;
+ clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
+ scopes_ = java.util.Collections.emptyList();
+ followingKvCount_ = 0;
+ clusterIds_ = java.util.Collections.emptyList();
+ nonceGroup_ = 0L;
+ nonce_ = 0L;
+ origSequenceNumber_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasEncodedRegionName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasTableName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasLogSequenceNumber()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasWriteTime()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (hasClusterId()) {
+ if (!getClusterId().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ for (int i = 0; i < getScopesCount(); i++) {
+ if (!getScopes(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ for (int i = 0; i < getClusterIdsCount(); i++) {
+ if (!getClusterIds(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, encodedRegionName_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, tableName_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeUInt64(3, logSequenceNumber_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeUInt64(4, writeTime_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeMessage(5, clusterId_);
+ }
+ for (int i = 0; i < scopes_.size(); i++) {
+ output.writeMessage(6, scopes_.get(i));
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ output.writeUInt32(7, followingKvCount_);
+ }
+ for (int i = 0; i < clusterIds_.size(); i++) {
+ output.writeMessage(8, clusterIds_.get(i));
+ }
+ if (((bitField0_ & 0x00000040) == 0x00000040)) {
+ output.writeUInt64(9, nonceGroup_);
+ }
+ if (((bitField0_ & 0x00000080) == 0x00000080)) {
+ output.writeUInt64(10, nonce_);
+ }
+ if (((bitField0_ & 0x00000100) == 0x00000100)) {
+ output.writeUInt64(11, origSequenceNumber_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, encodedRegionName_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, tableName_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(3, logSequenceNumber_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(4, writeTime_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(5, clusterId_);
+ }
+ for (int i = 0; i < scopes_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(6, scopes_.get(i));
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(7, followingKvCount_);
+ }
+ for (int i = 0; i < clusterIds_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(8, clusterIds_.get(i));
+ }
+ if (((bitField0_ & 0x00000040) == 0x00000040)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(9, nonceGroup_);
+ }
+ if (((bitField0_ & 0x00000080) == 0x00000080)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(10, nonce_);
+ }
+ if (((bitField0_ & 0x00000100) == 0x00000100)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(11, origSequenceNumber_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey) obj;
+
+ boolean result = true;
+ result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
+ if (hasEncodedRegionName()) {
+ result = result && getEncodedRegionName()
+ .equals(other.getEncodedRegionName());
+ }
+ result = result && (hasTableName() == other.hasTableName());
+ if (hasTableName()) {
+ result = result && getTableName()
+ .equals(other.getTableName());
+ }
+ result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
+ if (hasLogSequenceNumber()) {
+ result = result && (getLogSequenceNumber()
+ == other.getLogSequenceNumber());
+ }
+ result = result && (hasWriteTime() == other.hasWriteTime());
+ if (hasWriteTime()) {
+ result = result && (getWriteTime()
+ == other.getWriteTime());
+ }
+ result = result && (hasClusterId() == other.hasClusterId());
+ if (hasClusterId()) {
+ result = result && getClusterId()
+ .equals(other.getClusterId());
+ }
+ result = result && getScopesList()
+ .equals(other.getScopesList());
+ result = result && (hasFollowingKvCount() == other.hasFollowingKvCount());
+ if (hasFollowingKvCount()) {
+ result = result && (getFollowingKvCount()
+ == other.getFollowingKvCount());
+ }
+ result = result && getClusterIdsList()
+ .equals(other.getClusterIdsList());
+ result = result && (hasNonceGroup() == other.hasNonceGroup());
+ if (hasNonceGroup()) {
+ result = result && (getNonceGroup()
+ == other.getNonceGroup());
+ }
+ result = result && (hasNonce() == other.hasNonce());
+ if (hasNonce()) {
+ result = result && (getNonce()
+ == other.getNonce());
+ }
+ result = result && (hasOrigSequenceNumber() == other.hasOrigSequenceNumber());
+ if (hasOrigSequenceNumber()) {
+ result = result && (getOrigSequenceNumber()
+ == other.getOrigSequenceNumber());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasEncodedRegionName()) {
+ hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getEncodedRegionName().hashCode();
+ }
+ if (hasTableName()) {
+ hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getTableName().hashCode();
+ }
+ if (hasLogSequenceNumber()) {
+ hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getLogSequenceNumber());
+ }
+ if (hasWriteTime()) {
+ hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getWriteTime());
+ }
+ if (hasClusterId()) {
+ hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
+ hash = (53 * hash) + getClusterId().hashCode();
+ }
+ if (getScopesCount() > 0) {
+ hash = (37 * hash) + SCOPES_FIELD_NUMBER;
+ hash = (53 * hash) + getScopesList().hashCode();
+ }
+ if (hasFollowingKvCount()) {
+ hash = (37 * hash) + FOLLOWING_KV_COUNT_FIELD_NUMBER;
+ hash = (53 * hash) + getFollowingKvCount();
+ }
+ if (getClusterIdsCount() > 0) {
+ hash = (37 * hash) + CLUSTER_IDS_FIELD_NUMBER;
+ hash = (53 * hash) + getClusterIdsList().hashCode();
+ }
+ if (hasNonceGroup()) {
+ hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getNonceGroup());
+ }
+ if (hasNonce()) {
+ hash = (37 * hash) + NONCE_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getNonce());
+ }
+ if (hasOrigSequenceNumber()) {
+ hash = (37 * hash) + ORIG_SEQUENCE_NUMBER_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getOrigSequenceNumber());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.WALKey}
+ *
+ * <pre>
+ *
+ * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
+ * for some KVs
+ * </pre>
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKeyOrBuilder {
/** Returns the protobuf descriptor for the {@code hbase.pb.WALKey} message type. */
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor;
}

/**
 * Returns the reflection table mapping descriptor fields to the generated
 * accessors of {@code WALKey} and its builder.
 */
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.Builder.class);
}
+
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.newBuilder()
/** Creates a detached builder; use {@code WALKey.newBuilder()} instead of calling directly. */
private Builder() {
  maybeForceBuilderInitialization();
}

/** Creates a builder that reports changes to {@code parent} (used for nested builders). */
private Builder(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
/**
 * Eagerly creates the nested field builders for cluster_id, scopes and
 * cluster_ids when the runtime is configured to always use field builders
 * (i.e. when change notifications to a parent are required).
 */
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
    getClusterIdFieldBuilder();
    getScopesFieldBuilder();
    getClusterIdsFieldBuilder();
  }
}
/** Factory used by {@code WALKey.newBuilder()}. */
private static Builder create() {
  return new Builder();
}
+
/**
 * Resets every field to its proto default and drops all has-bits in
 * {@code bitField0_}. Message-typed fields are reset through their nested
 * builders when those have been instantiated, otherwise by reassigning the
 * raw default instance/empty list.
 *
 * @return this builder, for chaining
 */
public Builder clear() {
  super.clear();
  // Scalar and bytes fields: restore the default and clear the has-bit.
  encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
  bitField0_ = (bitField0_ & ~0x00000001);
  tableName_ = com.google.protobuf.ByteString.EMPTY;
  bitField0_ = (bitField0_ & ~0x00000002);
  logSequenceNumber_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000004);
  writeTime_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000008);
  // Singular message field (cluster_id).
  if (clusterIdBuilder_ == null) {
    clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
  } else {
    clusterIdBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000010);
  // Repeated message field (scopes): the 0x20 bit tracks list mutability,
  // cleared only on the raw-list path.
  if (scopesBuilder_ == null) {
    scopes_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000020);
  } else {
    scopesBuilder_.clear();
  }
  followingKvCount_ = 0;
  bitField0_ = (bitField0_ & ~0x00000040);
  // Repeated message field (cluster_ids), same pattern as scopes.
  if (clusterIdsBuilder_ == null) {
    clusterIds_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000080);
  } else {
    clusterIdsBuilder_.clear();
  }
  nonceGroup_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000100);
  nonce_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000200);
  origSequenceNumber_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000400);
  return this;
}
+
/** Returns a deep copy of this builder (a fresh builder merged from a partial build). */
public Builder clone() {
  return create().mergeFrom(buildPartial());
}

/** Returns the descriptor of the message type this builder produces. */
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor;
}

/** Returns the shared immutable default {@code WALKey} instance. */
public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey getDefaultInstanceForType() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance();
}
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey build() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
/**
 * Builds the message without checking required fields. Builder has-bits are
 * compacted into the message's bit layout: repeated fields (scopes,
 * cluster_ids) occupy builder bits 0x20/0x80 but carry no message has-bit,
 * so later singular fields shift down (builder 0x40 -> message 0x20,
 * 0x100 -> 0x40, 0x200 -> 0x80, 0x400 -> 0x100).
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey buildPartial() {
  org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.encodedRegionName_ = encodedRegionName_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.tableName_ = tableName_;
  if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
    to_bitField0_ |= 0x00000004;
  }
  result.logSequenceNumber_ = logSequenceNumber_;
  if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
    to_bitField0_ |= 0x00000008;
  }
  result.writeTime_ = writeTime_;
  if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
    to_bitField0_ |= 0x00000010;
  }
  if (clusterIdBuilder_ == null) {
    result.clusterId_ = clusterId_;
  } else {
    result.clusterId_ = clusterIdBuilder_.build();
  }
  // Repeated field: on the raw-list path, freeze the list and clear the
  // mutability bit so the builder won't mutate what the message now shares.
  if (scopesBuilder_ == null) {
    if (((bitField0_ & 0x00000020) == 0x00000020)) {
      scopes_ = java.util.Collections.unmodifiableList(scopes_);
      bitField0_ = (bitField0_ & ~0x00000020);
    }
    result.scopes_ = scopes_;
  } else {
    result.scopes_ = scopesBuilder_.build();
  }
  if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
    to_bitField0_ |= 0x00000020;
  }
  result.followingKvCount_ = followingKvCount_;
  // Same freeze-and-share pattern for cluster_ids.
  if (clusterIdsBuilder_ == null) {
    if (((bitField0_ & 0x00000080) == 0x00000080)) {
      clusterIds_ = java.util.Collections.unmodifiableList(clusterIds_);
      bitField0_ = (bitField0_ & ~0x00000080);
    }
    result.clusterIds_ = clusterIds_;
  } else {
    result.clusterIds_ = clusterIdsBuilder_.build();
  }
  if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
    to_bitField0_ |= 0x00000040;
  }
  result.nonceGroup_ = nonceGroup_;
  if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
    to_bitField0_ |= 0x00000080;
  }
  result.nonce_ = nonce_;
  if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
    to_bitField0_ |= 0x00000100;
  }
  result.origSequenceNumber_ = origSequenceNumber_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey) {
+ return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
/**
 * Field-by-field merge of {@code other} into this builder: set singular
 * fields present in {@code other} overwrite ours, the singular message
 * cluster_id is recursively merged, and repeated fields are concatenated.
 *
 * @return this builder, for chaining
 */
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey other) {
  if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this;
  if (other.hasEncodedRegionName()) {
    setEncodedRegionName(other.getEncodedRegionName());
  }
  if (other.hasTableName()) {
    setTableName(other.getTableName());
  }
  if (other.hasLogSequenceNumber()) {
    setLogSequenceNumber(other.getLogSequenceNumber());
  }
  if (other.hasWriteTime()) {
    setWriteTime(other.getWriteTime());
  }
  if (other.hasClusterId()) {
    mergeClusterId(other.getClusterId());
  }
  // scopes: if we have no builder, share other's immutable list when ours is
  // empty (clearing the mutability bit), else copy-append.
  if (scopesBuilder_ == null) {
    if (!other.scopes_.isEmpty()) {
      if (scopes_.isEmpty()) {
        scopes_ = other.scopes_;
        bitField0_ = (bitField0_ & ~0x00000020);
      } else {
        ensureScopesIsMutable();
        scopes_.addAll(other.scopes_);
      }
      onChanged();
    }
  } else {
    // Builder path: if our builder is empty, dispose it and adopt other's
    // list directly (re-creating the builder only when the runtime forces
    // field builders); otherwise append through the builder.
    if (!other.scopes_.isEmpty()) {
      if (scopesBuilder_.isEmpty()) {
        scopesBuilder_.dispose();
        scopesBuilder_ = null;
        scopes_ = other.scopes_;
        bitField0_ = (bitField0_ & ~0x00000020);
        scopesBuilder_ =
          com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
             getScopesFieldBuilder() : null;
      } else {
        scopesBuilder_.addAllMessages(other.scopes_);
      }
    }
  }
  if (other.hasFollowingKvCount()) {
    setFollowingKvCount(other.getFollowingKvCount());
  }
  // cluster_ids: same merge strategy as scopes (bit 0x80).
  if (clusterIdsBuilder_ == null) {
    if (!other.clusterIds_.isEmpty()) {
      if (clusterIds_.isEmpty()) {
        clusterIds_ = other.clusterIds_;
        bitField0_ = (bitField0_ & ~0x00000080);
      } else {
        ensureClusterIdsIsMutable();
        clusterIds_.addAll(other.clusterIds_);
      }
      onChanged();
    }
  } else {
    if (!other.clusterIds_.isEmpty()) {
      if (clusterIdsBuilder_.isEmpty()) {
        clusterIdsBuilder_.dispose();
        clusterIdsBuilder_ = null;
        clusterIds_ = other.clusterIds_;
        bitField0_ = (bitField0_ & ~0x00000080);
        clusterIdsBuilder_ =
          com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
             getClusterIdsFieldBuilder() : null;
      } else {
        clusterIdsBuilder_.addAllMessages(other.clusterIds_);
      }
    }
  }
  if (other.hasNonceGroup()) {
    setNonceGroup(other.getNonceGroup());
  }
  if (other.hasNonce()) {
    setNonce(other.getNonce());
  }
  if (other.hasOrigSequenceNumber()) {
    setOrigSequenceNumber(other.getOrigSequenceNumber());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
+
+ public final boolean isInitialized() {
+ if (!hasEncodedRegionName()) {
+
+ return false;
+ }
+ if (!hasTableName()) {
+
+ return false;
+ }
+ if (!hasLogSequenceNumber()) {
+
+ return false;
+ }
+ if (!hasWriteTime()) {
+
+ return false;
+ }
+ if (hasClusterId()) {
+ if (!getClusterId().isInitialized()) {
+
+ return false;
+ }
+ }
+ for (int i = 0; i < getScopesCount(); i++) {
+ if (!getScopes(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ for (int i = 0; i < getClusterIdsCount(); i++) {
+ if (!getClusterIds(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
/**
 * Parses a {@code WALKey} from {@code input} and merges it into this builder.
 * On a parse failure, the partially parsed message recovered from the
 * exception is still merged (in the finally block) before rethrowing, so no
 * successfully read fields are lost.
 *
 * @throws java.io.IOException on stream errors or malformed input
 */
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Keep whatever was parsed before the failure for the finally merge.
    parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey) e.getUnfinishedMessage();
    throw e;
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Has-bits / list-mutability bits for the builder's fields; see clear() for
// the per-field bit assignments.
private int bitField0_;

// required bytes encoded_region_name = 1;
private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>required bytes encoded_region_name = 1;</code>
 * Returns whether the field has been explicitly set (bit 0x01).
 */
public boolean hasEncodedRegionName() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required bytes encoded_region_name = 1;</code>
 */
public com.google.protobuf.ByteString getEncodedRegionName() {
  return encodedRegionName_;
}
/**
 * <code>required bytes encoded_region_name = 1;</code>
 * Sets the field; null is rejected, and listeners are notified via onChanged().
 */
public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
  encodedRegionName_ = value;
  onChanged();
  return this;
}
/**
 * <code>required bytes encoded_region_name = 1;</code>
 * Clears the has-bit and restores the default (empty) value.
 */
public Builder clearEncodedRegionName() {
  bitField0_ = (bitField0_ & ~0x00000001);
  encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
  onChanged();
  return this;
}
+
// required bytes table_name = 2;
private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>required bytes table_name = 2;</code>
 * Returns whether the field has been explicitly set (bit 0x02).
 */
public boolean hasTableName() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required bytes table_name = 2;</code>
 */
public com.google.protobuf.ByteString getTableName() {
  return tableName_;
}
/**
 * <code>required bytes table_name = 2;</code>
 * Sets the field; null is rejected, and listeners are notified via onChanged().
 */
public Builder setTableName(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
  tableName_ = value;
  onChanged();
  return this;
}
/**
 * <code>required bytes table_name = 2;</code>
 * Clears the has-bit and restores the default (empty) value.
 */
public Builder clearTableName() {
  bitField0_ = (bitField0_ & ~0x00000002);
  tableName_ = getDefaultInstance().getTableName();
  onChanged();
  return this;
}
+
// required uint64 log_sequence_number = 3;
private long logSequenceNumber_ ;
/**
 * <code>required uint64 log_sequence_number = 3;</code>
 * Returns whether the field has been explicitly set (bit 0x04).
 */
public boolean hasLogSequenceNumber() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>required uint64 log_sequence_number = 3;</code>
 */
public long getLogSequenceNumber() {
  return logSequenceNumber_;
}
/**
 * <code>required uint64 log_sequence_number = 3;</code>
 * Sets the field and notifies listeners via onChanged().
 */
public Builder setLogSequenceNumber(long value) {
  bitField0_ |= 0x00000004;
  logSequenceNumber_ = value;
  onChanged();
  return this;
}
/**
 * <code>required uint64 log_sequence_number = 3;</code>
 * Clears the has-bit and restores the default value (0).
 */
public Builder clearLogSequenceNumber() {
  bitField0_ = (bitField0_ & ~0x00000004);
  logSequenceNumber_ = 0L;
  onChanged();
  return this;
}
+
+ // required uint64 write_time = 4;
+ private long writeTime_ ;
+ /**
+ * <code>re
<TRUNCATED>