You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2016/09/29 19:37:28 UTC
[14/51] [partial] hbase git commit: HBASE-16264 Figure how to deal
with endpoints and shaded pb Shade our protobufs. Do it in a manner that
makes it so we can still have in our API references to com.google.protobuf
(and in REST). The c.g.p in API is for
http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java
new file mode 100644
index 0000000..99dbd72
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java
@@ -0,0 +1,2403 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: HFile.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class HFileProtos {
+ private HFileProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface FileInfoProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .hbase.pb.BytesBytesPair map_entry = 1;
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair>
+ getMapEntryList();
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index);
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ int getMapEntryCount();
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
+ getMapEntryOrBuilderList();
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder(
+ int index);
+ }
+ /**
+ * Protobuf type {@code hbase.pb.FileInfoProto}
+ *
+ * <pre>
+ * Map of name/values
+ * </pre>
+ */
+ public static final class FileInfoProto extends
+ com.google.protobuf.GeneratedMessage
+ implements FileInfoProtoOrBuilder {
+ // Use FileInfoProto.newBuilder() to construct.
+ private FileInfoProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private FileInfoProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final FileInfoProto defaultInstance;
+ public static FileInfoProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public FileInfoProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private FileInfoProto(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ mapEntry_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ mapEntry_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ mapEntry_ = java.util.Collections.unmodifiableList(mapEntry_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<FileInfoProto> PARSER =
+ new com.google.protobuf.AbstractParser<FileInfoProto>() {
+ public FileInfoProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new FileInfoProto(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<FileInfoProto> getParserForType() {
+ return PARSER;
+ }
+
+ // repeated .hbase.pb.BytesBytesPair map_entry = 1;
+ public static final int MAP_ENTRY_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> mapEntry_;
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> getMapEntryList() {
+ return mapEntry_;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
+ getMapEntryOrBuilderList() {
+ return mapEntry_;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public int getMapEntryCount() {
+ return mapEntry_.size();
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index) {
+ return mapEntry_.get(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder(
+ int index) {
+ return mapEntry_.get(index);
+ }
+
+ private void initFields() {
+ mapEntry_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getMapEntryCount(); i++) {
+ if (!getMapEntry(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < mapEntry_.size(); i++) {
+ output.writeMessage(1, mapEntry_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < mapEntry_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, mapEntry_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto) obj;
+
+ boolean result = true;
+ result = result && getMapEntryList()
+ .equals(other.getMapEntryList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getMapEntryCount() > 0) {
+ hash = (37 * hash) + MAP_ENTRY_FIELD_NUMBER;
+ hash = (53 * hash) + getMapEntryList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.FileInfoProto}
+ *
+ * <pre>
+ * Map of name/values
+ * </pre>
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getMapEntryFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (mapEntryBuilder_ == null) {
+ mapEntry_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ mapEntryBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto build() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto buildPartial() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto(this);
+ int from_bitField0_ = bitField0_;
+ if (mapEntryBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ mapEntry_ = java.util.Collections.unmodifiableList(mapEntry_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.mapEntry_ = mapEntry_;
+ } else {
+ result.mapEntry_ = mapEntryBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto) {
+ return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto other) {
+ if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.getDefaultInstance()) return this;
+ if (mapEntryBuilder_ == null) {
+ if (!other.mapEntry_.isEmpty()) {
+ if (mapEntry_.isEmpty()) {
+ mapEntry_ = other.mapEntry_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureMapEntryIsMutable();
+ mapEntry_.addAll(other.mapEntry_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.mapEntry_.isEmpty()) {
+ if (mapEntryBuilder_.isEmpty()) {
+ mapEntryBuilder_.dispose();
+ mapEntryBuilder_ = null;
+ mapEntry_ = other.mapEntry_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ mapEntryBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getMapEntryFieldBuilder() : null;
+ } else {
+ mapEntryBuilder_.addAllMessages(other.mapEntry_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getMapEntryCount(); i++) {
+ if (!getMapEntry(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // repeated .hbase.pb.BytesBytesPair map_entry = 1;
+ private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> mapEntry_ =
+ java.util.Collections.emptyList();
+ private void ensureMapEntryIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ mapEntry_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair>(mapEntry_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> mapEntryBuilder_;
+
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> getMapEntryList() {
+ if (mapEntryBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(mapEntry_);
+ } else {
+ return mapEntryBuilder_.getMessageList();
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public int getMapEntryCount() {
+ if (mapEntryBuilder_ == null) {
+ return mapEntry_.size();
+ } else {
+ return mapEntryBuilder_.getCount();
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index) {
+ if (mapEntryBuilder_ == null) {
+ return mapEntry_.get(index);
+ } else {
+ return mapEntryBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public Builder setMapEntry(
+ int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) {
+ if (mapEntryBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMapEntryIsMutable();
+ mapEntry_.set(index, value);
+ onChanged();
+ } else {
+ mapEntryBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public Builder setMapEntry(
+ int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
+ if (mapEntryBuilder_ == null) {
+ ensureMapEntryIsMutable();
+ mapEntry_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ mapEntryBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public Builder addMapEntry(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) {
+ if (mapEntryBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMapEntryIsMutable();
+ mapEntry_.add(value);
+ onChanged();
+ } else {
+ mapEntryBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public Builder addMapEntry(
+ int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) {
+ if (mapEntryBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMapEntryIsMutable();
+ mapEntry_.add(index, value);
+ onChanged();
+ } else {
+ mapEntryBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public Builder addMapEntry(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
+ if (mapEntryBuilder_ == null) {
+ ensureMapEntryIsMutable();
+ mapEntry_.add(builderForValue.build());
+ onChanged();
+ } else {
+ mapEntryBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public Builder addMapEntry(
+ int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
+ if (mapEntryBuilder_ == null) {
+ ensureMapEntryIsMutable();
+ mapEntry_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ mapEntryBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public Builder addAllMapEntry(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> values) {
+ if (mapEntryBuilder_ == null) {
+ ensureMapEntryIsMutable();
+ super.addAll(values, mapEntry_);
+ onChanged();
+ } else {
+ mapEntryBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public Builder clearMapEntry() {
+ if (mapEntryBuilder_ == null) {
+ mapEntry_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ mapEntryBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public Builder removeMapEntry(int index) {
+ if (mapEntryBuilder_ == null) {
+ ensureMapEntryIsMutable();
+ mapEntry_.remove(index);
+ onChanged();
+ } else {
+ mapEntryBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getMapEntryBuilder(
+ int index) {
+ return getMapEntryFieldBuilder().getBuilder(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder(
+ int index) {
+ if (mapEntryBuilder_ == null) {
+ return mapEntry_.get(index); } else {
+ return mapEntryBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
+ getMapEntryOrBuilderList() {
+ if (mapEntryBuilder_ != null) {
+ return mapEntryBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(mapEntry_);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addMapEntryBuilder() {
+ return getMapEntryFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addMapEntryBuilder(
+ int index) {
+ return getMapEntryFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder>
+ getMapEntryBuilderList() {
+ return getMapEntryFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
+ getMapEntryFieldBuilder() {
+ if (mapEntryBuilder_ == null) {
+ mapEntryBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>(
+ mapEntry_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ mapEntry_ = null;
+ }
+ return mapEntryBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.FileInfoProto)
+ }
+
+ static {
+ defaultInstance = new FileInfoProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.FileInfoProto)
+ }
+
+ public interface FileTrailerProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional uint64 file_info_offset = 1;
+ /**
+ * <code>optional uint64 file_info_offset = 1;</code>
+ */
+ boolean hasFileInfoOffset();
+ /**
+ * <code>optional uint64 file_info_offset = 1;</code>
+ */
+ long getFileInfoOffset();
+
+ // optional uint64 load_on_open_data_offset = 2;
+ /**
+ * <code>optional uint64 load_on_open_data_offset = 2;</code>
+ */
+ boolean hasLoadOnOpenDataOffset();
+ /**
+ * <code>optional uint64 load_on_open_data_offset = 2;</code>
+ */
+ long getLoadOnOpenDataOffset();
+
+ // optional uint64 uncompressed_data_index_size = 3;
+ /**
+ * <code>optional uint64 uncompressed_data_index_size = 3;</code>
+ */
+ boolean hasUncompressedDataIndexSize();
+ /**
+ * <code>optional uint64 uncompressed_data_index_size = 3;</code>
+ */
+ long getUncompressedDataIndexSize();
+
+ // optional uint64 total_uncompressed_bytes = 4;
+ /**
+ * <code>optional uint64 total_uncompressed_bytes = 4;</code>
+ */
+ boolean hasTotalUncompressedBytes();
+ /**
+ * <code>optional uint64 total_uncompressed_bytes = 4;</code>
+ */
+ long getTotalUncompressedBytes();
+
+ // optional uint32 data_index_count = 5;
+ /**
+ * <code>optional uint32 data_index_count = 5;</code>
+ */
+ boolean hasDataIndexCount();
+ /**
+ * <code>optional uint32 data_index_count = 5;</code>
+ */
+ int getDataIndexCount();
+
+ // optional uint32 meta_index_count = 6;
+ /**
+ * <code>optional uint32 meta_index_count = 6;</code>
+ */
+ boolean hasMetaIndexCount();
+ /**
+ * <code>optional uint32 meta_index_count = 6;</code>
+ */
+ int getMetaIndexCount();
+
+ // optional uint64 entry_count = 7;
+ /**
+ * <code>optional uint64 entry_count = 7;</code>
+ */
+ boolean hasEntryCount();
+ /**
+ * <code>optional uint64 entry_count = 7;</code>
+ */
+ long getEntryCount();
+
+ // optional uint32 num_data_index_levels = 8;
+ /**
+ * <code>optional uint32 num_data_index_levels = 8;</code>
+ */
+ boolean hasNumDataIndexLevels();
+ /**
+ * <code>optional uint32 num_data_index_levels = 8;</code>
+ */
+ int getNumDataIndexLevels();
+
+ // optional uint64 first_data_block_offset = 9;
+ /**
+ * <code>optional uint64 first_data_block_offset = 9;</code>
+ */
+ boolean hasFirstDataBlockOffset();
+ /**
+ * <code>optional uint64 first_data_block_offset = 9;</code>
+ */
+ long getFirstDataBlockOffset();
+
+ // optional uint64 last_data_block_offset = 10;
+ /**
+ * <code>optional uint64 last_data_block_offset = 10;</code>
+ */
+ boolean hasLastDataBlockOffset();
+ /**
+ * <code>optional uint64 last_data_block_offset = 10;</code>
+ */
+ long getLastDataBlockOffset();
+
+ // optional string comparator_class_name = 11;
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ boolean hasComparatorClassName();
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ java.lang.String getComparatorClassName();
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ com.google.protobuf.ByteString
+ getComparatorClassNameBytes();
+
+ // optional uint32 compression_codec = 12;
+ /**
+ * <code>optional uint32 compression_codec = 12;</code>
+ */
+ boolean hasCompressionCodec();
+ /**
+ * <code>optional uint32 compression_codec = 12;</code>
+ */
+ int getCompressionCodec();
+
+ // optional bytes encryption_key = 13;
+ /**
+ * <code>optional bytes encryption_key = 13;</code>
+ */
+ boolean hasEncryptionKey();
+ /**
+ * <code>optional bytes encryption_key = 13;</code>
+ */
+ com.google.protobuf.ByteString getEncryptionKey();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.FileTrailerProto}
+ *
+ * <pre>
+ * HFile file trailer
+ * </pre>
+ */
+ public static final class FileTrailerProto extends
+ com.google.protobuf.GeneratedMessage
+ implements FileTrailerProtoOrBuilder {
+ // Use FileTrailerProto.newBuilder() to construct.
+ private FileTrailerProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private FileTrailerProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final FileTrailerProto defaultInstance;
+ public static FileTrailerProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public FileTrailerProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private FileTrailerProto(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ fileInfoOffset_ = input.readUInt64();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ loadOnOpenDataOffset_ = input.readUInt64();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ uncompressedDataIndexSize_ = input.readUInt64();
+ break;
+ }
+ case 32: {
+ bitField0_ |= 0x00000008;
+ totalUncompressedBytes_ = input.readUInt64();
+ break;
+ }
+ case 40: {
+ bitField0_ |= 0x00000010;
+ dataIndexCount_ = input.readUInt32();
+ break;
+ }
+ case 48: {
+ bitField0_ |= 0x00000020;
+ metaIndexCount_ = input.readUInt32();
+ break;
+ }
+ case 56: {
+ bitField0_ |= 0x00000040;
+ entryCount_ = input.readUInt64();
+ break;
+ }
+ case 64: {
+ bitField0_ |= 0x00000080;
+ numDataIndexLevels_ = input.readUInt32();
+ break;
+ }
+ case 72: {
+ bitField0_ |= 0x00000100;
+ firstDataBlockOffset_ = input.readUInt64();
+ break;
+ }
+ case 80: {
+ bitField0_ |= 0x00000200;
+ lastDataBlockOffset_ = input.readUInt64();
+ break;
+ }
+ case 90: {
+ bitField0_ |= 0x00000400;
+ comparatorClassName_ = input.readBytes();
+ break;
+ }
+ case 96: {
+ bitField0_ |= 0x00000800;
+ compressionCodec_ = input.readUInt32();
+ break;
+ }
+ case 106: {
+ bitField0_ |= 0x00001000;
+ encryptionKey_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<FileTrailerProto> PARSER =
+ new com.google.protobuf.AbstractParser<FileTrailerProto>() {
+ public FileTrailerProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new FileTrailerProto(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<FileTrailerProto> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional uint64 file_info_offset = 1;
+ public static final int FILE_INFO_OFFSET_FIELD_NUMBER = 1;
+ private long fileInfoOffset_;
+ /**
+ * <code>optional uint64 file_info_offset = 1;</code>
+ */
+ public boolean hasFileInfoOffset() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional uint64 file_info_offset = 1;</code>
+ */
+ public long getFileInfoOffset() {
+ return fileInfoOffset_;
+ }
+
+ // optional uint64 load_on_open_data_offset = 2;
+ public static final int LOAD_ON_OPEN_DATA_OFFSET_FIELD_NUMBER = 2;
+ private long loadOnOpenDataOffset_;
+ /**
+ * <code>optional uint64 load_on_open_data_offset = 2;</code>
+ */
+ public boolean hasLoadOnOpenDataOffset() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional uint64 load_on_open_data_offset = 2;</code>
+ */
+ public long getLoadOnOpenDataOffset() {
+ return loadOnOpenDataOffset_;
+ }
+
+ // optional uint64 uncompressed_data_index_size = 3;
+ public static final int UNCOMPRESSED_DATA_INDEX_SIZE_FIELD_NUMBER = 3;
+ private long uncompressedDataIndexSize_;
+ /**
+ * <code>optional uint64 uncompressed_data_index_size = 3;</code>
+ */
+ public boolean hasUncompressedDataIndexSize() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional uint64 uncompressed_data_index_size = 3;</code>
+ */
+ public long getUncompressedDataIndexSize() {
+ return uncompressedDataIndexSize_;
+ }
+
+ // optional uint64 total_uncompressed_bytes = 4;
+ public static final int TOTAL_UNCOMPRESSED_BYTES_FIELD_NUMBER = 4;
+ private long totalUncompressedBytes_;
+ /**
+ * <code>optional uint64 total_uncompressed_bytes = 4;</code>
+ */
+ public boolean hasTotalUncompressedBytes() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional uint64 total_uncompressed_bytes = 4;</code>
+ */
+ public long getTotalUncompressedBytes() {
+ return totalUncompressedBytes_;
+ }
+
+ // optional uint32 data_index_count = 5;
+ public static final int DATA_INDEX_COUNT_FIELD_NUMBER = 5;
+ private int dataIndexCount_;
+ /**
+ * <code>optional uint32 data_index_count = 5;</code>
+ */
+ public boolean hasDataIndexCount() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional uint32 data_index_count = 5;</code>
+ */
+ public int getDataIndexCount() {
+ return dataIndexCount_;
+ }
+
+ // optional uint32 meta_index_count = 6;
+ public static final int META_INDEX_COUNT_FIELD_NUMBER = 6;
+ private int metaIndexCount_;
+ /**
+ * <code>optional uint32 meta_index_count = 6;</code>
+ */
+ public boolean hasMetaIndexCount() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * <code>optional uint32 meta_index_count = 6;</code>
+ */
+ public int getMetaIndexCount() {
+ return metaIndexCount_;
+ }
+
+ // optional uint64 entry_count = 7;
+ public static final int ENTRY_COUNT_FIELD_NUMBER = 7;
+ private long entryCount_;
+ /**
+ * <code>optional uint64 entry_count = 7;</code>
+ */
+ public boolean hasEntryCount() {
+ return ((bitField0_ & 0x00000040) == 0x00000040);
+ }
+ /**
+ * <code>optional uint64 entry_count = 7;</code>
+ */
+ public long getEntryCount() {
+ return entryCount_;
+ }
+
+ // optional uint32 num_data_index_levels = 8;
+ public static final int NUM_DATA_INDEX_LEVELS_FIELD_NUMBER = 8;
+ private int numDataIndexLevels_;
+ /**
+ * <code>optional uint32 num_data_index_levels = 8;</code>
+ */
+ public boolean hasNumDataIndexLevels() {
+ return ((bitField0_ & 0x00000080) == 0x00000080);
+ }
+ /**
+ * <code>optional uint32 num_data_index_levels = 8;</code>
+ */
+ public int getNumDataIndexLevels() {
+ return numDataIndexLevels_;
+ }
+
+ // optional uint64 first_data_block_offset = 9;
+ public static final int FIRST_DATA_BLOCK_OFFSET_FIELD_NUMBER = 9;
+ private long firstDataBlockOffset_;
+ /**
+ * <code>optional uint64 first_data_block_offset = 9;</code>
+ */
+ public boolean hasFirstDataBlockOffset() {
+ return ((bitField0_ & 0x00000100) == 0x00000100);
+ }
+ /**
+ * <code>optional uint64 first_data_block_offset = 9;</code>
+ */
+ public long getFirstDataBlockOffset() {
+ return firstDataBlockOffset_;
+ }
+
+ // optional uint64 last_data_block_offset = 10;
+ public static final int LAST_DATA_BLOCK_OFFSET_FIELD_NUMBER = 10;
+ private long lastDataBlockOffset_;
+ /**
+ * <code>optional uint64 last_data_block_offset = 10;</code>
+ */
+ public boolean hasLastDataBlockOffset() {
+ return ((bitField0_ & 0x00000200) == 0x00000200);
+ }
+ /**
+ * <code>optional uint64 last_data_block_offset = 10;</code>
+ */
+ public long getLastDataBlockOffset() {
+ return lastDataBlockOffset_;
+ }
+
+ // optional string comparator_class_name = 11;
+ public static final int COMPARATOR_CLASS_NAME_FIELD_NUMBER = 11;
+ private java.lang.Object comparatorClassName_;
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ public boolean hasComparatorClassName() {
+ return ((bitField0_ & 0x00000400) == 0x00000400);
+ }
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ public java.lang.String getComparatorClassName() {
+ java.lang.Object ref = comparatorClassName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ comparatorClassName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ public com.google.protobuf.ByteString
+ getComparatorClassNameBytes() {
+ java.lang.Object ref = comparatorClassName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ comparatorClassName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional uint32 compression_codec = 12;
+ public static final int COMPRESSION_CODEC_FIELD_NUMBER = 12;
+ private int compressionCodec_;
+ /**
+ * <code>optional uint32 compression_codec = 12;</code>
+ */
+ public boolean hasCompressionCodec() {
+ return ((bitField0_ & 0x00000800) == 0x00000800);
+ }
+ /**
+ * <code>optional uint32 compression_codec = 12;</code>
+ */
+ public int getCompressionCodec() {
+ return compressionCodec_;
+ }
+
+ // optional bytes encryption_key = 13;
+ public static final int ENCRYPTION_KEY_FIELD_NUMBER = 13;
+ private com.google.protobuf.ByteString encryptionKey_;
+ /**
+ * <code>optional bytes encryption_key = 13;</code>
+ */
+ public boolean hasEncryptionKey() {
+ return ((bitField0_ & 0x00001000) == 0x00001000);
+ }
+ /**
+ * <code>optional bytes encryption_key = 13;</code>
+ */
+ public com.google.protobuf.ByteString getEncryptionKey() {
+ return encryptionKey_;
+ }
+
+ private void initFields() {
+ fileInfoOffset_ = 0L;
+ loadOnOpenDataOffset_ = 0L;
+ uncompressedDataIndexSize_ = 0L;
+ totalUncompressedBytes_ = 0L;
+ dataIndexCount_ = 0;
+ metaIndexCount_ = 0;
+ entryCount_ = 0L;
+ numDataIndexLevels_ = 0;
+ firstDataBlockOffset_ = 0L;
+ lastDataBlockOffset_ = 0L;
+ comparatorClassName_ = "";
+ compressionCodec_ = 0;
+ encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, fileInfoOffset_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt64(2, loadOnOpenDataOffset_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeUInt64(3, uncompressedDataIndexSize_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeUInt64(4, totalUncompressedBytes_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeUInt32(5, dataIndexCount_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ output.writeUInt32(6, metaIndexCount_);
+ }
+ if (((bitField0_ & 0x00000040) == 0x00000040)) {
+ output.writeUInt64(7, entryCount_);
+ }
+ if (((bitField0_ & 0x00000080) == 0x00000080)) {
+ output.writeUInt32(8, numDataIndexLevels_);
+ }
+ if (((bitField0_ & 0x00000100) == 0x00000100)) {
+ output.writeUInt64(9, firstDataBlockOffset_);
+ }
+ if (((bitField0_ & 0x00000200) == 0x00000200)) {
+ output.writeUInt64(10, lastDataBlockOffset_);
+ }
+ if (((bitField0_ & 0x00000400) == 0x00000400)) {
+ output.writeBytes(11, getComparatorClassNameBytes());
+ }
+ if (((bitField0_ & 0x00000800) == 0x00000800)) {
+ output.writeUInt32(12, compressionCodec_);
+ }
+ if (((bitField0_ & 0x00001000) == 0x00001000)) {
+ output.writeBytes(13, encryptionKey_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, fileInfoOffset_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(2, loadOnOpenDataOffset_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(3, uncompressedDataIndexSize_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(4, totalUncompressedBytes_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(5, dataIndexCount_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(6, metaIndexCount_);
+ }
+ if (((bitField0_ & 0x00000040) == 0x00000040)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(7, entryCount_);
+ }
+ if (((bitField0_ & 0x00000080) == 0x00000080)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(8, numDataIndexLevels_);
+ }
+ if (((bitField0_ & 0x00000100) == 0x00000100)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(9, firstDataBlockOffset_);
+ }
+ if (((bitField0_ & 0x00000200) == 0x00000200)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(10, lastDataBlockOffset_);
+ }
+ if (((bitField0_ & 0x00000400) == 0x00000400)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(11, getComparatorClassNameBytes());
+ }
+ if (((bitField0_ & 0x00000800) == 0x00000800)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(12, compressionCodec_);
+ }
+ if (((bitField0_ & 0x00001000) == 0x00001000)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(13, encryptionKey_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto) obj;
+
+ boolean result = true;
+ result = result && (hasFileInfoOffset() == other.hasFileInfoOffset());
+ if (hasFileInfoOffset()) {
+ result = result && (getFileInfoOffset()
+ == other.getFileInfoOffset());
+ }
+ result = result && (hasLoadOnOpenDataOffset() == other.hasLoadOnOpenDataOffset());
+ if (hasLoadOnOpenDataOffset()) {
+ result = result && (getLoadOnOpenDataOffset()
+ == other.getLoadOnOpenDataOffset());
+ }
+ result = result && (hasUncompressedDataIndexSize() == other.hasUncompressedDataIndexSize());
+ if (hasUncompressedDataIndexSize()) {
+ result = result && (getUncompressedDataIndexSize()
+ == other.getUncompressedDataIndexSize());
+ }
+ result = result && (hasTotalUncompressedBytes() == other.hasTotalUncompressedBytes());
+ if (hasTotalUncompressedBytes()) {
+ result = result && (getTotalUncompressedBytes()
+ == other.getTotalUncompressedBytes());
+ }
+ result = result && (hasDataIndexCount() == other.hasDataIndexCount());
+ if (hasDataIndexCount()) {
+ result = result && (getDataIndexCount()
+ == other.getDataIndexCount());
+ }
+ result = result && (hasMetaIndexCount() == other.hasMetaIndexCount());
+ if (hasMetaIndexCount()) {
+ result = result && (getMetaIndexCount()
+ == other.getMetaIndexCount());
+ }
+ result = result && (hasEntryCount() == other.hasEntryCount());
+ if (hasEntryCount()) {
+ result = result && (getEntryCount()
+ == other.getEntryCount());
+ }
+ result = result && (hasNumDataIndexLevels() == other.hasNumDataIndexLevels());
+ if (hasNumDataIndexLevels()) {
+ result = result && (getNumDataIndexLevels()
+ == other.getNumDataIndexLevels());
+ }
+ result = result && (hasFirstDataBlockOffset() == other.hasFirstDataBlockOffset());
+ if (hasFirstDataBlockOffset()) {
+ result = result && (getFirstDataBlockOffset()
+ == other.getFirstDataBlockOffset());
+ }
+ result = result && (hasLastDataBlockOffset() == other.hasLastDataBlockOffset());
+ if (hasLastDataBlockOffset()) {
+ result = result && (getLastDataBlockOffset()
+ == other.getLastDataBlockOffset());
+ }
+ result = result && (hasComparatorClassName() == other.hasComparatorClassName());
+ if (hasComparatorClassName()) {
+ result = result && getComparatorClassName()
+ .equals(other.getComparatorClassName());
+ }
+ result = result && (hasCompressionCodec() == other.hasCompressionCodec());
+ if (hasCompressionCodec()) {
+ result = result && (getCompressionCodec()
+ == other.getCompressionCodec());
+ }
+ result = result && (hasEncryptionKey() == other.hasEncryptionKey());
+ if (hasEncryptionKey()) {
+ result = result && getEncryptionKey()
+ .equals(other.getEncryptionKey());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasFileInfoOffset()) {
+ hash = (37 * hash) + FILE_INFO_OFFSET_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getFileInfoOffset());
+ }
+ if (hasLoadOnOpenDataOffset()) {
+ hash = (37 * hash) + LOAD_ON_OPEN_DATA_OFFSET_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getLoadOnOpenDataOffset());
+ }
+ if (hasUncompressedDataIndexSize()) {
+ hash = (37 * hash) + UNCOMPRESSED_DATA_INDEX_SIZE_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getUncompressedDataIndexSize());
+ }
+ if (hasTotalUncompressedBytes()) {
+ hash = (37 * hash) + TOTAL_UNCOMPRESSED_BYTES_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getTotalUncompressedBytes());
+ }
+ if (hasDataIndexCount()) {
+ hash = (37 * hash) + DATA_INDEX_COUNT_FIELD_NUMBER;
+ hash = (53 * hash) + getDataIndexCount();
+ }
+ if (hasMetaIndexCount()) {
+ hash = (37 * hash) + META_INDEX_COUNT_FIELD_NUMBER;
+ hash = (53 * hash) + getMetaIndexCount();
+ }
+ if (hasEntryCount()) {
+ hash = (37 * hash) + ENTRY_COUNT_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getEntryCount());
+ }
+ if (hasNumDataIndexLevels()) {
+ hash = (37 * hash) + NUM_DATA_INDEX_LEVELS_FIELD_NUMBER;
+ hash = (53 * hash) + getNumDataIndexLevels();
+ }
+ if (hasFirstDataBlockOffset()) {
+ hash = (37 * hash) + FIRST_DATA_BLOCK_OFFSET_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getFirstDataBlockOffset());
+ }
+ if (hasLastDataBlockOffset()) {
+ hash = (37 * hash) + LAST_DATA_BLOCK_OFFSET_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getLastDataBlockOffset());
+ }
+ if (hasComparatorClassName()) {
+ hash = (37 * hash) + COMPARATOR_CLASS_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getComparatorClassName().hashCode();
+ }
+ if (hasCompressionCodec()) {
+ hash = (37 * hash) + COMPRESSION_CODEC_FIELD_NUMBER;
+ hash = (53 * hash) + getCompressionCodec();
+ }
+ if (hasEncryptionKey()) {
+ hash = (37 * hash) + ENCRYPTION_KEY_FIELD_NUMBER;
+ hash = (53 * hash) + getEncryptionKey().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.FileTrailerProto}
+ *
+ * <pre>
+ * HFile file trailer
+ * </pre>
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ fileInfoOffset_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ loadOnOpenDataOffset_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ uncompressedDataIndexSize_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ totalUncompressedBytes_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ dataIndexCount_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000010);
+ metaIndexCount_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000020);
+ entryCount_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000040);
+ numDataIndexLevels_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000080);
+ firstDataBlockOffset_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000100);
+ lastDataBlockOffset_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000200);
+ comparatorClassName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000400);
+ compressionCodec_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000800);
+ encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00001000);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto build() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto buildPartial() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.fileInfoOffset_ = fileInfoOffset_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.loadOnOpenDataOffset_ = loadOnOpenDataOffset_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.uncompressedDataIndexSize_ = uncompressedDataIndexSize_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.totalUncompressedBytes_ = totalUncompressedBytes_;
+ if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+ to_bitField0_ |= 0x00000010;
+ }
+ result.dataIndexCount_ = dataIndexCount_;
+ if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+ to_bitField0_ |= 0x00000020;
+ }
+ result.metaIndexCount_ = metaIndexCount_;
+ if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+ to_bitField0_ |= 0x00000040;
+ }
+ result.entryCount_ = entryCount_;
+ if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
+ to_bitField0_ |= 0x00000080;
+ }
+ result.numDataIndexLevels_ = numDataIndexLevels_;
+ if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
+ to_bitField0_ |= 0x00000100;
+ }
+ result.firstDataBlockOffset_ = firstDataBlockOffset_;
+ if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
+ to_bitField0_ |= 0x00000200;
+ }
+ result.lastDataBlockOffset_ = lastDataBlockOffset_;
+ if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
+ to_bitField0_ |= 0x00000400;
+ }
+ result.comparatorClassName_ = comparatorClassName_;
+ if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
+ to_bitField0_ |= 0x00000800;
+ }
+ result.compressionCodec_ = compressionCodec_;
+ if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
+ to_bitField0_ |= 0x00001000;
+ }
+ result.encryptionKey_ = encryptionKey_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto) {
+ return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto other) {
+ if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.getDefaultInstance()) return this;
+ if (other.hasFileInfoOffset()) {
+ setFileInfoOffset(other.getFileInfoOffset());
+ }
+ if (other.hasLoadOnOpenDataOffset()) {
+ setLoadOnOpenDataOffset(other.getLoadOnOpenDataOffset());
+ }
+ if (other.hasUncompressedDataIndexSize()) {
+ setUncompressedDataIndexSize(other.getUncompressedDataIndexSize());
+ }
+ if (other.hasTotalUncompressedBytes()) {
+ setTotalUncompressedBytes(other.getTotalUncompressedBytes());
+ }
+ if (other.hasDataIndexCount()) {
+ setDataIndexCount(other.getDataIndexCount());
+ }
+ if (other.hasMetaIndexCount()) {
+ setMetaIndexCount(other.getMetaIndexCount());
+ }
+ if (other.hasEntryCount()) {
+ setEntryCount(other.getEntryCount());
+ }
+ if (other.hasNumDataIndexLevels()) {
+ setNumDataIndexLevels(other.getNumDataIndexLevels());
+ }
+ if (other.hasFirstDataBlockOffset()) {
+ setFirstDataBlockOffset(other.getFirstDataBlockOffset());
+ }
+ if (other.hasLastDataBlockOffset()) {
+ setLastDataBlockOffset(other.getLastDataBlockOffset());
+ }
+ if (other.hasComparatorClassName()) {
+ bitField0_ |= 0x00000400;
+ comparatorClassName_ = other.comparatorClassName_;
+ onChanged();
+ }
+ if (other.hasCompressionCodec()) {
+ setCompressionCodec(other.getCompressionCodec());
+ }
+ if (other.hasEncryptionKey()) {
+ setEncryptionKey(other.getEncryptionKey());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional uint64 file_info_offset = 1;
+ private long fileInfoOffset_ ;
+ /**
+ * <code>optional uint64 file_info_offset = 1;</code>
+ */
+ public boolean hasFileInfoOffset() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional uint64 file_info_offset = 1;</code>
+ */
+ public long getFileInfoOffset() {
+ return fileInfoOffset_;
+ }
+ /**
+ * <code>optional uint64 file_info_offset = 1;</code>
+ */
+ public Builder setFileInfoOffset(long value) {
+ bitField0_ |= 0x00000001;
+ fileInfoOffset_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 file_info_offset = 1;</code>
+ */
+ public Builder clearFileInfoOffset() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ fileInfoOffset_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 load_on_open_data_offset = 2;
+ // Presence tracked via bit 0x00000002 of bitField0_.
+ private long loadOnOpenDataOffset_ ;
+ /**
+ * <code>optional uint64 load_on_open_data_offset = 2;</code>
+ */
+ public boolean hasLoadOnOpenDataOffset() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional uint64 load_on_open_data_offset = 2;</code>
+ */
+ public long getLoadOnOpenDataOffset() {
+ return loadOnOpenDataOffset_;
+ }
+ /**
+ * <code>optional uint64 load_on_open_data_offset = 2;</code>
+ */
+ public Builder setLoadOnOpenDataOffset(long value) {
+ bitField0_ |= 0x00000002;
+ loadOnOpenDataOffset_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 load_on_open_data_offset = 2;</code>
+ */
+ public Builder clearLoadOnOpenDataOffset() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ loadOnOpenDataOffset_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 uncompressed_data_index_size = 3;
+ // Presence tracked via bit 0x00000004 of bitField0_.
+ private long uncompressedDataIndexSize_ ;
+ /**
+ * <code>optional uint64 uncompressed_data_index_size = 3;</code>
+ */
+ public boolean hasUncompressedDataIndexSize() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional uint64 uncompressed_data_index_size = 3;</code>
+ */
+ public long getUncompressedDataIndexSize() {
+ return uncompressedDataIndexSize_;
+ }
+ /**
+ * <code>optional uint64 uncompressed_data_index_size = 3;</code>
+ */
+ public Builder setUncompressedDataIndexSize(long value) {
+ bitField0_ |= 0x00000004;
+ uncompressedDataIndexSize_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 uncompressed_data_index_size = 3;</code>
+ */
+ public Builder clearUncompressedDataIndexSize() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ uncompressedDataIndexSize_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 total_uncompressed_bytes = 4;
+ // Presence tracked via bit 0x00000008 of bitField0_.
+ private long totalUncompressedBytes_ ;
+ /**
+ * <code>optional uint64 total_uncompressed_bytes = 4;</code>
+ */
+ public boolean hasTotalUncompressedBytes() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional uint64 total_uncompressed_bytes = 4;</code>
+ */
+ public long getTotalUncompressedBytes() {
+ return totalUncompressedBytes_;
+ }
+ /**
+ * <code>optional uint64 total_uncompressed_bytes = 4;</code>
+ */
+ public Builder setTotalUncompressedBytes(long value) {
+ bitField0_ |= 0x00000008;
+ totalUncompressedBytes_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 total_uncompressed_bytes = 4;</code>
+ */
+ public Builder clearTotalUncompressedBytes() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ totalUncompressedBytes_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional uint32 data_index_count = 5;
+ // Presence tracked via bit 0x00000010 of bitField0_.
+ private int dataIndexCount_ ;
+ /**
+ * <code>optional uint32 data_index_count = 5;</code>
+ */
+ public boolean hasDataIndexCount() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional uint32 data_index_count = 5;</code>
+ */
+ public int getDataIndexCount() {
+ return dataIndexCount_;
+ }
+ /**
+ * <code>optional uint32 data_index_count = 5;</code>
+ */
+ public Builder setDataIndexCount(int value) {
+ bitField0_ |= 0x00000010;
+ dataIndexCount_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint32 data_index_count = 5;</code>
+ */
+ public Builder clearDataIndexCount() {
+ bitField0_ = (bitField0_ & ~0x00000010);
+ dataIndexCount_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional uint32 meta_index_count = 6;
+ // Presence tracked via bit 0x00000020 of bitField0_.
+ private int metaIndexCount_ ;
+ /**
+ * <code>optional uint32 meta_index_count = 6;</code>
+ */
+ public boolean hasMetaIndexCount() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * <code>optional uint32 meta_index_count = 6;</code>
+ */
+ public int getMetaIndexCount() {
+ return metaIndexCount_;
+ }
+ /**
+ * <code>optional uint32 meta_index_count = 6;</code>
+ */
+ public Builder setMetaIndexCount(int value) {
+ bitField0_ |= 0x00000020;
+ metaIndexCount_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint32 meta_index_count = 6;</code>
+ */
+ public Builder clearMetaIndexCount() {
+ bitField0_ = (bitField0_ & ~0x00000020);
+ metaIndexCount_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 entry_count = 7;
+ // Presence tracked via bit 0x00000040 of bitField0_.
+ private long entryCount_ ;
+ /**
+ * <code>optional uint64 entry_count = 7;</code>
+ */
+ public boolean hasEntryCount() {
+ return ((bitField0_ & 0x00000040) == 0x00000040);
+ }
+ /**
+ * <code>optional uint64 entry_count = 7;</code>
+ */
+ public long getEntryCount() {
+ return entryCount_;
+ }
+ /**
+ * <code>optional uint64 entry_count = 7;</code>
+ */
+ public Builder setEntryCount(long value) {
+ bitField0_ |= 0x00000040;
+ entryCount_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 entry_count = 7;</code>
+ */
+ public Builder clearEntryCount() {
+ bitField0_ = (bitField0_ & ~0x00000040);
+ entryCount_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional uint32 num_data_index_levels = 8;
+ // Presence tracked via bit 0x00000080 of bitField0_.
+ private int numDataIndexLevels_ ;
+ /**
+ * <code>optional uint32 num_data_index_levels = 8;</code>
+ */
+ public boolean hasNumDataIndexLevels() {
+ return ((bitField0_ & 0x00000080) == 0x00000080);
+ }
+ /**
+ * <code>optional uint32 num_data_index_levels = 8;</code>
+ */
+ public int getNumDataIndexLevels() {
+ return numDataIndexLevels_;
+ }
+ /**
+ * <code>optional uint32 num_data_index_levels = 8;</code>
+ */
+ public Builder setNumDataIndexLevels(int value) {
+ bitField0_ |= 0x00000080;
+ numDataIndexLevels_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint32 num_data_index_levels = 8;</code>
+ */
+ public Builder clearNumDataIndexLevels() {
+ bitField0_ = (bitField0_ & ~0x00000080);
+ numDataIndexLevels_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 first_data_block_offset = 9;
+ // Presence tracked via bit 0x00000100 of bitField0_.
+ private long firstDataBlockOffset_ ;
+ /**
+ * <code>optional uint64 first_data_block_offset = 9;</code>
+ */
+ public boolean hasFirstDataBlockOffset() {
+ return ((bitField0_ & 0x00000100) == 0x00000100);
+ }
+ /**
+ * <code>optional uint64 first_data_block_offset = 9;</code>
+ */
+ public long getFirstDataBlockOffset() {
+ return firstDataBlockOffset_;
+ }
+ /**
+ * <code>optional uint64 first_data_block_offset = 9;</code>
+ */
+ public Builder setFirstDataBlockOffset(long value) {
+ bitField0_ |= 0x00000100;
+ firstDataBlockOffset_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 first_data_block_offset = 9;</code>
+ */
+ public Builder clearFirstDataBlockOffset() {
+ bitField0_ = (bitField0_ & ~0x00000100);
+ firstDataBlockOffset_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 last_data_block_offset = 10;
+ // Presence tracked via bit 0x00000200 of bitField0_.
+ private long lastDataBlockOffset_ ;
+ /**
+ * <code>optional uint64 last_data_block_offset = 10;</code>
+ */
+ public boolean hasLastDataBlockOffset() {
+ return ((bitField0_ & 0x00000200) == 0x00000200);
+ }
+ /**
+ * <code>optional uint64 last_data_block_offset = 10;</code>
+ */
+ public long getLastDataBlockOffset() {
+ return lastDataBlockOffset_;
+ }
+ /**
+ * <code>optional uint64 last_data_block_offset = 10;</code>
+ */
+ public Builder setLastDataBlockOffset(long value) {
+ bitField0_ |= 0x00000200;
+ lastDataBlockOffset_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 last_data_block_offset = 10;</code>
+ */
+ public Builder clearLastDataBlockOffset() {
+ bitField0_ = (bitField0_ & ~0x00000200);
+ lastDataBlockOffset_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional string comparator_class_name = 11;
+ // Presence tracked via bit 0x00000400 of bitField0_. Stored as Object so it
+ // can hold either a java.lang.String or a protobuf ByteString: each getter
+ // lazily converts to its preferred representation and caches the result.
+ private java.lang.Object comparatorClassName_ = "";
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ public boolean hasComparatorClassName() {
+ return ((bitField0_ & 0x00000400) == 0x00000400);
+ }
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ public java.lang.String getComparatorClassName() {
+ java.lang.Object ref = comparatorClassName_;
+ if (!(ref instanceof java.lang.String)) {
+ // Currently a ByteString: decode as UTF-8 and cache the String form.
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ comparatorClassName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ public com.google.protobuf.ByteString
+ getComparatorClassNameBytes() {
+ java.lang.Object ref = comparatorClassName_;
+ if (ref instanceof String) {
+ // Currently a String: encode as UTF-8 and cache the ByteString form.
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ comparatorClassName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ public Builder setComparatorClassName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000400;
+ comparatorClassName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ public Builder clearComparatorClassName() {
+ bitField0_ = (bitField0_ & ~0x00000400);
+ comparatorClassName_ = getDefaultInstance().getComparatorClassName();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string comparator_class_name = 11;</code>
+ */
+ public Builder setComparatorClassNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000400;
+ comparatorClassName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional uint32 compression_codec = 12;
+ // Presence tracked via bit 0x00000800 of bitField0_.
+ private int compressionCodec_ ;
+ /**
+ * <code>optional uint32 compression_codec = 12;</code>
+ */
+ public boolean hasCompressionCodec() {
+ return ((bitField0_ & 0x00000800) == 0x00000800);
+ }
+ /**
+ * <code>optional uint32 compression_codec = 12;</code>
+ */
+ public int getCompressionCodec() {
+ return compressionCodec_;
+ }
+ /**
+ * <code>optional uint32 compression_codec = 12;</code>
+ */
+ public Builder setCompressionCodec(int value) {
+ bitField0_ |= 0x00000800;
+ compressionCodec_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint32 compression_codec = 12;</code>
+ */
+ public Builder clearCompressionCodec() {
+ bitField0_ = (bitField0_ & ~0x00000800);
+ compressionCodec_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional bytes encryption_key = 13;
+ // Presence tracked via bit 0x00001000 of bitField0_; proto default is the
+ // empty ByteString.
+ private com.google.protobuf.ByteString encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>optional bytes encryption_key = 13;</code>
+ */
+ public boolean hasEncryptionKey() {
+ return ((bitField0_ & 0x00001000) == 0x00001000);
+ }
+ /**
+ * <code>optional bytes encryption_key = 13;</code>
+ */
+ public com.google.protobuf.ByteString getEncryptionKey() {
+ return encryptionKey_;
+ }
+ /**
+ * <code>optional bytes encryption_key = 13;</code>
+ */
+ public Builder setEncryptionKey(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00001000;
+ encryptionKey_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional bytes encryption_key = 13;</code>
+ */
+ public Builder clearEncryptionKey() {
+ bitField0_ = (bitField0_ & ~0x00001000);
+ encryptionKey_ = getDefaultInstance().getEncryptionKey();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.FileTrailerProto)
+ }
+
+ // Eagerly build the shared default instance returned by
+ // getDefaultInstance(); initFields() sets each field to its proto default.
+ static {
+ defaultInstance = new FileTrailerProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.FileTrailerProto)
+ }
+
+ // Reflection plumbing: per-message descriptors and field-accessor tables,
+ // populated by the static descriptor initializer below.
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_FileInfoProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_FileInfoProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_FileTrailerProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable;
+
+ /** Returns the FileDescriptor for HFile.proto (descriptor data below). */
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\013HFile.proto\022\010hbase.pb\032\013HBase.proto\"<\n\r" +
+ "FileInfoProto\022+\n\tmap_entry\030\001 \003(\0132\030.hbase" +
+ ".pb.BytesBytesPair\"\221\003\n\020FileTrailerProto\022" +
+ "\030\n\020file_info_offset\030\001 \001(\004\022 \n\030load_on_ope" +
+ "n_data_offset\030\002 \001(\004\022$\n\034uncompressed_data" +
+ "_index_size\030\003 \001(\004\022 \n\030total_uncompressed_" +
+ "bytes\030\004 \001(\004\022\030\n\020data_i
<TRUNCATED>