Posted to commits@hbase.apache.org by mb...@apache.org on 2014/09/18 23:45:21 UTC
[4/7] HBASE-11598 Add simple rpc throttling
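For orientation, the file below is the protoc-generated Java for Quota.proto (QuotaScope, ThrottleType, QuotaType, TimedQuota, Throttle). A minimal sketch of how the generated builders are typically used follows; the class name QuotaProtosExample, the TimeUnit value SECONDS, and the Throttle.Builder setter setReqNum are assumptions based on the standard protobuf-generated API, not part of this commit.

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
    import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos;

    public class QuotaProtosExample {
      public static void main(String[] args) {
        // Build a MACHINE-scoped quota of 1000 requests per time unit.
        // SECONDS is assumed to be one of the HBaseProtos.TimeUnit values.
        QuotaProtos.TimedQuota reqNum = QuotaProtos.TimedQuota.newBuilder()
            .setTimeUnit(HBaseProtos.TimeUnit.SECONDS) // required .TimeUnit time_unit = 1
            .setSoftLimit(1000L)                       // optional uint64 soft_limit = 2
            .setScope(QuotaProtos.QuotaScope.MACHINE)  // optional, MACHINE is the default
            .build();

        // Attach it to a Throttle message as the req_num limit; setReqNum is the
        // usual generated setter for the optional .TimedQuota req_num = 1 field.
        QuotaProtos.Throttle throttle = QuotaProtos.Throttle.newBuilder()
            .setReqNum(reqNum)
            .build();

        System.out.println(throttle);
      }
    }
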
http://git-wip-us.apache.org/repos/asf/hbase/blob/bd8df9cc/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/QuotaProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/QuotaProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/QuotaProtos.java
new file mode 100644
index 0000000..5eac192
--- /dev/null
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/QuotaProtos.java
@@ -0,0 +1,4378 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: Quota.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class QuotaProtos {
+ private QuotaProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ /**
+ * Protobuf enum {@code QuotaScope}
+ */
+ public enum QuotaScope
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * <code>CLUSTER = 1;</code>
+ */
+ CLUSTER(0, 1),
+ /**
+ * <code>MACHINE = 2;</code>
+ */
+ MACHINE(1, 2),
+ ;
+
+ /**
+ * <code>CLUSTER = 1;</code>
+ */
+ public static final int CLUSTER_VALUE = 1;
+ /**
+ * <code>MACHINE = 2;</code>
+ */
+ public static final int MACHINE_VALUE = 2;
+
+
+ public final int getNumber() { return value; }
+
+ public static QuotaScope valueOf(int value) {
+ switch (value) {
+ case 1: return CLUSTER;
+ case 2: return MACHINE;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<QuotaScope>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<QuotaScope>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<QuotaScope>() {
+ public QuotaScope findValueByNumber(int number) {
+ return QuotaScope.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final QuotaScope[] VALUES = values();
+
+ public static QuotaScope valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private QuotaScope(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:QuotaScope)
+ }
+
+ /**
+ * Protobuf enum {@code ThrottleType}
+ */
+ public enum ThrottleType
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * <code>REQUEST_NUMBER = 1;</code>
+ */
+ REQUEST_NUMBER(0, 1),
+ /**
+ * <code>REQUEST_SIZE = 2;</code>
+ */
+ REQUEST_SIZE(1, 2),
+ /**
+ * <code>WRITE_NUMBER = 3;</code>
+ */
+ WRITE_NUMBER(2, 3),
+ /**
+ * <code>WRITE_SIZE = 4;</code>
+ */
+ WRITE_SIZE(3, 4),
+ /**
+ * <code>READ_NUMBER = 5;</code>
+ */
+ READ_NUMBER(4, 5),
+ /**
+ * <code>READ_SIZE = 6;</code>
+ */
+ READ_SIZE(5, 6),
+ ;
+
+ /**
+ * <code>REQUEST_NUMBER = 1;</code>
+ */
+ public static final int REQUEST_NUMBER_VALUE = 1;
+ /**
+ * <code>REQUEST_SIZE = 2;</code>
+ */
+ public static final int REQUEST_SIZE_VALUE = 2;
+ /**
+ * <code>WRITE_NUMBER = 3;</code>
+ */
+ public static final int WRITE_NUMBER_VALUE = 3;
+ /**
+ * <code>WRITE_SIZE = 4;</code>
+ */
+ public static final int WRITE_SIZE_VALUE = 4;
+ /**
+ * <code>READ_NUMBER = 5;</code>
+ */
+ public static final int READ_NUMBER_VALUE = 5;
+ /**
+ * <code>READ_SIZE = 6;</code>
+ */
+ public static final int READ_SIZE_VALUE = 6;
+
+
+ public final int getNumber() { return value; }
+
+ public static ThrottleType valueOf(int value) {
+ switch (value) {
+ case 1: return REQUEST_NUMBER;
+ case 2: return REQUEST_SIZE;
+ case 3: return WRITE_NUMBER;
+ case 4: return WRITE_SIZE;
+ case 5: return READ_NUMBER;
+ case 6: return READ_SIZE;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<ThrottleType>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<ThrottleType>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<ThrottleType>() {
+ public ThrottleType findValueByNumber(int number) {
+ return ThrottleType.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.getDescriptor().getEnumTypes().get(1);
+ }
+
+ private static final ThrottleType[] VALUES = values();
+
+ public static ThrottleType valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private ThrottleType(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:ThrottleType)
+ }
+
+ /**
+ * Protobuf enum {@code QuotaType}
+ */
+ public enum QuotaType
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * <code>THROTTLE = 1;</code>
+ */
+ THROTTLE(0, 1),
+ ;
+
+ /**
+ * <code>THROTTLE = 1;</code>
+ */
+ public static final int THROTTLE_VALUE = 1;
+
+
+ public final int getNumber() { return value; }
+
+ public static QuotaType valueOf(int value) {
+ switch (value) {
+ case 1: return THROTTLE;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<QuotaType>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<QuotaType>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<QuotaType>() {
+ public QuotaType findValueByNumber(int number) {
+ return QuotaType.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.getDescriptor().getEnumTypes().get(2);
+ }
+
+ private static final QuotaType[] VALUES = values();
+
+ public static QuotaType valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private QuotaType(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:QuotaType)
+ }
+
+ public interface TimedQuotaOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .TimeUnit time_unit = 1;
+ /**
+ * <code>required .TimeUnit time_unit = 1;</code>
+ */
+ boolean hasTimeUnit();
+ /**
+ * <code>required .TimeUnit time_unit = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit();
+
+ // optional uint64 soft_limit = 2;
+ /**
+ * <code>optional uint64 soft_limit = 2;</code>
+ */
+ boolean hasSoftLimit();
+ /**
+ * <code>optional uint64 soft_limit = 2;</code>
+ */
+ long getSoftLimit();
+
+ // optional float share = 3;
+ /**
+ * <code>optional float share = 3;</code>
+ */
+ boolean hasShare();
+ /**
+ * <code>optional float share = 3;</code>
+ */
+ float getShare();
+
+ // optional .QuotaScope scope = 4 [default = MACHINE];
+ /**
+ * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
+ */
+ boolean hasScope();
+ /**
+ * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope getScope();
+ }
+ /**
+ * Protobuf type {@code TimedQuota}
+ */
+ public static final class TimedQuota extends
+ com.google.protobuf.GeneratedMessage
+ implements TimedQuotaOrBuilder {
+ // Use TimedQuota.newBuilder() to construct.
+ private TimedQuota(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private TimedQuota(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final TimedQuota defaultInstance;
+ public static TimedQuota getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public TimedQuota getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private TimedQuota(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(1, rawValue);
+ } else {
+ bitField0_ |= 0x00000001;
+ timeUnit_ = value;
+ }
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ softLimit_ = input.readUInt64();
+ break;
+ }
+ case 29: {
+ bitField0_ |= 0x00000004;
+ share_ = input.readFloat();
+ break;
+ }
+ case 32: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope value = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(4, rawValue);
+ } else {
+ bitField0_ |= 0x00000008;
+ scope_ = value;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_TimedQuota_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_TimedQuota_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<TimedQuota> PARSER =
+ new com.google.protobuf.AbstractParser<TimedQuota>() {
+ public TimedQuota parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new TimedQuota(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<TimedQuota> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required .TimeUnit time_unit = 1;
+ public static final int TIME_UNIT_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit timeUnit_;
+ /**
+ * <code>required .TimeUnit time_unit = 1;</code>
+ */
+ public boolean hasTimeUnit() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required .TimeUnit time_unit = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit() {
+ return timeUnit_;
+ }
+
+ // optional uint64 soft_limit = 2;
+ public static final int SOFT_LIMIT_FIELD_NUMBER = 2;
+ private long softLimit_;
+ /**
+ * <code>optional uint64 soft_limit = 2;</code>
+ */
+ public boolean hasSoftLimit() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional uint64 soft_limit = 2;</code>
+ */
+ public long getSoftLimit() {
+ return softLimit_;
+ }
+
+ // optional float share = 3;
+ public static final int SHARE_FIELD_NUMBER = 3;
+ private float share_;
+ /**
+ * <code>optional float share = 3;</code>
+ */
+ public boolean hasShare() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional float share = 3;</code>
+ */
+ public float getShare() {
+ return share_;
+ }
+
+ // optional .QuotaScope scope = 4 [default = MACHINE];
+ public static final int SCOPE_FIELD_NUMBER = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope scope_;
+ /**
+ * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
+ */
+ public boolean hasScope() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope getScope() {
+ return scope_;
+ }
+
+ private void initFields() {
+ timeUnit_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS;
+ softLimit_ = 0L;
+ share_ = 0F;
+ scope_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope.MACHINE;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasTimeUnit()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeEnum(1, timeUnit_.getNumber());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt64(2, softLimit_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeFloat(3, share_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeEnum(4, scope_.getNumber());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(1, timeUnit_.getNumber());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(2, softLimit_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeFloatSize(3, share_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(4, scope_.getNumber());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota other = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota) obj;
+
+ boolean result = true;
+ result = result && (hasTimeUnit() == other.hasTimeUnit());
+ if (hasTimeUnit()) {
+ result = result &&
+ (getTimeUnit() == other.getTimeUnit());
+ }
+ result = result && (hasSoftLimit() == other.hasSoftLimit());
+ if (hasSoftLimit()) {
+ result = result && (getSoftLimit()
+ == other.getSoftLimit());
+ }
+ result = result && (hasShare() == other.hasShare());
+ if (hasShare()) {
+ result = result && (Float.floatToIntBits(getShare()) == Float.floatToIntBits(other.getShare()));
+ }
+ result = result && (hasScope() == other.hasScope());
+ if (hasScope()) {
+ result = result &&
+ (getScope() == other.getScope());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasTimeUnit()) {
+ hash = (37 * hash) + TIME_UNIT_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnum(getTimeUnit());
+ }
+ if (hasSoftLimit()) {
+ hash = (37 * hash) + SOFT_LIMIT_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getSoftLimit());
+ }
+ if (hasShare()) {
+ hash = (37 * hash) + SHARE_FIELD_NUMBER;
+ hash = (53 * hash) + Float.floatToIntBits(
+ getShare());
+ }
+ if (hasScope()) {
+ hash = (37 * hash) + SCOPE_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnum(getScope());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code TimedQuota}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_TimedQuota_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_TimedQuota_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ timeUnit_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ softLimit_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ share_ = 0F;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ scope_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope.MACHINE;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_TimedQuota_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota build() {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota result = new org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.timeUnit_ = timeUnit_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.softLimit_ = softLimit_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.share_ = share_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.scope_ = scope_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) return this;
+ if (other.hasTimeUnit()) {
+ setTimeUnit(other.getTimeUnit());
+ }
+ if (other.hasSoftLimit()) {
+ setSoftLimit(other.getSoftLimit());
+ }
+ if (other.hasShare()) {
+ setShare(other.getShare());
+ }
+ if (other.hasScope()) {
+ setScope(other.getScope());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasTimeUnit()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required .TimeUnit time_unit = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit timeUnit_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS;
+ /**
+ * <code>required .TimeUnit time_unit = 1;</code>
+ */
+ public boolean hasTimeUnit() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required .TimeUnit time_unit = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit() {
+ return timeUnit_;
+ }
+ /**
+ * <code>required .TimeUnit time_unit = 1;</code>
+ */
+ public Builder setTimeUnit(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ timeUnit_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required .TimeUnit time_unit = 1;</code>
+ */
+ public Builder clearTimeUnit() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ timeUnit_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 soft_limit = 2;
+ private long softLimit_ ;
+ /**
+ * <code>optional uint64 soft_limit = 2;</code>
+ */
+ public boolean hasSoftLimit() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional uint64 soft_limit = 2;</code>
+ */
+ public long getSoftLimit() {
+ return softLimit_;
+ }
+ /**
+ * <code>optional uint64 soft_limit = 2;</code>
+ */
+ public Builder setSoftLimit(long value) {
+ bitField0_ |= 0x00000002;
+ softLimit_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 soft_limit = 2;</code>
+ */
+ public Builder clearSoftLimit() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ softLimit_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional float share = 3;
+ private float share_ ;
+ /**
+ * <code>optional float share = 3;</code>
+ */
+ public boolean hasShare() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional float share = 3;</code>
+ */
+ public float getShare() {
+ return share_;
+ }
+ /**
+ * <code>optional float share = 3;</code>
+ */
+ public Builder setShare(float value) {
+ bitField0_ |= 0x00000004;
+ share_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional float share = 3;</code>
+ */
+ public Builder clearShare() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ share_ = 0F;
+ onChanged();
+ return this;
+ }
+
+ // optional .QuotaScope scope = 4 [default = MACHINE];
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope scope_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope.MACHINE;
+ /**
+ * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
+ */
+ public boolean hasScope() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope getScope() {
+ return scope_;
+ }
+ /**
+ * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
+ */
+ public Builder setScope(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000008;
+ scope_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
+ */
+ public Builder clearScope() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ scope_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope.MACHINE;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:TimedQuota)
+ }
+
+ static {
+ defaultInstance = new TimedQuota(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:TimedQuota)
+ }
+
+ public interface ThrottleOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional .TimedQuota req_num = 1;
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ boolean hasReqNum();
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqNum();
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqNumOrBuilder();
+
+ // optional .TimedQuota req_size = 2;
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ boolean hasReqSize();
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqSize();
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqSizeOrBuilder();
+
+ // optional .TimedQuota write_num = 3;
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ boolean hasWriteNum();
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteNum();
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteNumOrBuilder();
+
+ // optional .TimedQuota write_size = 4;
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ boolean hasWriteSize();
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteSize();
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteSizeOrBuilder();
+
+ // optional .TimedQuota read_num = 5;
+ /**
+ * <code>optional .TimedQuota read_num = 5;</code>
+ */
+ boolean hasReadNum();
+ /**
+ * <code>optional .TimedQuota read_num = 5;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReadNum();
+ /**
+ * <code>optional .TimedQuota read_num = 5;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadNumOrBuilder();
+
+ // optional .TimedQuota read_size = 6;
+ /**
+ * <code>optional .TimedQuota read_size = 6;</code>
+ */
+ boolean hasReadSize();
+ /**
+ * <code>optional .TimedQuota read_size = 6;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReadSize();
+ /**
+ * <code>optional .TimedQuota read_size = 6;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadSizeOrBuilder();
+ }
+ /**
+ * Protobuf type {@code Throttle}
+ */
+ public static final class Throttle extends
+ com.google.protobuf.GeneratedMessage
+ implements ThrottleOrBuilder {
+ // Use Throttle.newBuilder() to construct.
+ private Throttle(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private Throttle(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final Throttle defaultInstance;
+ public static Throttle getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public Throttle getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private Throttle(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = reqNum_.toBuilder();
+ }
+ reqNum_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(reqNum_);
+ reqNum_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ subBuilder = reqSize_.toBuilder();
+ }
+ reqSize_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(reqSize_);
+ reqSize_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000002;
+ break;
+ }
+ case 26: {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ subBuilder = writeNum_.toBuilder();
+ }
+ writeNum_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(writeNum_);
+ writeNum_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000004;
+ break;
+ }
+ case 34: {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ subBuilder = writeSize_.toBuilder();
+ }
+ writeSize_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(writeSize_);
+ writeSize_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000008;
+ break;
+ }
+ case 42: {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ subBuilder = readNum_.toBuilder();
+ }
+ readNum_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(readNum_);
+ readNum_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000010;
+ break;
+ }
+ case 50: {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ subBuilder = readSize_.toBuilder();
+ }
+ readSize_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(readSize_);
+ readSize_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000020;
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Throttle_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Throttle_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<Throttle> PARSER =
+ new com.google.protobuf.AbstractParser<Throttle>() {
+ public Throttle parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new Throttle(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<Throttle> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional .TimedQuota req_num = 1;
+ public static final int REQ_NUM_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota reqNum_;
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ public boolean hasReqNum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqNum() {
+ return reqNum_;
+ }
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqNumOrBuilder() {
+ return reqNum_;
+ }
+
+ // optional .TimedQuota req_size = 2;
+ public static final int REQ_SIZE_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota reqSize_;
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ public boolean hasReqSize() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqSize() {
+ return reqSize_;
+ }
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqSizeOrBuilder() {
+ return reqSize_;
+ }
+
+ // optional .TimedQuota write_num = 3;
+ public static final int WRITE_NUM_FIELD_NUMBER = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota writeNum_;
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ public boolean hasWriteNum() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteNum() {
+ return writeNum_;
+ }
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteNumOrBuilder() {
+ return writeNum_;
+ }
+
+ // optional .TimedQuota write_size = 4;
+ public static final int WRITE_SIZE_FIELD_NUMBER = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota writeSize_;
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ public boolean hasWriteSize() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteSize() {
+ return writeSize_;
+ }
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteSizeOrBuilder() {
+ return writeSize_;
+ }
+
+ // optional .TimedQuota read_num = 5;
+ public static final int READ_NUM_FIELD_NUMBER = 5;
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota readNum_;
+ /**
+ * <code>optional .TimedQuota read_num = 5;</code>
+ */
+ public boolean hasReadNum() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional .TimedQuota read_num = 5;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReadNum() {
+ return readNum_;
+ }
+ /**
+ * <code>optional .TimedQuota read_num = 5;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadNumOrBuilder() {
+ return readNum_;
+ }
+
+ // optional .TimedQuota read_size = 6;
+ public static final int READ_SIZE_FIELD_NUMBER = 6;
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota readSize_;
+ /**
+ * <code>optional .TimedQuota read_size = 6;</code>
+ */
+ public boolean hasReadSize() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * <code>optional .TimedQuota read_size = 6;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReadSize() {
+ return readSize_;
+ }
+ /**
+ * <code>optional .TimedQuota read_size = 6;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadSizeOrBuilder() {
+ return readSize_;
+ }
+
+ private void initFields() {
+ reqNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ reqSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ writeNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ writeSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ readNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ readSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (hasReqNum()) {
+ if (!getReqNum().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ if (hasReqSize()) {
+ if (!getReqSize().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ if (hasWriteNum()) {
+ if (!getWriteNum().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ if (hasWriteSize()) {
+ if (!getWriteSize().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ if (hasReadNum()) {
+ if (!getReadNum().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ if (hasReadSize()) {
+ if (!getReadSize().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, reqNum_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, reqSize_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeMessage(3, writeNum_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeMessage(4, writeSize_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeMessage(5, readNum_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ output.writeMessage(6, readSize_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, reqNum_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, reqSize_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, writeNum_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(4, writeSize_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(5, readNum_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(6, readSize_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle other = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle) obj;
+
+ boolean result = true;
+ result = result && (hasReqNum() == other.hasReqNum());
+ if (hasReqNum()) {
+ result = result && getReqNum()
+ .equals(other.getReqNum());
+ }
+ result = result && (hasReqSize() == other.hasReqSize());
+ if (hasReqSize()) {
+ result = result && getReqSize()
+ .equals(other.getReqSize());
+ }
+ result = result && (hasWriteNum() == other.hasWriteNum());
+ if (hasWriteNum()) {
+ result = result && getWriteNum()
+ .equals(other.getWriteNum());
+ }
+ result = result && (hasWriteSize() == other.hasWriteSize());
+ if (hasWriteSize()) {
+ result = result && getWriteSize()
+ .equals(other.getWriteSize());
+ }
+ result = result && (hasReadNum() == other.hasReadNum());
+ if (hasReadNum()) {
+ result = result && getReadNum()
+ .equals(other.getReadNum());
+ }
+ result = result && (hasReadSize() == other.hasReadSize());
+ if (hasReadSize()) {
+ result = result && getReadSize()
+ .equals(other.getReadSize());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasReqNum()) {
+ hash = (37 * hash) + REQ_NUM_FIELD_NUMBER;
+ hash = (53 * hash) + getReqNum().hashCode();
+ }
+ if (hasReqSize()) {
+ hash = (37 * hash) + REQ_SIZE_FIELD_NUMBER;
+ hash = (53 * hash) + getReqSize().hashCode();
+ }
+ if (hasWriteNum()) {
+ hash = (37 * hash) + WRITE_NUM_FIELD_NUMBER;
+ hash = (53 * hash) + getWriteNum().hashCode();
+ }
+ if (hasWriteSize()) {
+ hash = (37 * hash) + WRITE_SIZE_FIELD_NUMBER;
+ hash = (53 * hash) + getWriteSize().hashCode();
+ }
+ if (hasReadNum()) {
+ hash = (37 * hash) + READ_NUM_FIELD_NUMBER;
+ hash = (53 * hash) + getReadNum().hashCode();
+ }
+ if (hasReadSize()) {
+ hash = (37 * hash) + READ_SIZE_FIELD_NUMBER;
+ hash = (53 * hash) + getReadSize().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code Throttle}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Throttle_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Throttle_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getReqNumFieldBuilder();
+ getReqSizeFieldBuilder();
+ getWriteNumFieldBuilder();
+ getWriteSizeFieldBuilder();
+ getReadNumFieldBuilder();
+ getReadSizeFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (reqNumBuilder_ == null) {
+ reqNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ } else {
+ reqNumBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (reqSizeBuilder_ == null) {
+ reqSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ } else {
+ reqSizeBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ if (writeNumBuilder_ == null) {
+ writeNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ } else {
+ writeNumBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ if (writeSizeBuilder_ == null) {
+ writeSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ } else {
+ writeSizeBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000008);
+ if (readNumBuilder_ == null) {
+ readNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ } else {
+ readNumBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000010);
+ if (readSizeBuilder_ == null) {
+ readSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ } else {
+ readSizeBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000020);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Throttle_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle build() {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle result = new org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (reqNumBuilder_ == null) {
+ result.reqNum_ = reqNum_;
+ } else {
+ result.reqNum_ = reqNumBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ if (reqSizeBuilder_ == null) {
+ result.reqSize_ = reqSize_;
+ } else {
+ result.reqSize_ = reqSizeBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ if (writeNumBuilder_ == null) {
+ result.writeNum_ = writeNum_;
+ } else {
+ result.writeNum_ = writeNumBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ if (writeSizeBuilder_ == null) {
+ result.writeSize_ = writeSize_;
+ } else {
+ result.writeSize_ = writeSizeBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+ to_bitField0_ |= 0x00000010;
+ }
+ if (readNumBuilder_ == null) {
+ result.readNum_ = readNum_;
+ } else {
+ result.readNum_ = readNumBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+ to_bitField0_ |= 0x00000020;
+ }
+ if (readSizeBuilder_ == null) {
+ result.readSize_ = readSize_;
+ } else {
+ result.readSize_ = readSizeBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance()) return this;
+ if (other.hasReqNum()) {
+ mergeReqNum(other.getReqNum());
+ }
+ if (other.hasReqSize()) {
+ mergeReqSize(other.getReqSize());
+ }
+ if (other.hasWriteNum()) {
+ mergeWriteNum(other.getWriteNum());
+ }
+ if (other.hasWriteSize()) {
+ mergeWriteSize(other.getWriteSize());
+ }
+ if (other.hasReadNum()) {
+ mergeReadNum(other.getReadNum());
+ }
+ if (other.hasReadSize()) {
+ mergeReadSize(other.getReadSize());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (hasReqNum()) {
+ if (!getReqNum().isInitialized()) {
+
+ return false;
+ }
+ }
+ if (hasReqSize()) {
+ if (!getReqSize().isInitialized()) {
+
+ return false;
+ }
+ }
+ if (hasWriteNum()) {
+ if (!getWriteNum().isInitialized()) {
+
+ return false;
+ }
+ }
+ if (hasWriteSize()) {
+ if (!getWriteSize().isInitialized()) {
+
+ return false;
+ }
+ }
+ if (hasReadNum()) {
+ if (!getReadNum().isInitialized()) {
+
+ return false;
+ }
+ }
+ if (hasReadSize()) {
+ if (!getReadSize().isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional .TimedQuota req_num = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota reqNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> reqNumBuilder_;
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ public boolean hasReqNum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqNum() {
+ if (reqNumBuilder_ == null) {
+ return reqNum_;
+ } else {
+ return reqNumBuilder_.getMessage();
+ }
+ }
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ public Builder setReqNum(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
+ if (reqNumBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ reqNum_ = value;
+ onChanged();
+ } else {
+ reqNumBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ public Builder setReqNum(
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
+ if (reqNumBuilder_ == null) {
+ reqNum_ = builderForValue.build();
+ onChanged();
+ } else {
+ reqNumBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ public Builder mergeReqNum(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
+ if (reqNumBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ reqNum_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
+ reqNum_ =
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(reqNum_).mergeFrom(value).buildPartial();
+ } else {
+ reqNum_ = value;
+ }
+ onChanged();
+ } else {
+ reqNumBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ public Builder clearReqNum() {
+ if (reqNumBuilder_ == null) {
+ reqNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ onChanged();
+ } else {
+ reqNumBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder getReqNumBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getReqNumFieldBuilder().getBuilder();
+ }
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqNumOrBuilder() {
+ if (reqNumBuilder_ != null) {
+ return reqNumBuilder_.getMessageOrBuilder();
+ } else {
+ return reqNum_;
+ }
+ }
+ /**
+ * <code>optional .TimedQuota req_num = 1;</code>
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
+ getReqNumFieldBuilder() {
+ if (reqNumBuilder_ == null) {
+ reqNumBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
+ reqNum_,
+ getParentForChildren(),
+ isClean());
+ reqNum_ = null;
+ }
+ return reqNumBuilder_;
+ }
+
+ // optional .TimedQuota req_size = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota reqSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> reqSizeBuilder_;
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ public boolean hasReqSize() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqSize() {
+ if (reqSizeBuilder_ == null) {
+ return reqSize_;
+ } else {
+ return reqSizeBuilder_.getMessage();
+ }
+ }
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ public Builder setReqSize(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
+ if (reqSizeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ reqSize_ = value;
+ onChanged();
+ } else {
+ reqSizeBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ public Builder setReqSize(
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
+ if (reqSizeBuilder_ == null) {
+ reqSize_ = builderForValue.build();
+ onChanged();
+ } else {
+ reqSizeBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ public Builder mergeReqSize(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
+ if (reqSizeBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ reqSize_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
+ reqSize_ =
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(reqSize_).mergeFrom(value).buildPartial();
+ } else {
+ reqSize_ = value;
+ }
+ onChanged();
+ } else {
+ reqSizeBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ public Builder clearReqSize() {
+ if (reqSizeBuilder_ == null) {
+ reqSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ onChanged();
+ } else {
+ reqSizeBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder getReqSizeBuilder() {
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return getReqSizeFieldBuilder().getBuilder();
+ }
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqSizeOrBuilder() {
+ if (reqSizeBuilder_ != null) {
+ return reqSizeBuilder_.getMessageOrBuilder();
+ } else {
+ return reqSize_;
+ }
+ }
+ /**
+ * <code>optional .TimedQuota req_size = 2;</code>
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
+ getReqSizeFieldBuilder() {
+ if (reqSizeBuilder_ == null) {
+ reqSizeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
+ reqSize_,
+ getParentForChildren(),
+ isClean());
+ reqSize_ = null;
+ }
+ return reqSizeBuilder_;
+ }
+
+ // optional .TimedQuota write_num = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota writeNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> writeNumBuilder_;
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ public boolean hasWriteNum() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteNum() {
+ if (writeNumBuilder_ == null) {
+ return writeNum_;
+ } else {
+ return writeNumBuilder_.getMessage();
+ }
+ }
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ public Builder setWriteNum(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
+ if (writeNumBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ writeNum_ = value;
+ onChanged();
+ } else {
+ writeNumBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ public Builder setWriteNum(
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
+ if (writeNumBuilder_ == null) {
+ writeNum_ = builderForValue.build();
+ onChanged();
+ } else {
+ writeNumBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ public Builder mergeWriteNum(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
+ if (writeNumBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004) &&
+ writeNum_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
+ writeNum_ =
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(writeNum_).mergeFrom(value).buildPartial();
+ } else {
+ writeNum_ = value;
+ }
+ onChanged();
+ } else {
+ writeNumBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ public Builder clearWriteNum() {
+ if (writeNumBuilder_ == null) {
+ writeNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ onChanged();
+ } else {
+ writeNumBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder getWriteNumBuilder() {
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return getWriteNumFieldBuilder().getBuilder();
+ }
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteNumOrBuilder() {
+ if (writeNumBuilder_ != null) {
+ return writeNumBuilder_.getMessageOrBuilder();
+ } else {
+ return writeNum_;
+ }
+ }
+ /**
+ * <code>optional .TimedQuota write_num = 3;</code>
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
+ getWriteNumFieldBuilder() {
+ if (writeNumBuilder_ == null) {
+ writeNumBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
+ writeNum_,
+ getParentForChildren(),
+ isClean());
+ writeNum_ = null;
+ }
+ return writeNumBuilder_;
+ }
+
+ // optional .TimedQuota write_size = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota writeSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> writeSizeBuilder_;
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ public boolean hasWriteSize() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteSize() {
+ if (writeSizeBuilder_ == null) {
+ return writeSize_;
+ } else {
+ return writeSizeBuilder_.getMessage();
+ }
+ }
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ public Builder setWriteSize(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
+ if (writeSizeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ writeSize_ = value;
+ onChanged();
+ } else {
+ writeSizeBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ public Builder setWriteSize(
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
+ if (writeSizeBuilder_ == null) {
+ writeSize_ = builderForValue.build();
+ onChanged();
+ } else {
+ writeSizeBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ public Builder mergeWriteSize(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
+ if (writeSizeBuilder_ == null) {
+ if (((bitField0_ & 0x00000008) == 0x00000008) &&
+ writeSize_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
+ writeSize_ =
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(writeSize_).mergeFrom(value).buildPartial();
+ } else {
+ writeSize_ = value;
+ }
+ onChanged();
+ } else {
+ writeSizeBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ public Builder clearWriteSize() {
+ if (writeSizeBuilder_ == null) {
+ writeSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
+ onChanged();
+ } else {
+ writeSizeBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder getWriteSizeBuilder() {
+ bitField0_ |= 0x00000008;
+ onChanged();
+ return getWriteSizeFieldBuilder().getBuilder();
+ }
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteSizeOrBuilder() {
+ if (writeSizeBuilder_ != null) {
+ return writeSizeBuilder_.getMessageOrBuilder();
+ } else {
+ return writeSize_;
+ }
+ }
+ /**
+ * <code>optional .TimedQuota write_size = 4;</code>
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
+ getWriteSizeFi
<TRUNCATED>
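
[Not part of the patch above — a minimal usage sketch.] The generated Throttle builder in this hunk exposes one optional TimedQuota per throttle dimension (req_num, req_size, write_num, write_size, read_num, read_size). The sketch below shows how client code might assemble such a message. It assumes the standard protobuf-generated Throttle.newBuilder() entry point (not visible in this excerpt); TimedQuota's own field setters are also not part of the excerpt, so only default instances are set here and buildPartial() (shown above) is used instead of build(), since build() would reject a TimedQuota whose required fields are unset.

// ThrottleBuilderSketch.java -- illustrative only, not from the HBASE-11598 patch.
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota;

public class ThrottleBuilderSketch {
  public static void main(String[] args) {
    // Each dimension is optional; dimensions that are never set simply
    // report hasXxx() == false on the resulting message.
    Throttle throttle = Throttle.newBuilder()               // assumed standard generated factory
        .setReqNum(TimedQuota.getDefaultInstance())         // throttle on request count
        .setWriteSize(TimedQuota.getDefaultInstance())      // throttle on written bytes
        .buildPartial();  // skips the isInitialized() check; real callers would
                          // populate the TimedQuota fields and call build()

    System.out.println("has req_num quota:   " + throttle.hasReqNum());    // true
    System.out.println("has read_size quota: " + throttle.hasReadSize());  // false
  }
}

A real caller (e.g. the quota admin path added by this change) would fill in each TimedQuota before calling build(); the point here is only the shape of the generated builder API shown in the diff.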