Posted to commits@hbase.apache.org by st...@apache.org on 2013/04/11 05:52:57 UTC
svn commit: r1466761 [8/41] - in /hbase/branches/0.95: ./
hbase-client/src/main/java/org/apache/hadoop/hbase/
hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/
hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/
hbase-pr...
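
The hunks below strip the protobuf 2.5-style generated code in ClusterStatusProtos.java (per-field javadoc, a static PARSER field, and a stream-parsing constructor that preserves unknown fields) and restore the older builder-based form, in which the static parseFrom(...) methods go through newBuilder().mergeFrom(...).buildParsed(). The public parseFrom(...) signatures are the same in both forms, so callers compile unchanged. A minimal caller-side sketch, assuming hbase-protocol is on the classpath; the helper class and method names here are illustrative, not part of this change:

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState;

/** Hypothetical helper, shown only to illustrate the unchanged parse surface. */
public final class RegionStateParseSketch {
  private RegionStateParseSketch() {}

  /**
   * Decodes a serialized RegionState. In the removed 2.5-style code the
   * generated body delegates to the static PARSER; in the restored form it is
   * newBuilder().mergeFrom(data).buildParsed(). Callers see no difference.
   */
  public static RegionState decode(byte[] data) throws InvalidProtocolBufferException {
    return RegionState.parseFrom(data);
  }
}
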
Modified: hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java?rev=1466761&r1=1466760&r2=1466761&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java Thu Apr 11 03:52:56 2013
@@ -10,347 +10,78 @@ public final class ClusterStatusProtos {
}
public interface RegionStateOrBuilder
extends com.google.protobuf.MessageOrBuilder {
-
+
// required .RegionInfo regionInfo = 1;
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
boolean hasRegionInfo();
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo();
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder();
-
+
// required .RegionState.State state = 2;
- /**
- * <code>required .RegionState.State state = 2;</code>
- */
boolean hasState();
- /**
- * <code>required .RegionState.State state = 2;</code>
- */
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState();
-
+
// optional uint64 stamp = 3;
- /**
- * <code>optional uint64 stamp = 3;</code>
- */
boolean hasStamp();
- /**
- * <code>optional uint64 stamp = 3;</code>
- */
long getStamp();
}
- /**
- * Protobuf type {@code RegionState}
- */
public static final class RegionState extends
com.google.protobuf.GeneratedMessage
implements RegionStateOrBuilder {
// Use RegionState.newBuilder() to construct.
- private RegionState(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private RegionState(Builder builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private RegionState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
+ private RegionState(boolean noInit) {}
+
private static final RegionState defaultInstance;
public static RegionState getDefaultInstance() {
return defaultInstance;
}
-
+
public RegionState getDefaultInstanceForType() {
return defaultInstance;
}
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private RegionState(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- subBuilder = regionInfo_.toBuilder();
- }
- regionInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry);
- if (subBuilder != null) {
- subBuilder.mergeFrom(regionInfo_);
- regionInfo_ = subBuilder.buildPartial();
- }
- bitField0_ |= 0x00000001;
- break;
- }
- case 16: {
- int rawValue = input.readEnum();
- org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(rawValue);
- if (value == null) {
- unknownFields.mergeVarintField(2, rawValue);
- } else {
- bitField0_ |= 0x00000002;
- state_ = value;
- }
- break;
- }
- case 24: {
- bitField0_ |= 0x00000004;
- stamp_ = input.readUInt64();
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
+
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder.class);
- }
-
- public static com.google.protobuf.Parser<RegionState> PARSER =
- new com.google.protobuf.AbstractParser<RegionState>() {
- public RegionState parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new RegionState(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<RegionState> getParserForType() {
- return PARSER;
+ return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable;
}
-
- /**
- * Protobuf enum {@code RegionState.State}
- */
+
public enum State
implements com.google.protobuf.ProtocolMessageEnum {
- /**
- * <code>OFFLINE = 0;</code>
- *
- * <pre>
- * region is in an offline state
- * </pre>
- */
OFFLINE(0, 0),
- /**
- * <code>PENDING_OPEN = 1;</code>
- *
- * <pre>
- * sent rpc to server to open but has not begun
- * </pre>
- */
PENDING_OPEN(1, 1),
- /**
- * <code>OPENING = 2;</code>
- *
- * <pre>
- * server has begun to open but not yet done
- * </pre>
- */
OPENING(2, 2),
- /**
- * <code>OPEN = 3;</code>
- *
- * <pre>
- * server opened region and updated meta
- * </pre>
- */
OPEN(3, 3),
- /**
- * <code>PENDING_CLOSE = 4;</code>
- *
- * <pre>
- * sent rpc to server to close but has not begun
- * </pre>
- */
PENDING_CLOSE(4, 4),
- /**
- * <code>CLOSING = 5;</code>
- *
- * <pre>
- * server has begun to close but not yet done
- * </pre>
- */
CLOSING(5, 5),
- /**
- * <code>CLOSED = 6;</code>
- *
- * <pre>
- * server closed region and updated meta
- * </pre>
- */
CLOSED(6, 6),
- /**
- * <code>SPLITTING = 7;</code>
- *
- * <pre>
- * server started split of a region
- * </pre>
- */
SPLITTING(7, 7),
- /**
- * <code>SPLIT = 8;</code>
- *
- * <pre>
- * server completed split of a region
- * </pre>
- */
SPLIT(8, 8),
- /**
- * <code>FAILED_OPEN = 9;</code>
- *
- * <pre>
- * failed to open, and won't retry any more
- * </pre>
- */
FAILED_OPEN(9, 9),
- /**
- * <code>FAILED_CLOSE = 10;</code>
- *
- * <pre>
- * failed to close, and won't retry any more
- * </pre>
- */
FAILED_CLOSE(10, 10),
;
-
- /**
- * <code>OFFLINE = 0;</code>
- *
- * <pre>
- * region is in an offline state
- * </pre>
- */
+
public static final int OFFLINE_VALUE = 0;
- /**
- * <code>PENDING_OPEN = 1;</code>
- *
- * <pre>
- * sent rpc to server to open but has not begun
- * </pre>
- */
public static final int PENDING_OPEN_VALUE = 1;
- /**
- * <code>OPENING = 2;</code>
- *
- * <pre>
- * server has begun to open but not yet done
- * </pre>
- */
public static final int OPENING_VALUE = 2;
- /**
- * <code>OPEN = 3;</code>
- *
- * <pre>
- * server opened region and updated meta
- * </pre>
- */
public static final int OPEN_VALUE = 3;
- /**
- * <code>PENDING_CLOSE = 4;</code>
- *
- * <pre>
- * sent rpc to server to close but has not begun
- * </pre>
- */
public static final int PENDING_CLOSE_VALUE = 4;
- /**
- * <code>CLOSING = 5;</code>
- *
- * <pre>
- * server has begun to close but not yet done
- * </pre>
- */
public static final int CLOSING_VALUE = 5;
- /**
- * <code>CLOSED = 6;</code>
- *
- * <pre>
- * server closed region and updated meta
- * </pre>
- */
public static final int CLOSED_VALUE = 6;
- /**
- * <code>SPLITTING = 7;</code>
- *
- * <pre>
- * server started split of a region
- * </pre>
- */
public static final int SPLITTING_VALUE = 7;
- /**
- * <code>SPLIT = 8;</code>
- *
- * <pre>
- * server completed split of a region
- * </pre>
- */
public static final int SPLIT_VALUE = 8;
- /**
- * <code>FAILED_OPEN = 9;</code>
- *
- * <pre>
- * failed to open, and won't retry any more
- * </pre>
- */
public static final int FAILED_OPEN_VALUE = 9;
- /**
- * <code>FAILED_CLOSE = 10;</code>
- *
- * <pre>
- * failed to close, and won't retry any more
- * </pre>
- */
public static final int FAILED_CLOSE_VALUE = 10;
-
-
+
+
public final int getNumber() { return value; }
-
+
public static State valueOf(int value) {
switch (value) {
case 0: return OFFLINE;
@@ -367,7 +98,7 @@ public final class ClusterStatusProtos {
default: return null;
}
}
-
+
public static com.google.protobuf.Internal.EnumLiteMap<State>
internalGetValueMap() {
return internalValueMap;
@@ -379,7 +110,7 @@ public final class ClusterStatusProtos {
return State.valueOf(number);
}
};
-
+
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
@@ -392,9 +123,11 @@ public final class ClusterStatusProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDescriptor().getEnumTypes().get(0);
}
-
- private static final State[] VALUES = values();
-
+
+ private static final State[] VALUES = {
+ OFFLINE, PENDING_OPEN, OPENING, OPEN, PENDING_CLOSE, CLOSING, CLOSED, SPLITTING, SPLIT, FAILED_OPEN, FAILED_CLOSE,
+ };
+
public static State valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
@@ -403,73 +136,52 @@ public final class ClusterStatusProtos {
}
return VALUES[desc.getIndex()];
}
-
+
private final int index;
private final int value;
-
+
private State(int index, int value) {
this.index = index;
this.value = value;
}
-
+
// @@protoc_insertion_point(enum_scope:RegionState.State)
}
-
+
private int bitField0_;
// required .RegionInfo regionInfo = 1;
public static final int REGIONINFO_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_;
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
public boolean hasRegionInfo() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
return regionInfo_;
}
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
return regionInfo_;
}
-
+
// required .RegionState.State state = 2;
public static final int STATE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_;
- /**
- * <code>required .RegionState.State state = 2;</code>
- */
public boolean hasState() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
- /**
- * <code>required .RegionState.State state = 2;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() {
return state_;
}
-
+
// optional uint64 stamp = 3;
public static final int STAMP_FIELD_NUMBER = 3;
private long stamp_;
- /**
- * <code>optional uint64 stamp = 3;</code>
- */
public boolean hasStamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
- /**
- * <code>optional uint64 stamp = 3;</code>
- */
public long getStamp() {
return stamp_;
}
-
+
private void initFields() {
regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE;
@@ -479,7 +191,7 @@ public final class ClusterStatusProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
-
+
if (!hasRegionInfo()) {
memoizedIsInitialized = 0;
return false;
@@ -495,7 +207,7 @@ public final class ClusterStatusProtos {
memoizedIsInitialized = 1;
return true;
}
-
+
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@@ -510,12 +222,12 @@ public final class ClusterStatusProtos {
}
getUnknownFields().writeTo(output);
}
-
+
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
-
+
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@@ -533,14 +245,14 @@ public final class ClusterStatusProtos {
memoizedSerializedSize = size;
return size;
}
-
+
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
-
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -550,7 +262,7 @@ public final class ClusterStatusProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) obj;
-
+
boolean result = true;
result = result && (hasRegionInfo() == other.hasRegionInfo());
if (hasRegionInfo()) {
@@ -571,13 +283,9 @@ public final class ClusterStatusProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
-
- private int memoizedHashCode = 0;
+
@java.lang.Override
public int hashCode() {
- if (memoizedHashCode != 0) {
- return memoizedHashCode;
- }
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRegionInfo()) {
@@ -593,79 +301,89 @@ public final class ClusterStatusProtos {
hash = (53 * hash) + hashLong(getStamp());
}
hash = (29 * hash) + getUnknownFields().hashCode();
- memoizedHashCode = hash;
return hash;
}
-
+
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
+ return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
+ return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
}
-
+
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
-
+
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
- /**
- * Protobuf type {@code RegionState}
- */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder {
@@ -673,21 +391,18 @@ public final class ClusterStatusProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder.class);
+ return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable;
}
-
+
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+
+ private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -699,7 +414,7 @@ public final class ClusterStatusProtos {
private static Builder create() {
return new Builder();
}
-
+
public Builder clear() {
super.clear();
if (regionInfoBuilder_ == null) {
@@ -714,20 +429,20 @@ public final class ClusterStatusProtos {
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
-
+
public Builder clone() {
return create().mergeFrom(buildPartial());
}
-
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor;
+ return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDescriptor();
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance();
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState build() {
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = buildPartial();
if (!result.isInitialized()) {
@@ -735,7 +450,17 @@ public final class ClusterStatusProtos {
}
return result;
}
-
+
+ private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState(this);
int from_bitField0_ = bitField0_;
@@ -760,7 +485,7 @@ public final class ClusterStatusProtos {
onBuilt();
return result;
}
-
+
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState)other);
@@ -769,7 +494,7 @@ public final class ClusterStatusProtos {
return this;
}
}
-
+
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance()) return this;
if (other.hasRegionInfo()) {
@@ -784,7 +509,7 @@ public final class ClusterStatusProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
-
+
public final boolean isInitialized() {
if (!hasRegionInfo()) {
@@ -800,39 +525,68 @@ public final class ClusterStatusProtos {
}
return true;
}
-
+
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder();
+ if (hasRegionInfo()) {
+ subBuilder.mergeFrom(getRegionInfo());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setRegionInfo(subBuilder.buildPartial());
+ break;
+ }
+ case 16: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(2, rawValue);
+ } else {
+ bitField0_ |= 0x00000002;
+ state_ = value;
+ }
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ stamp_ = input.readUInt64();
+ break;
+ }
}
}
- return this;
}
+
private int bitField0_;
-
+
// required .RegionInfo regionInfo = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
public boolean hasRegionInfo() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
if (regionInfoBuilder_ == null) {
return regionInfo_;
@@ -840,9 +594,6 @@ public final class ClusterStatusProtos {
return regionInfoBuilder_.getMessage();
}
}
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
@@ -856,9 +607,6 @@ public final class ClusterStatusProtos {
bitField0_ |= 0x00000001;
return this;
}
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
public Builder setRegionInfo(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
@@ -870,9 +618,6 @@ public final class ClusterStatusProtos {
bitField0_ |= 0x00000001;
return this;
}
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
@@ -889,9 +634,6 @@ public final class ClusterStatusProtos {
bitField0_ |= 0x00000001;
return this;
}
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
public Builder clearRegionInfo() {
if (regionInfoBuilder_ == null) {
regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
@@ -902,17 +644,11 @@ public final class ClusterStatusProtos {
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRegionInfoFieldBuilder().getBuilder();
}
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
if (regionInfoBuilder_ != null) {
return regionInfoBuilder_.getMessageOrBuilder();
@@ -920,9 +656,6 @@ public final class ClusterStatusProtos {
return regionInfo_;
}
}
- /**
- * <code>required .RegionInfo regionInfo = 1;</code>
- */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoFieldBuilder() {
@@ -936,24 +669,15 @@ public final class ClusterStatusProtos {
}
return regionInfoBuilder_;
}
-
+
// required .RegionState.State state = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE;
- /**
- * <code>required .RegionState.State state = 2;</code>
- */
public boolean hasState() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
- /**
- * <code>required .RegionState.State state = 2;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() {
return state_;
}
- /**
- * <code>required .RegionState.State state = 2;</code>
- */
public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value) {
if (value == null) {
throw new NullPointerException();
@@ -963,252 +687,113 @@ public final class ClusterStatusProtos {
onChanged();
return this;
}
- /**
- * <code>required .RegionState.State state = 2;</code>
- */
public Builder clearState() {
bitField0_ = (bitField0_ & ~0x00000002);
state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE;
onChanged();
return this;
}
-
+
// optional uint64 stamp = 3;
private long stamp_ ;
- /**
- * <code>optional uint64 stamp = 3;</code>
- */
public boolean hasStamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
- /**
- * <code>optional uint64 stamp = 3;</code>
- */
public long getStamp() {
return stamp_;
}
- /**
- * <code>optional uint64 stamp = 3;</code>
- */
public Builder setStamp(long value) {
bitField0_ |= 0x00000004;
stamp_ = value;
onChanged();
return this;
}
- /**
- * <code>optional uint64 stamp = 3;</code>
- */
public Builder clearStamp() {
bitField0_ = (bitField0_ & ~0x00000004);
stamp_ = 0L;
onChanged();
return this;
}
-
+
// @@protoc_insertion_point(builder_scope:RegionState)
}
-
+
static {
defaultInstance = new RegionState(true);
defaultInstance.initFields();
}
-
+
// @@protoc_insertion_point(class_scope:RegionState)
}
-
+
public interface RegionInTransitionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
-
+
// required .RegionSpecifier spec = 1;
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
boolean hasSpec();
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec();
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder();
-
+
// required .RegionState regionState = 2;
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
boolean hasRegionState();
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState();
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder();
}
- /**
- * Protobuf type {@code RegionInTransition}
- */
public static final class RegionInTransition extends
com.google.protobuf.GeneratedMessage
implements RegionInTransitionOrBuilder {
// Use RegionInTransition.newBuilder() to construct.
- private RegionInTransition(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private RegionInTransition(Builder builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private RegionInTransition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
+ private RegionInTransition(boolean noInit) {}
+
private static final RegionInTransition defaultInstance;
public static RegionInTransition getDefaultInstance() {
return defaultInstance;
}
-
+
public RegionInTransition getDefaultInstanceForType() {
return defaultInstance;
}
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private RegionInTransition(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- subBuilder = spec_.toBuilder();
- }
- spec_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
- if (subBuilder != null) {
- subBuilder.mergeFrom(spec_);
- spec_ = subBuilder.buildPartial();
- }
- bitField0_ |= 0x00000001;
- break;
- }
- case 18: {
- org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder subBuilder = null;
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- subBuilder = regionState_.toBuilder();
- }
- regionState_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.PARSER, extensionRegistry);
- if (subBuilder != null) {
- subBuilder.mergeFrom(regionState_);
- regionState_ = subBuilder.buildPartial();
- }
- bitField0_ |= 0x00000002;
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
+
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder.class);
- }
-
- public static com.google.protobuf.Parser<RegionInTransition> PARSER =
- new com.google.protobuf.AbstractParser<RegionInTransition>() {
- public RegionInTransition parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new RegionInTransition(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<RegionInTransition> getParserForType() {
- return PARSER;
+ return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable;
}
-
+
private int bitField0_;
// required .RegionSpecifier spec = 1;
public static final int SPEC_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier spec_;
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
public boolean hasSpec() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() {
return spec_;
}
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() {
return spec_;
}
-
+
// required .RegionState regionState = 2;
public static final int REGIONSTATE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState regionState_;
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
public boolean hasRegionState() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() {
return regionState_;
}
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() {
return regionState_;
}
-
+
private void initFields() {
spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance();
@@ -1217,7 +802,7 @@ public final class ClusterStatusProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
-
+
if (!hasSpec()) {
memoizedIsInitialized = 0;
return false;
@@ -1237,7 +822,7 @@ public final class ClusterStatusProtos {
memoizedIsInitialized = 1;
return true;
}
-
+
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@@ -1249,12 +834,12 @@ public final class ClusterStatusProtos {
}
getUnknownFields().writeTo(output);
}
-
+
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
-
+
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@@ -1268,14 +853,14 @@ public final class ClusterStatusProtos {
memoizedSerializedSize = size;
return size;
}
-
+
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
-
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -1285,7 +870,7 @@ public final class ClusterStatusProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) obj;
-
+
boolean result = true;
result = result && (hasSpec() == other.hasSpec());
if (hasSpec()) {
@@ -1301,13 +886,9 @@ public final class ClusterStatusProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
-
- private int memoizedHashCode = 0;
+
@java.lang.Override
public int hashCode() {
- if (memoizedHashCode != 0) {
- return memoizedHashCode;
- }
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSpec()) {
@@ -1319,79 +900,89 @@ public final class ClusterStatusProtos {
hash = (53 * hash) + getRegionState().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
- memoizedHashCode = hash;
return hash;
}
-
+
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
+ return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
+ return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
}
-
+
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
-
+
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
- /**
- * Protobuf type {@code RegionInTransition}
- */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder {
@@ -1399,21 +990,18 @@ public final class ClusterStatusProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder.class);
+ return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable;
}
-
+
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+
+ private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -1426,7 +1014,7 @@ public final class ClusterStatusProtos {
private static Builder create() {
return new Builder();
}
-
+
public Builder clear() {
super.clear();
if (specBuilder_ == null) {
@@ -1443,20 +1031,20 @@ public final class ClusterStatusProtos {
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
-
+
public Builder clone() {
return create().mergeFrom(buildPartial());
}
-
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor;
+ return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDescriptor();
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance();
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition build() {
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = buildPartial();
if (!result.isInitialized()) {
@@ -1464,7 +1052,17 @@ public final class ClusterStatusProtos {
}
return result;
}
-
+
+ private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition(this);
int from_bitField0_ = bitField0_;
@@ -1489,7 +1087,7 @@ public final class ClusterStatusProtos {
onBuilt();
return result;
}
-
+
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition)other);
@@ -1498,7 +1096,7 @@ public final class ClusterStatusProtos {
return this;
}
}
-
+
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance()) return this;
if (other.hasSpec()) {
@@ -1510,7 +1108,7 @@ public final class ClusterStatusProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
-
+
public final boolean isInitialized() {
if (!hasSpec()) {
@@ -1530,39 +1128,61 @@ public final class ClusterStatusProtos {
}
return true;
}
-
+
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
+ if (hasSpec()) {
+ subBuilder.mergeFrom(getSpec());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setSpec(subBuilder.buildPartial());
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder();
+ if (hasRegionState()) {
+ subBuilder.mergeFrom(getRegionState());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setRegionState(subBuilder.buildPartial());
+ break;
+ }
}
}
- return this;
}
+
private int bitField0_;
-
+
// required .RegionSpecifier spec = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> specBuilder_;
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
public boolean hasSpec() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() {
if (specBuilder_ == null) {
return spec_;
@@ -1570,9 +1190,6 @@ public final class ClusterStatusProtos {
return specBuilder_.getMessage();
}
}
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
public Builder setSpec(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (specBuilder_ == null) {
if (value == null) {
@@ -1586,9 +1203,6 @@ public final class ClusterStatusProtos {
bitField0_ |= 0x00000001;
return this;
}
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
public Builder setSpec(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (specBuilder_ == null) {
@@ -1600,9 +1214,6 @@ public final class ClusterStatusProtos {
bitField0_ |= 0x00000001;
return this;
}
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
public Builder mergeSpec(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (specBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
@@ -1619,9 +1230,6 @@ public final class ClusterStatusProtos {
bitField0_ |= 0x00000001;
return this;
}
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
public Builder clearSpec() {
if (specBuilder_ == null) {
spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
@@ -1632,17 +1240,11 @@ public final class ClusterStatusProtos {
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getSpecBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSpecFieldBuilder().getBuilder();
}
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() {
if (specBuilder_ != null) {
return specBuilder_.getMessageOrBuilder();
@@ -1650,9 +1252,6 @@ public final class ClusterStatusProtos {
return spec_;
}
}
- /**
- * <code>required .RegionSpecifier spec = 1;</code>
- */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getSpecFieldBuilder() {
@@ -1666,20 +1265,14 @@ public final class ClusterStatusProtos {
}
return specBuilder_;
}
-
+
// required .RegionState regionState = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> regionStateBuilder_;
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
public boolean hasRegionState() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() {
if (regionStateBuilder_ == null) {
return regionState_;
@@ -1687,9 +1280,6 @@ public final class ClusterStatusProtos {
return regionStateBuilder_.getMessage();
}
}
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
public Builder setRegionState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState value) {
if (regionStateBuilder_ == null) {
if (value == null) {
@@ -1703,9 +1293,6 @@ public final class ClusterStatusProtos {
bitField0_ |= 0x00000002;
return this;
}
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
public Builder setRegionState(
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder builderForValue) {
if (regionStateBuilder_ == null) {
@@ -1717,9 +1304,6 @@ public final class ClusterStatusProtos {
bitField0_ |= 0x00000002;
return this;
}
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
public Builder mergeRegionState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState value) {
if (regionStateBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
@@ -1736,9 +1320,6 @@ public final class ClusterStatusProtos {
bitField0_ |= 0x00000002;
return this;
}
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
public Builder clearRegionState() {
if (regionStateBuilder_ == null) {
regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance();
@@ -1749,17 +1330,11 @@ public final class ClusterStatusProtos {
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder getRegionStateBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getRegionStateFieldBuilder().getBuilder();
}
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() {
if (regionStateBuilder_ != null) {
return regionStateBuilder_.getMessageOrBuilder();
@@ -1767,9 +1342,6 @@ public final class ClusterStatusProtos {
return regionState_;
}
}
- /**
- * <code>required .RegionState regionState = 2;</code>
- */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder>
getRegionStateFieldBuilder() {
@@ -1783,210 +1355,86 @@ public final class ClusterStatusProtos {
}
return regionStateBuilder_;
}
-
+
// @@protoc_insertion_point(builder_scope:RegionInTransition)
}
-
+
static {
defaultInstance = new RegionInTransition(true);
defaultInstance.initFields();
}
-
+
// @@protoc_insertion_point(class_scope:RegionInTransition)
}
-
+
public interface LiveServerInfoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
-
+
// required .ServerName server = 1;
- /**
- * <code>required .ServerName server = 1;</code>
- */
boolean hasServer();
- /**
- * <code>required .ServerName server = 1;</code>
- */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer();
- /**
- * <code>required .ServerName server = 1;</code>
- */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();
-
+
// required .ServerLoad serverLoad = 2;
- /**
- * <code>required .ServerLoad serverLoad = 2;</code>
- */
boolean hasServerLoad();
- /**
- * <code>required .ServerLoad serverLoad = 2;</code>
- */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad();
- /**
- * <code>required .ServerLoad serverLoad = 2;</code>
- */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder();
}
- /**
- * Protobuf type {@code LiveServerInfo}
- */
public static final class LiveServerInfo extends
com.google.protobuf.GeneratedMessage
implements LiveServerInfoOrBuilder {
// Use LiveServerInfo.newBuilder() to construct.
- private LiveServerInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private LiveServerInfo(Builder builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private LiveServerInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
+ private LiveServerInfo(boolean noInit) {}
+
private static final LiveServerInfo defaultInstance;
public static LiveServerInfo getDefaultInstance() {
return defaultInstance;
}
-
+
public LiveServerInfo getDefaultInstanceForType() {
return defaultInstance;
}
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private LiveServerInfo(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- subBuilder = server_.toBuilder();
- }
- server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
- if (subBuilder != null) {
- subBuilder.mergeFrom(server_);
- server_ = subBuilder.buildPartial();
- }
- bitField0_ |= 0x00000001;
- break;
- }
- case 18: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = null;
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- subBuilder = serverLoad_.toBuilder();
- }
- serverLoad_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.PARSER, extensionRegistry);
- if (subBuilder != null) {
- subBuilder.mergeFrom(serverLoad_);
- serverLoad_ = subBuilder.buildPartial();
- }
- bitField0_ |= 0x00000002;
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
+
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder.class);
- }
-
- public static com.google.protobuf.Parser<LiveServerInfo> PARSER =
- new com.google.protobuf.AbstractParser<LiveServerInfo>() {
- public LiveServerInfo parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new LiveServerInfo(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<LiveServerInfo> getParserForType() {
- return PARSER;
+ return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable;
}
-
+
private int bitField0_;
// required .ServerName server = 1;
public static final int SERVER_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_;
- /**
- * <code>required .ServerName server = 1;</code>
- */
public boolean hasServer() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
- /**
- * <code>required .ServerName server = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
return server_;
}
- /**
- * <code>required .ServerName server = 1;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
return server_;
}
-
+
// required .ServerLoad serverLoad = 2;
public static final int SERVERLOAD_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad serverLoad_;
- /**
- * <code>required .ServerLoad serverLoad = 2;</code>
- */
public boolean hasServerLoad() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
- /**
- * <code>required .ServerLoad serverLoad = 2;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad() {
return serverLoad_;
}
- /**
- * <code>required .ServerLoad serverLoad = 2;</code>
- */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder() {
return serverLoad_;
}
-
+
private void initFields() {
server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance();
@@ -1995,7 +1443,7 @@ public final class ClusterStatusProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
-
+
if (!hasServer()) {
memoizedIsInitialized = 0;
return false;
@@ -2015,7 +1463,7 @@ public final class ClusterStatusProtos {
memoizedIsInitialized = 1;
return true;
}
-
+
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@@ -2027,12 +1475,12 @@ public final class ClusterStatusProtos {
}
getUnknownFields().writeTo(output);
}
-
+
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
-
+
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@@ -2046,14 +1494,14 @@ public final class ClusterStatusProtos {
memoizedSerializedSize = size;
return size;
}
-
+
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
-
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -2063,7 +1511,7 @@ public final class ClusterStatusProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) obj;
-
+
boolean result = true;
result = result && (hasServer() == other.hasServer());
if (hasServer()) {
@@ -2079,13 +1527,9 @@ public final class ClusterStatusProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
-
- private int memoizedHashCode = 0;
+
@java.lang.Override
public int hashCode() {
- if (memoizedHashCode != 0) {
- return memoizedHashCode;
- }
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasServer()) {
@@ -2097,79 +1541,89 @@ public final class ClusterStatusProtos {
hash = (53 * hash) + getServerLoad().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
- memoizedHashCode = hash;
return hash;
}
-
+
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
+ return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
+ return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
}
-
+
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
-
+
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
- /**
- * Protobuf type {@code LiveServerInfo}
- */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder {
@@ -2177,21 +1631,18 @@ public final class ClusterStatusProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder.class);
+ return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable;
}
-
+
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+
+ private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -2204,7 +1655,7 @@ public final class ClusterStatusProtos {
private static Builder create() {
return new Builder();
}
-
+
public Builder clear() {
super.clear();
if (serverBuilder_ == null) {
@@ -2221,20 +1672,20 @@ public final class ClusterStatusProtos {
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
-
+
public Builder clone() {
return create().mergeFrom(buildPartial());
}
-
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor;
+ return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDescriptor();
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance();
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo build() {
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = buildPartial();
if (!result.isInitialized()) {
@@ -2242,7 +1693,17 @@ public final class ClusterStatusProtos {
}
return result;
}
-
+
+ private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo(this);
int from_bitField0_ = bitField0_;
@@ -2267,7 +1728,7 @@ public final class ClusterStatusProtos {
onBuilt();
return result;
}
-
+
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo)other);
@@ -2276,7 +1737,7 @@ public final class ClusterStatusProtos {
return this;
}
}
-
+
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance()) return this;
if (other.hasServer()) {
@@ -2288,7 +1749,7 @@ public final class ClusterStatusProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
-
+
public final boolean isInitialized() {
if (!hasServer()) {
@@ -2308,39 +1769,61 @@ public final class ClusterStatusProtos {
}
return true;
}
-
+
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
[... 2589 lines stripped ...]
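
For context on the surface this hunk touches: the public static parseFrom/parseDelimitedFrom entry points on LiveServerInfo are identical on both sides of the diff; only the internal implementation switches between the PARSER object and newBuilder().mergeFrom(...).buildParsed(), so calling code needs no changes. A minimal caller sketch follows (the class name LiveServerInfoReader and the wireBytes argument are hypothetical; everything else appears in the generated class above):

  import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo;

  public final class LiveServerInfoReader {
    public static LiveServerInfo decode(byte[] wireBytes)
        throws com.google.protobuf.InvalidProtocolBufferException {
      // Static parseFrom exists in both the removed (PARSER-based) and the
      // added (builder-based) code paths shown in the diff.
      LiveServerInfo info = LiveServerInfo.parseFrom(wireBytes);
      // server and serverLoad are required fields, so a message that parses
      // successfully always has both set.
      assert info.hasServer() && info.hasServerLoad();
      return info;
    }
  }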