Posted to commits@hbase.apache.org by st...@apache.org on 2013/08/21 07:04:22 UTC
svn commit: r1516084 [13/43] - in /hbase/trunk: ./
hbase-client/src/main/java/org/apache/hadoop/hbase/
hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/
hbase-common/src/test/java/org/apache/hadoop/hbase/
hbase-protocol/src/main/java/org/apa...
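
Context for the hunks below: the regenerated code replaces the old builder-based static parse helpers (newBuilder().mergeFrom(data).buildParsed()) with delegation to a com.google.protobuf.Parser field (PARSER.parseFrom(...)), and adds per-message unknownFields handling, the style emitted by newer protoc releases. The caller-facing API is unchanged. A minimal caller-side sketch follows; it assumes only the generated methods visible in this diff (newBuilder, setName, setSerializedFilter, build, parseFrom, getName) plus the standard protobuf toByteArray(), and the filter name and payload values are made up for illustration:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;

    public class FilterProtosExample {
      public static void main(String[] args) throws Exception {
        // Build a Filter message (required string name = 1; optional bytes serialized_filter = 2).
        // The name and payload below are hypothetical placeholders.
        FilterProtos.Filter filter = FilterProtos.Filter.newBuilder()
            .setName("org.apache.hadoop.hbase.filter.ColumnPrefixFilter")
            .setSerializedFilter(ByteString.copyFromUtf8("example-payload"))
            .build();

        // Serialize and parse back. Before this commit parseFrom() went through
        // newBuilder().mergeFrom(data).buildParsed(); after it, parseFrom() delegates
        // to the generated PARSER, but callers see the same behavior.
        byte[] bytes = filter.toByteArray();
        FilterProtos.Filter parsed = FilterProtos.Filter.parseFrom(bytes);
        System.out.println(parsed.getName());
      }
    }
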
Modified: hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java?rev=1516084&r1=1516083&r2=1516084&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java (original)
+++ hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java Wed Aug 21 05:04:20 2013
@@ -10,86 +10,192 @@ public final class FilterProtos {
}
public interface FilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
-
+
// required string name = 1;
+ /**
+ * <code>required string name = 1;</code>
+ */
boolean hasName();
- String getName();
-
+ /**
+ * <code>required string name = 1;</code>
+ */
+ java.lang.String getName();
+ /**
+ * <code>required string name = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getNameBytes();
+
// optional bytes serialized_filter = 2;
+ /**
+ * <code>optional bytes serialized_filter = 2;</code>
+ */
boolean hasSerializedFilter();
+ /**
+ * <code>optional bytes serialized_filter = 2;</code>
+ */
com.google.protobuf.ByteString getSerializedFilter();
}
+ /**
+ * Protobuf type {@code Filter}
+ */
public static final class Filter extends
com.google.protobuf.GeneratedMessage
implements FilterOrBuilder {
// Use Filter.newBuilder() to construct.
- private Filter(Builder builder) {
+ private Filter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private Filter(boolean noInit) {}
-
+ private Filter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
private static final Filter defaultInstance;
public static Filter getDefaultInstance() {
return defaultInstance;
}
-
+
public Filter getDefaultInstanceForType() {
return defaultInstance;
}
-
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private Filter(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ name_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ serializedFilter_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_fieldAccessorTable;
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<Filter> PARSER =
+ new com.google.protobuf.AbstractParser<Filter>() {
+ public Filter parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new Filter(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<Filter> getParserForType() {
+ return PARSER;
}
-
+
private int bitField0_;
// required string name = 1;
public static final int NAME_FIELD_NUMBER = 1;
private java.lang.Object name_;
+ /**
+ * <code>required string name = 1;</code>
+ */
public boolean hasName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
- public String getName() {
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public java.lang.String getName() {
java.lang.Object ref = name_;
- if (ref instanceof String) {
- return (String) ref;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
- String s = bs.toStringUtf8();
- if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
name_ = s;
}
return s;
}
}
- private com.google.protobuf.ByteString getNameBytes() {
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getNameBytes() {
java.lang.Object ref = name_;
- if (ref instanceof String) {
+ if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
-
+
// optional bytes serialized_filter = 2;
public static final int SERIALIZED_FILTER_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString serializedFilter_;
+ /**
+ * <code>optional bytes serialized_filter = 2;</code>
+ */
public boolean hasSerializedFilter() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
+ /**
+ * <code>optional bytes serialized_filter = 2;</code>
+ */
public com.google.protobuf.ByteString getSerializedFilter() {
return serializedFilter_;
}
-
+
private void initFields() {
name_ = "";
serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
@@ -98,7 +204,7 @@ public final class FilterProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
-
+
if (!hasName()) {
memoizedIsInitialized = 0;
return false;
@@ -106,7 +212,7 @@ public final class FilterProtos {
memoizedIsInitialized = 1;
return true;
}
-
+
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@@ -118,12 +224,12 @@ public final class FilterProtos {
}
getUnknownFields().writeTo(output);
}
-
+
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
-
+
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@@ -137,14 +243,14 @@ public final class FilterProtos {
memoizedSerializedSize = size;
return size;
}
-
+
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
-
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -154,7 +260,7 @@ public final class FilterProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) obj;
-
+
boolean result = true;
result = result && (hasName() == other.hasName());
if (hasName()) {
@@ -170,9 +276,13 @@ public final class FilterProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
-
+
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasName()) {
@@ -184,89 +294,79 @@ public final class FilterProtos {
hash = (53 * hash) + getSerializedFilter().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
return hash;
}
-
+
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
-
+
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
-
+
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
+ /**
+ * Protobuf type {@code Filter}
+ */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder {
@@ -274,18 +374,21 @@ public final class FilterProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_fieldAccessorTable;
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder.class);
}
-
+
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
-
- private Builder(BuilderParent parent) {
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -296,7 +399,7 @@ public final class FilterProtos {
private static Builder create() {
return new Builder();
}
-
+
public Builder clear() {
super.clear();
name_ = "";
@@ -305,20 +408,20 @@ public final class FilterProtos {
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
-
+
public Builder clone() {
return create().mergeFrom(buildPartial());
}
-
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDescriptor();
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_descriptor;
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter result = buildPartial();
if (!result.isInitialized()) {
@@ -326,17 +429,7 @@ public final class FilterProtos {
}
return result;
}
-
- private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter(this);
int from_bitField0_ = bitField0_;
@@ -353,7 +446,7 @@ public final class FilterProtos {
onBuilt();
return result;
}
-
+
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter)other);
@@ -362,11 +455,13 @@ public final class FilterProtos {
return this;
}
}
-
+
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) return this;
if (other.hasName()) {
- setName(other.getName());
+ bitField0_ |= 0x00000001;
+ name_ = other.name_;
+ onChanged();
}
if (other.hasSerializedFilter()) {
setSerializedFilter(other.getSerializedFilter());
@@ -374,7 +469,7 @@ public final class FilterProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
-
+
public final boolean isInitialized() {
if (!hasName()) {
@@ -382,62 +477,69 @@ public final class FilterProtos {
}
return true;
}
-
+
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 10: {
- bitField0_ |= 0x00000001;
- name_ = input.readBytes();
- break;
- }
- case 18: {
- bitField0_ |= 0x00000002;
- serializedFilter_ = input.readBytes();
- break;
- }
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
}
}
+ return this;
}
-
private int bitField0_;
-
+
// required string name = 1;
private java.lang.Object name_ = "";
+ /**
+ * <code>required string name = 1;</code>
+ */
public boolean hasName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
- public String getName() {
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public java.lang.String getName() {
java.lang.Object ref = name_;
- if (!(ref instanceof String)) {
- String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
name_ = s;
return s;
} else {
- return (String) ref;
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
}
}
- public Builder setName(String value) {
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public Builder setName(
+ java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
@@ -446,26 +548,46 @@ public final class FilterProtos {
onChanged();
return this;
}
+ /**
+ * <code>required string name = 1;</code>
+ */
public Builder clearName() {
bitField0_ = (bitField0_ & ~0x00000001);
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
- void setName(com.google.protobuf.ByteString value) {
- bitField0_ |= 0x00000001;
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public Builder setNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
name_ = value;
onChanged();
+ return this;
}
-
+
// optional bytes serialized_filter = 2;
private com.google.protobuf.ByteString serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>optional bytes serialized_filter = 2;</code>
+ */
public boolean hasSerializedFilter() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
+ /**
+ * <code>optional bytes serialized_filter = 2;</code>
+ */
public com.google.protobuf.ByteString getSerializedFilter() {
return serializedFilter_;
}
+ /**
+ * <code>optional bytes serialized_filter = 2;</code>
+ */
public Builder setSerializedFilter(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@@ -475,70 +597,152 @@ public final class FilterProtos {
onChanged();
return this;
}
+ /**
+ * <code>optional bytes serialized_filter = 2;</code>
+ */
public Builder clearSerializedFilter() {
bitField0_ = (bitField0_ & ~0x00000002);
serializedFilter_ = getDefaultInstance().getSerializedFilter();
onChanged();
return this;
}
-
+
// @@protoc_insertion_point(builder_scope:Filter)
}
-
+
static {
defaultInstance = new Filter(true);
defaultInstance.initFields();
}
-
+
// @@protoc_insertion_point(class_scope:Filter)
}
-
+
public interface ColumnCountGetFilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
-
+
// required int32 limit = 1;
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
boolean hasLimit();
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
int getLimit();
}
+ /**
+ * Protobuf type {@code ColumnCountGetFilter}
+ */
public static final class ColumnCountGetFilter extends
com.google.protobuf.GeneratedMessage
implements ColumnCountGetFilterOrBuilder {
// Use ColumnCountGetFilter.newBuilder() to construct.
- private ColumnCountGetFilter(Builder builder) {
+ private ColumnCountGetFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private ColumnCountGetFilter(boolean noInit) {}
-
+ private ColumnCountGetFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
private static final ColumnCountGetFilter defaultInstance;
public static ColumnCountGetFilter getDefaultInstance() {
return defaultInstance;
}
-
+
public ColumnCountGetFilter getDefaultInstanceForType() {
return defaultInstance;
}
-
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ColumnCountGetFilter(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ limit_ = input.readInt32();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable;
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ColumnCountGetFilter> PARSER =
+ new com.google.protobuf.AbstractParser<ColumnCountGetFilter>() {
+ public ColumnCountGetFilter parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ColumnCountGetFilter(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ColumnCountGetFilter> getParserForType() {
+ return PARSER;
}
-
+
private int bitField0_;
// required int32 limit = 1;
public static final int LIMIT_FIELD_NUMBER = 1;
private int limit_;
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public boolean hasLimit() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public int getLimit() {
return limit_;
}
-
+
private void initFields() {
limit_ = 0;
}
@@ -546,7 +750,7 @@ public final class FilterProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
-
+
if (!hasLimit()) {
memoizedIsInitialized = 0;
return false;
@@ -554,7 +758,7 @@ public final class FilterProtos {
memoizedIsInitialized = 1;
return true;
}
-
+
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@@ -563,12 +767,12 @@ public final class FilterProtos {
}
getUnknownFields().writeTo(output);
}
-
+
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
-
+
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@@ -578,14 +782,14 @@ public final class FilterProtos {
memoizedSerializedSize = size;
return size;
}
-
+
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
-
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -595,7 +799,7 @@ public final class FilterProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) obj;
-
+
boolean result = true;
result = result && (hasLimit() == other.hasLimit());
if (hasLimit()) {
@@ -606,9 +810,13 @@ public final class FilterProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
-
+
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasLimit()) {
@@ -616,89 +824,79 @@ public final class FilterProtos {
hash = (53 * hash) + getLimit();
}
hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
return hash;
}
-
+
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
-
+
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
-
+
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
+ /**
+ * Protobuf type {@code ColumnCountGetFilter}
+ */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilterOrBuilder {
@@ -706,18 +904,21 @@ public final class FilterProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable;
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class);
}
-
+
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
-
- private Builder(BuilderParent parent) {
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -728,27 +929,27 @@ public final class FilterProtos {
private static Builder create() {
return new Builder();
}
-
+
public Builder clear() {
super.clear();
limit_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
-
+
public Builder clone() {
return create().mergeFrom(buildPartial());
}
-
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDescriptor();
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor;
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance();
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial();
if (!result.isInitialized()) {
@@ -756,17 +957,7 @@ public final class FilterProtos {
}
return result;
}
-
- private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter(this);
int from_bitField0_ = bitField0_;
@@ -779,7 +970,7 @@ public final class FilterProtos {
onBuilt();
return result;
}
-
+
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)other);
@@ -788,7 +979,7 @@ public final class FilterProtos {
return this;
}
}
-
+
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance()) return this;
if (other.hasLimit()) {
@@ -797,7 +988,7 @@ public final class FilterProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
-
+
public final boolean isInitialized() {
if (!hasLimit()) {
@@ -805,147 +996,257 @@ public final class FilterProtos {
}
return true;
}
-
+
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 8: {
- bitField0_ |= 0x00000001;
- limit_ = input.readInt32();
- break;
- }
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
}
}
+ return this;
}
-
private int bitField0_;
-
+
// required int32 limit = 1;
private int limit_ ;
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public boolean hasLimit() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public int getLimit() {
return limit_;
}
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public Builder setLimit(int value) {
bitField0_ |= 0x00000001;
limit_ = value;
onChanged();
return this;
}
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public Builder clearLimit() {
bitField0_ = (bitField0_ & ~0x00000001);
limit_ = 0;
onChanged();
return this;
}
-
+
// @@protoc_insertion_point(builder_scope:ColumnCountGetFilter)
}
-
+
static {
defaultInstance = new ColumnCountGetFilter(true);
defaultInstance.initFields();
}
-
+
// @@protoc_insertion_point(class_scope:ColumnCountGetFilter)
}
-
+
public interface ColumnPaginationFilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
-
+
// required int32 limit = 1;
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
boolean hasLimit();
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
int getLimit();
-
+
// optional int32 offset = 2;
+ /**
+ * <code>optional int32 offset = 2;</code>
+ */
boolean hasOffset();
+ /**
+ * <code>optional int32 offset = 2;</code>
+ */
int getOffset();
-
+
// optional bytes column_offset = 3;
+ /**
+ * <code>optional bytes column_offset = 3;</code>
+ */
boolean hasColumnOffset();
+ /**
+ * <code>optional bytes column_offset = 3;</code>
+ */
com.google.protobuf.ByteString getColumnOffset();
}
+ /**
+ * Protobuf type {@code ColumnPaginationFilter}
+ */
public static final class ColumnPaginationFilter extends
com.google.protobuf.GeneratedMessage
implements ColumnPaginationFilterOrBuilder {
// Use ColumnPaginationFilter.newBuilder() to construct.
- private ColumnPaginationFilter(Builder builder) {
+ private ColumnPaginationFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private ColumnPaginationFilter(boolean noInit) {}
-
+ private ColumnPaginationFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
private static final ColumnPaginationFilter defaultInstance;
public static ColumnPaginationFilter getDefaultInstance() {
return defaultInstance;
}
-
+
public ColumnPaginationFilter getDefaultInstanceForType() {
return defaultInstance;
}
-
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ColumnPaginationFilter(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ limit_ = input.readInt32();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ offset_ = input.readInt32();
+ break;
+ }
+ case 26: {
+ bitField0_ |= 0x00000004;
+ columnOffset_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable;
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ColumnPaginationFilter> PARSER =
+ new com.google.protobuf.AbstractParser<ColumnPaginationFilter>() {
+ public ColumnPaginationFilter parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ColumnPaginationFilter(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ColumnPaginationFilter> getParserForType() {
+ return PARSER;
}
-
+
private int bitField0_;
// required int32 limit = 1;
public static final int LIMIT_FIELD_NUMBER = 1;
private int limit_;
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public boolean hasLimit() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public int getLimit() {
return limit_;
}
-
+
// optional int32 offset = 2;
public static final int OFFSET_FIELD_NUMBER = 2;
private int offset_;
+ /**
+ * <code>optional int32 offset = 2;</code>
+ */
public boolean hasOffset() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
+ /**
+ * <code>optional int32 offset = 2;</code>
+ */
public int getOffset() {
return offset_;
}
-
+
// optional bytes column_offset = 3;
public static final int COLUMN_OFFSET_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString columnOffset_;
+ /**
+ * <code>optional bytes column_offset = 3;</code>
+ */
public boolean hasColumnOffset() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
+ /**
+ * <code>optional bytes column_offset = 3;</code>
+ */
public com.google.protobuf.ByteString getColumnOffset() {
return columnOffset_;
}
-
+
private void initFields() {
limit_ = 0;
offset_ = 0;
@@ -955,7 +1256,7 @@ public final class FilterProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
-
+
if (!hasLimit()) {
memoizedIsInitialized = 0;
return false;
@@ -963,7 +1264,7 @@ public final class FilterProtos {
memoizedIsInitialized = 1;
return true;
}
-
+
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@@ -978,12 +1279,12 @@ public final class FilterProtos {
}
getUnknownFields().writeTo(output);
}
-
+
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
-
+
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@@ -1001,14 +1302,14 @@ public final class FilterProtos {
memoizedSerializedSize = size;
return size;
}
-
+
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
-
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -1018,7 +1319,7 @@ public final class FilterProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) obj;
-
+
boolean result = true;
result = result && (hasLimit() == other.hasLimit());
if (hasLimit()) {
@@ -1039,9 +1340,13 @@ public final class FilterProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
-
+
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasLimit()) {
@@ -1057,89 +1362,79 @@ public final class FilterProtos {
hash = (53 * hash) + getColumnOffset().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
return hash;
}
-
+
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
-
+
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
-
+
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
+ /**
+ * Protobuf type {@code ColumnPaginationFilter}
+ */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilterOrBuilder {
@@ -1147,18 +1442,21 @@ public final class FilterProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable;
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
}
-
+
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
-
- private Builder(BuilderParent parent) {
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -1169,7 +1467,7 @@ public final class FilterProtos {
private static Builder create() {
return new Builder();
}
-
+
public Builder clear() {
super.clear();
limit_ = 0;
@@ -1180,20 +1478,20 @@ public final class FilterProtos {
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
-
+
public Builder clone() {
return create().mergeFrom(buildPartial());
}
-
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDescriptor();
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance();
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial();
if (!result.isInitialized()) {
@@ -1201,17 +1499,7 @@ public final class FilterProtos {
}
return result;
}
-
- private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter(this);
int from_bitField0_ = bitField0_;
@@ -1232,7 +1520,7 @@ public final class FilterProtos {
onBuilt();
return result;
}
-
+
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)other);
@@ -1241,7 +1529,7 @@ public final class FilterProtos {
return this;
}
}
-
+
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance()) return this;
if (other.hasLimit()) {
@@ -1256,7 +1544,7 @@ public final class FilterProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
-
+
public final boolean isInitialized() {
if (!hasLimit()) {
@@ -1264,101 +1552,109 @@ public final class FilterProtos {
}
return true;
}
-
+
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 8: {
- bitField0_ |= 0x00000001;
- limit_ = input.readInt32();
- break;
- }
- case 16: {
- bitField0_ |= 0x00000002;
- offset_ = input.readInt32();
- break;
- }
- case 26: {
- bitField0_ |= 0x00000004;
- columnOffset_ = input.readBytes();
- break;
- }
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
}
}
+ return this;
}
-
private int bitField0_;
-
+
// required int32 limit = 1;
private int limit_ ;
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public boolean hasLimit() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public int getLimit() {
return limit_;
}
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public Builder setLimit(int value) {
bitField0_ |= 0x00000001;
limit_ = value;
onChanged();
return this;
}
+ /**
+ * <code>required int32 limit = 1;</code>
+ */
public Builder clearLimit() {
bitField0_ = (bitField0_ & ~0x00000001);
limit_ = 0;
onChanged();
return this;
}
-
+
// optional int32 offset = 2;
private int offset_ ;
+ /**
+ * <code>optional int32 offset = 2;</code>
+ */
public boolean hasOffset() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
+ /**
+ * <code>optional int32 offset = 2;</code>
+ */
public int getOffset() {
return offset_;
}
+ /**
+ * <code>optional int32 offset = 2;</code>
+ */
public Builder setOffset(int value) {
bitField0_ |= 0x00000002;
offset_ = value;
onChanged();
return this;
}
+ /**
+ * <code>optional int32 offset = 2;</code>
+ */
public Builder clearOffset() {
bitField0_ = (bitField0_ & ~0x00000002);
offset_ = 0;
onChanged();
return this;
}
-
+
// optional bytes column_offset = 3;
private com.google.protobuf.ByteString columnOffset_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>optional bytes column_offset = 3;</code>
+ */
public boolean hasColumnOffset() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
+ /**
+ * <code>optional bytes column_offset = 3;</code>
+ */
public com.google.protobuf.ByteString getColumnOffset() {
return columnOffset_;
}
+ /**
+ * <code>optional bytes column_offset = 3;</code>
+ */
public Builder setColumnOffset(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@@ -1368,70 +1664,152 @@ public final class FilterProtos {
onChanged();
return this;
}
+ /**
+ * <code>optional bytes column_offset = 3;</code>
+ */
public Builder clearColumnOffset() {
bitField0_ = (bitField0_ & ~0x00000004);
columnOffset_ = getDefaultInstance().getColumnOffset();
onChanged();
return this;
}
-
+
// @@protoc_insertion_point(builder_scope:ColumnPaginationFilter)
}
-
+
static {
defaultInstance = new ColumnPaginationFilter(true);
defaultInstance.initFields();
}
-
+
// @@protoc_insertion_point(class_scope:ColumnPaginationFilter)
}
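
The ColumnPaginationFilter message above keeps its generated builder surface (setLimit, setOffset, setColumnOffset); only the parsing path changes in this revision. A minimal client-side construction sketch, assuming the generated FilterProtos class from this commit is on the classpath (the example class name and field values are illustrative, not part of the diff):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;

public class ColumnPaginationFilterExample {
  public static void main(String[] args) {
    // Build the message with the generated builder; limit is the only required field.
    FilterProtos.ColumnPaginationFilter filter =
        FilterProtos.ColumnPaginationFilter.newBuilder()
            .setLimit(10)   // required int32 limit = 1
            .setOffset(2)   // optional int32 offset = 2
            // Alternatively, an optional bytes column_offset = 3 can be set:
            // .setColumnOffset(ByteString.copyFromUtf8("colA"))
            .build();
    System.out.println(filter.getLimit() + " " + filter.getOffset());
  }
}
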
-
+
public interface ColumnPrefixFilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
-
+
// required bytes prefix = 1;
+ /**
+ * <code>required bytes prefix = 1;</code>
+ */
boolean hasPrefix();
+ /**
+ * <code>required bytes prefix = 1;</code>
+ */
com.google.protobuf.ByteString getPrefix();
}
+ /**
+ * Protobuf type {@code ColumnPrefixFilter}
+ */
public static final class ColumnPrefixFilter extends
com.google.protobuf.GeneratedMessage
implements ColumnPrefixFilterOrBuilder {
// Use ColumnPrefixFilter.newBuilder() to construct.
- private ColumnPrefixFilter(Builder builder) {
+ private ColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private ColumnPrefixFilter(boolean noInit) {}
-
+ private ColumnPrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
private static final ColumnPrefixFilter defaultInstance;
public static ColumnPrefixFilter getDefaultInstance() {
return defaultInstance;
}
-
+
public ColumnPrefixFilter getDefaultInstanceForType() {
return defaultInstance;
}
-
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ColumnPrefixFilter(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ prefix_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable;
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ColumnPrefixFilter> PARSER =
+ new com.google.protobuf.AbstractParser<ColumnPrefixFilter>() {
+ public ColumnPrefixFilter parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ColumnPrefixFilter(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ColumnPrefixFilter> getParserForType() {
+ return PARSER;
}
-
+
private int bitField0_;
// required bytes prefix = 1;
public static final int PREFIX_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString prefix_;
+ /**
+ * <code>required bytes prefix = 1;</code>
+ */
public boolean hasPrefix() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * <code>required bytes prefix = 1;</code>
+ */
public com.google.protobuf.ByteString getPrefix() {
return prefix_;
}
-
+
private void initFields() {
prefix_ = com.google.protobuf.ByteString.EMPTY;
}
@@ -1439,7 +1817,7 @@ public final class FilterProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
-
+
if (!hasPrefix()) {
memoizedIsInitialized = 0;
return false;
@@ -1447,7 +1825,7 @@ public final class FilterProtos {
memoizedIsInitialized = 1;
return true;
}
-
+
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@@ -1456,12 +1834,12 @@ public final class FilterProtos {
}
getUnknownFields().writeTo(output);
}
-
+
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
-
+
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@@ -1471,14 +1849,14 @@ public final class FilterProtos {
memoizedSerializedSize = size;
return size;
}
-
+
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
-
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -1488,7 +1866,7 @@ public final class FilterProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) obj;
-
+
boolean result = true;
result = result && (hasPrefix() == other.hasPrefix());
if (hasPrefix()) {
@@ -1499,9 +1877,13 @@ public final class FilterProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
-
+
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasPrefix()) {
@@ -1509,89 +1891,79 @@ public final class FilterProtos {
hash = (53 * hash) + getPrefix().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
return hash;
}
-
+
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
-
+
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
-
+
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
+ /**
+ * Protobuf type {@code ColumnPrefixFilter}
+ */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilterOrBuilder {
@@ -1599,18 +1971,21 @@ public final class FilterProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable;
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
}
-
+
// Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
-
- private Builder(BuilderParent parent) {
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -1621,27 +1996,27 @@ public final class FilterProtos {
private static Builder create() {
return new Builder();
}
-
+
public Builder clear() {
super.clear();
prefix_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
-
+
public Builder clone() {
return create().mergeFrom(buildPartial());
}
-
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDescriptor();
+ return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance();
}
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter build() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial();
if (!result.isInitialized()) {
@@ -1649,17 +2024,7 @@ public final class FilterProtos {
}
return result;
}
-
- private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
+
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter(this);
int from_bitField0_ = bitField0_;
@@ -1672,7 +2037,7 @@ public final class FilterProtos {
onBuilt();
return result;
}
-
+
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)other);
@@ -1681,7 +2046,7 @@ public final class FilterProtos {
return this;
}
}
-
+
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance()) return this;
if (other.hasPrefix()) {
@@ -1690,7 +2055,7 @@ public final class FilterProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
-
+
public final boolean isInitialized() {
if (!hasPrefix()) {
@@ -1698,49 +2063,43 @@ public final class FilterProtos {
}
return true;
}
-
+
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 10: {
- bitField0_ |= 0x00000001;
- prefix_ = input.readBytes();
- break;
- }
+ org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
}
}
+ return this;
}
-
private int bitField0_;
-
+
// required bytes prefix = 1;
private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>required bytes prefix = 1;</code>
+ */
public boolean hasPrefix() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * <code>required bytes prefix = 1;</code>
+ */
public com.google.protobuf.ByteString getPrefix() {
return prefix_;
}
+ /**
+ * <code>required bytes prefix = 1;</code>
+ */
public Builder setPrefix(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@@ -1750,112 +2109,245 @@ public final class FilterProtos {
onChanged();
return this;
}
+ /**
+ * <code>required bytes prefix = 1;</code>
+ */
public Builder clearPrefix() {
bitField0_ = (bitField0_ & ~0x00000001);
prefix_ = getDefaultInstance().getPrefix();
onChanged();
return this;
}
-
+
// @@protoc_insertion_point(builder_scope:ColumnPrefixFilter)
}
-
+
static {
defaultInstance = new ColumnPrefixFilter(true);
defaultInstance.initFields();
}
-
+
// @@protoc_insertion_point(class_scope:ColumnPrefixFilter)
}
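
With this change the static parseFrom overloads on ColumnPrefixFilter delegate to the new PARSER field instead of the old newBuilder().mergeFrom(...).buildParsed() path. A minimal round-trip sketch under that assumption (prefix is the only field per the diff; toByteArray/toStringUtf8 are standard protobuf API, and the class name here is illustrative):

import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;

public class ColumnPrefixFilterRoundTrip {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // Serialize: prefix (required bytes prefix = 1) is the only field.
    byte[] wire = FilterProtos.ColumnPrefixFilter.newBuilder()
        .setPrefix(ByteString.copyFromUtf8("user_"))
        .build()
        .toByteArray();

    // Deserialize: parseFrom(byte[]) now routes through PARSER.parseFrom(data).
    FilterProtos.ColumnPrefixFilter parsed =
        FilterProtos.ColumnPrefixFilter.parseFrom(wire);
    System.out.println(parsed.hasPrefix() + " " + parsed.getPrefix().toStringUtf8());
  }
}
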
-
+
public interface ColumnRangeFilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
-
+
// optional bytes min_column = 1;
+ /**
+ * <code>optional bytes min_column = 1;</code>
+ */
boolean hasMinColumn();
+ /**
+ * <code>optional bytes min_column = 1;</code>
+ */
com.google.protobuf.ByteString getMinColumn();
-
+
// optional bool min_column_inclusive = 2;
+ /**
+ * <code>optional bool min_column_inclusive = 2;</code>
+ */
boolean hasMinColumnInclusive();
+ /**
+ * <code>optional bool min_column_inclusive = 2;</code>
+ */
boolean getMinColumnInclusive();
-
+
// optional bytes max_column = 3;
+ /**
+ * <code>optional bytes max_column = 3;</code>
+ */
boolean hasMaxColumn();
+ /**
+ * <code>optional bytes max_column = 3;</code>
+ */
com.google.protobuf.ByteString getMaxColumn();
-
+
// optional bool max_column_inclusive = 4;
+ /**
+ * <code>optional bool max_column_inclusive = 4;</code>
+ */
boolean hasMaxColumnInclusive();
+ /**
+ * <code>optional bool max_column_inclusive = 4;</code>
+ */
boolean getMaxColumnInclusive();
}
+ /**
[... 14723 lines stripped ...]