Posted to commits@hbase.apache.org by st...@apache.org on 2017/08/02 16:55:10 UTC
[44/52] [partial] hbase git commit: HBASE-17056 Remove checked in PB generated files
http://git-wip-us.apache.org/repos/asf/hbase/blob/ee70b1d2/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
----------------------------------------------------------------------
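(Context: the file below was emitted by the protocol buffer compiler from Aggregate.proto, as its header notes. With HBASE-17056, such output is produced at build time rather than kept in the tree, for example via an invocation along the lines of protoc --java_out=<generated-sources dir> Aggregate.proto; the exact wiring in the HBase build may differ. Short usage sketches of the API being removed follow each class body below.)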
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
deleted file mode 100644
index 508790c..0000000
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
+++ /dev/null
@@ -1,2375 +0,0 @@
-// Generated by the protocol buffer compiler. DO NOT EDIT!
-// source: Aggregate.proto
-
-package org.apache.hadoop.hbase.protobuf.generated;
-
-public final class AggregateProtos {
- private AggregateProtos() {}
- public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
- }
- public interface AggregateRequestOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // required string interpreter_class_name = 1;
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- boolean hasInterpreterClassName();
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- java.lang.String getInterpreterClassName();
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- com.google.protobuf.ByteString
- getInterpreterClassNameBytes();
-
- // required .hbase.pb.Scan scan = 2;
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- boolean hasScan();
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
-
- // optional bytes interpreter_specific_bytes = 3;
- /**
- * <code>optional bytes interpreter_specific_bytes = 3;</code>
- */
- boolean hasInterpreterSpecificBytes();
- /**
- * <code>optional bytes interpreter_specific_bytes = 3;</code>
- */
- com.google.protobuf.ByteString getInterpreterSpecificBytes();
- }
- /**
- * Protobuf type {@code hbase.pb.AggregateRequest}
- */
- public static final class AggregateRequest extends
- com.google.protobuf.GeneratedMessage
- implements AggregateRequestOrBuilder {
- // Use AggregateRequest.newBuilder() to construct.
- private AggregateRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
- super(builder);
- this.unknownFields = builder.getUnknownFields();
- }
- private AggregateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final AggregateRequest defaultInstance;
- public static AggregateRequest getDefaultInstance() {
- return defaultInstance;
- }
-
- public AggregateRequest getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private AggregateRequest(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- case 10: {
- bitField0_ |= 0x00000001;
- interpreterClassName_ = input.readBytes();
- break;
- }
- case 18: {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null;
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- subBuilder = scan_.toBuilder();
- }
- scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry);
- if (subBuilder != null) {
- subBuilder.mergeFrom(scan_);
- scan_ = subBuilder.buildPartial();
- }
- bitField0_ |= 0x00000002;
- break;
- }
- case 26: {
- bitField0_ |= 0x00000004;
- interpreterSpecificBytes_ = input.readBytes();
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.Builder.class);
- }
-
- public static com.google.protobuf.Parser<AggregateRequest> PARSER =
- new com.google.protobuf.AbstractParser<AggregateRequest>() {
- public AggregateRequest parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new AggregateRequest(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<AggregateRequest> getParserForType() {
- return PARSER;
- }
-
- private int bitField0_;
- // required string interpreter_class_name = 1;
- public static final int INTERPRETER_CLASS_NAME_FIELD_NUMBER = 1;
- private java.lang.Object interpreterClassName_;
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- public boolean hasInterpreterClassName() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- public java.lang.String getInterpreterClassName() {
- java.lang.Object ref = interpreterClassName_;
- if (ref instanceof java.lang.String) {
- return (java.lang.String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- interpreterClassName_ = s;
- }
- return s;
- }
- }
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- public com.google.protobuf.ByteString
- getInterpreterClassNameBytes() {
- java.lang.Object ref = interpreterClassName_;
- if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- interpreterClassName_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- // required .hbase.pb.Scan scan = 2;
- public static final int SCAN_FIELD_NUMBER = 2;
- private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- public boolean hasScan() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
- return scan_;
- }
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
- return scan_;
- }
-
- // optional bytes interpreter_specific_bytes = 3;
- public static final int INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER = 3;
- private com.google.protobuf.ByteString interpreterSpecificBytes_;
- /**
- * <code>optional bytes interpreter_specific_bytes = 3;</code>
- */
- public boolean hasInterpreterSpecificBytes() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- /**
- * <code>optional bytes interpreter_specific_bytes = 3;</code>
- */
- public com.google.protobuf.ByteString getInterpreterSpecificBytes() {
- return interpreterSpecificBytes_;
- }
-
- private void initFields() {
- interpreterClassName_ = "";
- scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
- interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (!hasInterpreterClassName()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (!hasScan()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (!getScan().isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, getInterpreterClassNameBytes());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeMessage(2, scan_);
- }
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- output.writeBytes(3, interpreterSpecificBytes_);
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, getInterpreterClassNameBytes());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(2, scan_);
- }
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(3, interpreterSpecificBytes_);
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) {
- return super.equals(obj);
- }
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj;
-
- boolean result = true;
- result = result && (hasInterpreterClassName() == other.hasInterpreterClassName());
- if (hasInterpreterClassName()) {
- result = result && getInterpreterClassName()
- .equals(other.getInterpreterClassName());
- }
- result = result && (hasScan() == other.hasScan());
- if (hasScan()) {
- result = result && getScan()
- .equals(other.getScan());
- }
- result = result && (hasInterpreterSpecificBytes() == other.hasInterpreterSpecificBytes());
- if (hasInterpreterSpecificBytes()) {
- result = result && getInterpreterSpecificBytes()
- .equals(other.getInterpreterSpecificBytes());
- }
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- private int memoizedHashCode = 0;
- @java.lang.Override
- public int hashCode() {
- if (memoizedHashCode != 0) {
- return memoizedHashCode;
- }
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasInterpreterClassName()) {
- hash = (37 * hash) + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
- hash = (53 * hash) + getInterpreterClassName().hashCode();
- }
- if (hasScan()) {
- hash = (37 * hash) + SCAN_FIELD_NUMBER;
- hash = (53 * hash) + getScan().hashCode();
- }
- if (hasInterpreterSpecificBytes()) {
- hash = (37 * hash) + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
- hash = (53 * hash) + getInterpreterSpecificBytes().hashCode();
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- memoizedHashCode = hash;
- return hash;
- }
-
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- /**
- * Protobuf type {@code hbase.pb.AggregateRequest}
- */
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequestOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.Builder.class);
- }
-
- // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- getScanFieldBuilder();
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- interpreterClassName_ = "";
- bitField0_ = (bitField0_ & ~0x00000001);
- if (scanBuilder_ == null) {
- scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
- } else {
- scanBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000002);
- interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
- bitField0_ = (bitField0_ & ~0x00000004);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor;
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest getDefaultInstanceForType() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest build() {
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest buildPartial() {
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- result.interpreterClassName_ = interpreterClassName_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000002;
- }
- if (scanBuilder_ == null) {
- result.scan_ = scan_;
- } else {
- result.scan_ = scanBuilder_.build();
- }
- if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
- to_bitField0_ |= 0x00000004;
- }
- result.interpreterSpecificBytes_ = interpreterSpecificBytes_;
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) {
- return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other) {
- if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance()) return this;
- if (other.hasInterpreterClassName()) {
- bitField0_ |= 0x00000001;
- interpreterClassName_ = other.interpreterClassName_;
- onChanged();
- }
- if (other.hasScan()) {
- mergeScan(other.getScan());
- }
- if (other.hasInterpreterSpecificBytes()) {
- setInterpreterSpecificBytes(other.getInterpreterSpecificBytes());
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (!hasInterpreterClassName()) {
-
- return false;
- }
- if (!hasScan()) {
-
- return false;
- }
- if (!getScan().isInitialized()) {
-
- return false;
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
- }
- }
- return this;
- }
- private int bitField0_;
-
- // required string interpreter_class_name = 1;
- private java.lang.Object interpreterClassName_ = "";
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- public boolean hasInterpreterClassName() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- public java.lang.String getInterpreterClassName() {
- java.lang.Object ref = interpreterClassName_;
- if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- interpreterClassName_ = s;
- return s;
- } else {
- return (java.lang.String) ref;
- }
- }
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- public com.google.protobuf.ByteString
- getInterpreterClassNameBytes() {
- java.lang.Object ref = interpreterClassName_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- interpreterClassName_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- public Builder setInterpreterClassName(
- java.lang.String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- interpreterClassName_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- public Builder clearInterpreterClassName() {
- bitField0_ = (bitField0_ & ~0x00000001);
- interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
- onChanged();
- return this;
- }
- /**
- * <code>required string interpreter_class_name = 1;</code>
- *
- * <pre>
- ** The request passed to the AggregateService consists of three parts
- * (1) the (canonical) classname of the ColumnInterpreter implementation
- * (2) the Scan query
- * (3) any bytes required to construct the ColumnInterpreter object
- * properly
- * </pre>
- */
- public Builder setInterpreterClassNameBytes(
- com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- interpreterClassName_ = value;
- onChanged();
- return this;
- }
-
- // required .hbase.pb.Scan scan = 2;
- private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- public boolean hasScan() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
- if (scanBuilder_ == null) {
- return scan_;
- } else {
- return scanBuilder_.getMessage();
- }
- }
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
- if (scanBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- scan_ = value;
- onChanged();
- } else {
- scanBuilder_.setMessage(value);
- }
- bitField0_ |= 0x00000002;
- return this;
- }
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- public Builder setScan(
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
- if (scanBuilder_ == null) {
- scan_ = builderForValue.build();
- onChanged();
- } else {
- scanBuilder_.setMessage(builderForValue.build());
- }
- bitField0_ |= 0x00000002;
- return this;
- }
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
- if (scanBuilder_ == null) {
- if (((bitField0_ & 0x00000002) == 0x00000002) &&
- scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
- scan_ =
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
- } else {
- scan_ = value;
- }
- onChanged();
- } else {
- scanBuilder_.mergeFrom(value);
- }
- bitField0_ |= 0x00000002;
- return this;
- }
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- public Builder clearScan() {
- if (scanBuilder_ == null) {
- scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
- onChanged();
- } else {
- scanBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000002);
- return this;
- }
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
- bitField0_ |= 0x00000002;
- onChanged();
- return getScanFieldBuilder().getBuilder();
- }
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
- if (scanBuilder_ != null) {
- return scanBuilder_.getMessageOrBuilder();
- } else {
- return scan_;
- }
- }
- /**
- * <code>required .hbase.pb.Scan scan = 2;</code>
- */
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
- getScanFieldBuilder() {
- if (scanBuilder_ == null) {
- scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
- scan_,
- getParentForChildren(),
- isClean());
- scan_ = null;
- }
- return scanBuilder_;
- }
-
- // optional bytes interpreter_specific_bytes = 3;
- private com.google.protobuf.ByteString interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
- /**
- * <code>optional bytes interpreter_specific_bytes = 3;</code>
- */
- public boolean hasInterpreterSpecificBytes() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- /**
- * <code>optional bytes interpreter_specific_bytes = 3;</code>
- */
- public com.google.protobuf.ByteString getInterpreterSpecificBytes() {
- return interpreterSpecificBytes_;
- }
- /**
- * <code>optional bytes interpreter_specific_bytes = 3;</code>
- */
- public Builder setInterpreterSpecificBytes(com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000004;
- interpreterSpecificBytes_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>optional bytes interpreter_specific_bytes = 3;</code>
- */
- public Builder clearInterpreterSpecificBytes() {
- bitField0_ = (bitField0_ & ~0x00000004);
- interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes();
- onChanged();
- return this;
- }
-
- // @@protoc_insertion_point(builder_scope:hbase.pb.AggregateRequest)
- }
-
- static {
- defaultInstance = new AggregateRequest(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:hbase.pb.AggregateRequest)
- }
-
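
For reference, a minimal sketch of driving the builder API above. The interpreter class name shown is just one stock ColumnInterpreter; per isInitialized(), interpreter_class_name and scan are required (build() throws if either is missing) while interpreter_specific_bytes may be omitted:

import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

public class AggregateRequestSketch {
  public static AggregateRequest newRequest() throws InvalidProtocolBufferException {
    AggregateRequest request = AggregateRequest.newBuilder()
        // Required field 1: canonical class name of the ColumnInterpreter.
        .setInterpreterClassName(
            "org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter")
        // Required field 2: the Scan to run; left empty here for brevity.
        .setScan(ClientProtos.Scan.newBuilder().build())
        // Optional field 3: interpreter construction bytes; empty here.
        .setInterpreterSpecificBytes(ByteString.EMPTY)
        .build();
    // Round trip through the wire format using the parseFrom overloads above.
    return AggregateRequest.parseFrom(request.toByteArray());
  }
}
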
- public interface AggregateResponseOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // repeated bytes first_part = 1;
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- java.util.List<com.google.protobuf.ByteString> getFirstPartList();
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- int getFirstPartCount();
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- com.google.protobuf.ByteString getFirstPart(int index);
-
- // optional bytes second_part = 2;
- /**
- * <code>optional bytes second_part = 2;</code>
- */
- boolean hasSecondPart();
- /**
- * <code>optional bytes second_part = 2;</code>
- */
- com.google.protobuf.ByteString getSecondPart();
- }
- /**
- * Protobuf type {@code hbase.pb.AggregateResponse}
- */
- public static final class AggregateResponse extends
- com.google.protobuf.GeneratedMessage
- implements AggregateResponseOrBuilder {
- // Use AggregateResponse.newBuilder() to construct.
- private AggregateResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
- super(builder);
- this.unknownFields = builder.getUnknownFields();
- }
- private AggregateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final AggregateResponse defaultInstance;
- public static AggregateResponse getDefaultInstance() {
- return defaultInstance;
- }
-
- public AggregateResponse getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private AggregateResponse(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- case 10: {
- if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
- firstPart_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
- mutable_bitField0_ |= 0x00000001;
- }
- firstPart_.add(input.readBytes());
- break;
- }
- case 18: {
- bitField0_ |= 0x00000001;
- secondPart_ = input.readBytes();
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
- firstPart_ = java.util.Collections.unmodifiableList(firstPart_);
- }
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class);
- }
-
- public static com.google.protobuf.Parser<AggregateResponse> PARSER =
- new com.google.protobuf.AbstractParser<AggregateResponse>() {
- public AggregateResponse parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new AggregateResponse(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<AggregateResponse> getParserForType() {
- return PARSER;
- }
-
- private int bitField0_;
- // repeated bytes first_part = 1;
- public static final int FIRST_PART_FIELD_NUMBER = 1;
- private java.util.List<com.google.protobuf.ByteString> firstPart_;
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- public java.util.List<com.google.protobuf.ByteString>
- getFirstPartList() {
- return firstPart_;
- }
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- public int getFirstPartCount() {
- return firstPart_.size();
- }
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- public com.google.protobuf.ByteString getFirstPart(int index) {
- return firstPart_.get(index);
- }
-
- // optional bytes second_part = 2;
- public static final int SECOND_PART_FIELD_NUMBER = 2;
- private com.google.protobuf.ByteString secondPart_;
- /**
- * <code>optional bytes second_part = 2;</code>
- */
- public boolean hasSecondPart() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>optional bytes second_part = 2;</code>
- */
- public com.google.protobuf.ByteString getSecondPart() {
- return secondPart_;
- }
-
- private void initFields() {
- firstPart_ = java.util.Collections.emptyList();
- secondPart_ = com.google.protobuf.ByteString.EMPTY;
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- for (int i = 0; i < firstPart_.size(); i++) {
- output.writeBytes(1, firstPart_.get(i));
- }
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(2, secondPart_);
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- {
- int dataSize = 0;
- for (int i = 0; i < firstPart_.size(); i++) {
- dataSize += com.google.protobuf.CodedOutputStream
- .computeBytesSizeNoTag(firstPart_.get(i));
- }
- size += dataSize;
- size += 1 * getFirstPartList().size();
- }
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, secondPart_);
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)) {
- return super.equals(obj);
- }
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) obj;
-
- boolean result = true;
- result = result && getFirstPartList()
- .equals(other.getFirstPartList());
- result = result && (hasSecondPart() == other.hasSecondPart());
- if (hasSecondPart()) {
- result = result && getSecondPart()
- .equals(other.getSecondPart());
- }
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- private int memoizedHashCode = 0;
- @java.lang.Override
- public int hashCode() {
- if (memoizedHashCode != 0) {
- return memoizedHashCode;
- }
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (getFirstPartCount() > 0) {
- hash = (37 * hash) + FIRST_PART_FIELD_NUMBER;
- hash = (53 * hash) + getFirstPartList().hashCode();
- }
- if (hasSecondPart()) {
- hash = (37 * hash) + SECOND_PART_FIELD_NUMBER;
- hash = (53 * hash) + getSecondPart().hashCode();
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- memoizedHashCode = hash;
- return hash;
- }
-
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- /**
- * Protobuf type {@code hbase.pb.AggregateResponse}
- */
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponseOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class);
- }
-
- // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- firstPart_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000001);
- secondPart_ = com.google.protobuf.ByteString.EMPTY;
- bitField0_ = (bitField0_ & ~0x00000002);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor;
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getDefaultInstanceForType() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse build() {
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildPartial() {
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- firstPart_ = java.util.Collections.unmodifiableList(firstPart_);
- bitField0_ = (bitField0_ & ~0x00000001);
- }
- result.firstPart_ = firstPart_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000001;
- }
- result.secondPart_ = secondPart_;
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) {
- return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other) {
- if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()) return this;
- if (!other.firstPart_.isEmpty()) {
- if (firstPart_.isEmpty()) {
- firstPart_ = other.firstPart_;
- bitField0_ = (bitField0_ & ~0x00000001);
- } else {
- ensureFirstPartIsMutable();
- firstPart_.addAll(other.firstPart_);
- }
- onChanged();
- }
- if (other.hasSecondPart()) {
- setSecondPart(other.getSecondPart());
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
- }
- }
- return this;
- }
- private int bitField0_;
-
- // repeated bytes first_part = 1;
- private java.util.List<com.google.protobuf.ByteString> firstPart_ = java.util.Collections.emptyList();
- private void ensureFirstPartIsMutable() {
- if (!((bitField0_ & 0x00000001) == 0x00000001)) {
- firstPart_ = new java.util.ArrayList<com.google.protobuf.ByteString>(firstPart_);
- bitField0_ |= 0x00000001;
- }
- }
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- public java.util.List<com.google.protobuf.ByteString>
- getFirstPartList() {
- return java.util.Collections.unmodifiableList(firstPart_);
- }
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- public int getFirstPartCount() {
- return firstPart_.size();
- }
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- public com.google.protobuf.ByteString getFirstPart(int index) {
- return firstPart_.get(index);
- }
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- public Builder setFirstPart(
- int index, com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- ensureFirstPartIsMutable();
- firstPart_.set(index, value);
- onChanged();
- return this;
- }
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- public Builder addFirstPart(com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- ensureFirstPartIsMutable();
- firstPart_.add(value);
- onChanged();
- return this;
- }
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- public Builder addAllFirstPart(
- java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
- ensureFirstPartIsMutable();
- super.addAll(values, firstPart_);
- onChanged();
- return this;
- }
- /**
- * <code>repeated bytes first_part = 1;</code>
- *
- * <pre>
- **
- * The AggregateService methods all have a response that either is a Pair
- * or a simple object. When it is a Pair both first_part and second_part
- * have defined values (and the second_part is not present in the response
- * when the response is not a pair). Refer to the AggregateImplementation
- * class for an overview of the AggregateResponse object constructions.
- * </pre>
- */
- public Builder clearFirstPart() {
- firstPart_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000001);
- onChanged();
- return this;
- }
-
- // optional bytes second_part = 2;
- private com.google.protobuf.ByteString secondPart_ = com.google.protobuf.ByteString.EMPTY;
- /**
- * <code>optional bytes second_part = 2;</code>
- */
- public boolean hasSecondPart() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- /**
- * <code>optional bytes second_part = 2;</code>
- */
- public com.google.protobuf.ByteString getSecondPart() {
- return secondPart_;
- }
- /**
- * <code>optional bytes second_part = 2;</code>
- */
- public Builder setSecondPart(com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000002;
- secondPart_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>optional bytes second_part = 2;</code>
- */
- public Builder clearSecondPart() {
- bitField0_ = (bitField0_ & ~0x00000002);
- secondPart_ = getDefaultInstance().getSecondPart();
- onChanged();
- return this;
- }
-
- // @@protoc_insertion_point(builder_scope:hbase.pb.AggregateResponse)
- }
-
- static {
- defaultInstance = new AggregateResponse(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:hbase.pb.AggregateResponse)
- }
-
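
And the consumer side: per the field comments, second_part is only present when the response is a Pair, so callers check hasSecondPart() before reading it. A minimal sketch:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse;

public class AggregateResponseSketch {
  public static void inspect(AggregateResponse response) {
    // repeated bytes first_part = 1 (see the field comments above).
    for (ByteString part : response.getFirstPartList()) {
      System.out.println("first_part element of " + part.size() + " bytes");
    }
    // optional bytes second_part = 2; present only for Pair-shaped responses.
    if (response.hasSecondPart()) {
      System.out.println("second_part of " + response.getSecondPart().size() + " bytes");
    }
  }
}
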
- /**
- * Protobuf service {@code hbase.pb.AggregateService}
- *
- * <pre>
- ** Refer to the AggregateImplementation class for an overview of the
- * AggregateService method implementations and their functionality.
- * </pre>
- */
- public static abstract class AggregateService
- implements com.google.protobuf.Service {
- protected AggregateService() {}
-
- public interface Interface {
- /**
- * <code>rpc GetMax(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getMax(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetMin(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getMin(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetSum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getSum(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetRowNum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getRowNum(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetAvg(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getAvg(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetStd(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getStd(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetMedian(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getMedian(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- }
-
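The Interface above mirrors the seven RPCs declared in Aggregate.proto, each taking an AggregateRequest and completing asynchronously through an RpcCallback. A sketch of an implementation is shown below for getMax only; the class name is hypothetical, and it is left abstract so the remaining six methods, which have the identical shape, can be omitted:

    import com.google.protobuf.RpcCallback;
    import com.google.protobuf.RpcController;
    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest;
    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse;
    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;

    /** Hypothetical partial implementation; abstract so getMin..getMedian may be elided. */
    public abstract class NoOpAggregate implements AggregateService.Interface {
      @Override
      public void getMax(RpcController controller, AggregateRequest request,
          RpcCallback<AggregateResponse> done) {
        // An RPC completes by invoking the callback exactly once with a
        // response message; an empty response is returned for illustration.
        done.run(AggregateResponse.getDefaultInstance());
      }
    }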
- public static com.google.protobuf.Service newReflectiveService(
- final Interface impl) {
- return new AggregateService() {
- @java.lang.Override
- public void getMax(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
- impl.getMax(controller, request, done);
- }
-
- @java.lang.Override
- public void getMin(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
- impl.getMin(controller, request, done);
- }
-
- @java.lang.Override
- public void getSum(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
- impl.getSum(controller, request, done);
- }
-
- @java.lang.Override
- public void getRowNum(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
- impl.getRowNum(controller, request, done);
- }
-
- @java.lang.Override
- public void getAvg(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
- impl.getAvg(controller, request, done);
- }
-
- @java.lang.Override
- public void getStd(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
- impl.getStd(controller, request, done);
- }
-
- @java.lang.Override
- public void getMedian(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
- impl.getMedian(controller, request, done);
- }
-
- };
- }
-
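newReflectiveService adapts any Interface implementation into a full com.google.protobuf.Service by delegating each override to the matching Interface method, so generic RPC plumbing can dispatch by MethodDescriptor without knowing the concrete service. A trivial sketch of that wiring:

    import com.google.protobuf.Service;
    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;

    final class ReflectiveWiring {
      /** Wraps any Interface implementation as a generic protobuf Service. */
      static Service wrap(AggregateService.Interface impl) {
        return AggregateService.newReflectiveService(impl);
      }
    }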
- public static com.google.protobuf.BlockingService
- newReflectiveBlockingService(final BlockingInterface impl) {
- return new com.google.protobuf.BlockingService() {
- public final com.google.protobuf.Descriptors.ServiceDescriptor
- getDescriptorForType() {
- return getDescriptor();
- }
-
- public final com.google.protobuf.Message callBlockingMethod(
- com.google.protobuf.Descriptors.MethodDescriptor method,
- com.google.protobuf.RpcController controller,
- com.google.protobuf.Message request)
- throws com.google.protobuf.ServiceException {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.callBlockingMethod() given method descriptor for " +
- "wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return impl.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
- case 1:
- return impl.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
- case 2:
- return impl.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
- case 3:
- return impl.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
- case 4:
- return impl.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
- case 5:
- return impl.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
- case 6:
- return impl.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getRequestPrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getRequestPrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 1:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 2:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 3:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 4:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 5:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 6:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getResponsePrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getResponsePrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 1:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 2:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 3:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 4:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 5:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 6:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- };
- }
-
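The blocking wrapper above dispatches on method.getIndex(), which follows declaration order in Aggregate.proto (0 = GetMax through 6 = GetMedian). A sketch of driving it generically follows; impl is any BlockingInterface (declared later in this file), and the controller is left to the caller:

    import com.google.protobuf.BlockingService;
    import com.google.protobuf.Message;
    import com.google.protobuf.RpcController;
    import com.google.protobuf.ServiceException;
    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest;
    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;

    final class BlockingDispatchSketch {
      /** Calls GetRowNum (method index 3) through the reflective wrapper. */
      static Message rowNum(AggregateService.BlockingInterface impl,
          RpcController controller) throws ServiceException {
        BlockingService svc =
            AggregateService.newReflectiveBlockingService(impl);
        return svc.callBlockingMethod(
            AggregateService.getDescriptor().getMethods().get(3), // GetRowNum
            controller,
            AggregateRequest.getDefaultInstance());
      }
    }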
- /**
- * <code>rpc GetMax(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getMax(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetMin(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getMin(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetSum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getSum(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetRowNum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getRowNum(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetAvg(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getAvg(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetStd(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getStd(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- /**
- * <code>rpc GetMedian(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
- */
- public abstract void getMedian(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
-
- public static final
- com.google.protobuf.Descriptors.ServiceDescriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.getDescriptor().getServices().get(0);
- }
- public final com.google.protobuf.Descriptors.ServiceDescriptor
- getDescriptorForType() {
- return getDescriptor();
- }
-
- public final void callMethod(
- com.google.protobuf.Descriptors.MethodDescriptor method,
- com.google.protobuf.RpcController controller,
- com.google.protobuf.Message request,
- com.google.protobuf.RpcCallback<
- com.google.protobuf.Message> done) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.callMethod() given method descriptor for wrong " +
- "service type.");
- }
- switch(method.getIndex()) {
- case 0:
- this.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
- com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
- done));
- return;
- case 1:
- this.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
- com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
- done));
- return;
- case 2:
- this.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
- com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
- done));
- return;
- case 3:
- this.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
- com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
- done));
- return;
- case 4:
- this.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
- com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
- done));
- return;
- case 5:
- this.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
- com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
- done));
- return;
- case 6:
- this.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
- com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
- done));
- return;
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getRequestPrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getRequestPrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 1:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 2:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 3:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 4:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 5:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- case 6:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getResponsePrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getResponsePrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 1:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 2:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 3:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 4:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 5:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- case 6:
- return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public static Stub newStub(
- com.google.protobuf.RpcChannel channel) {
- return new Stub(channel);
- }
-
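newStub binds the generated client to an arbitrary RpcChannel. In HBase that channel typically comes from Table#coprocessorService(byte[]); the sketch below assumes the pre-HBASE-17056 client API, where that call returns a CoprocessorRpcChannel scoped to the region owning the given row, with error handling omitted:

    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;

    final class StubWiring {
      /** Builds an async stub targeting the region that owns {@code row}. */
      static AggregateService newStubFor(Table table, byte[] row) {
        CoprocessorRpcChannel channel = table.coprocessorService(row);
        return AggregateService.newStub(channel);
      }
    }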
- public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService implements Interface {
- private Stub(com.google.protobuf.RpcChannel channel) {
- this.channel = channel;
- }
-
- private final com.google.protobuf.RpcChannel channel;
-
- public com.google.protobuf.RpcChannel getChannel() {
- return channel;
- }
-
- public void getMax(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
- channel.callMethod(
- getDescriptor().getMethods().get(0),
- controller,
- request,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
- com.google.protobuf.RpcUtil.generalizeCallback(
- done,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()));
- }
-
- public void getMin(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
- channel.callMethod(
- getDescriptor().getMethods().get(1),
- controller,
- request,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
- com.google.protobuf.RpcUtil.generalizeCallback(
- done,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()));
- }
-
- public void getSum(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
- com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
- channel.callMethod(
- getDescriptor().getMethods().get(2),
- controller,
- request,
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
- com.google.protobuf.RpcUtil.generalizeCallback(
- done,
- org.apache.hadoop.hbase.
<TRUNCATED>