Posted to commits@hbase.apache.org by te...@apache.org on 2012/10/15 04:32:09 UTC

svn commit: r1398175 [2/2] - in /hbase/trunk/hbase-server/src/main: java/org/apache/hadoop/hbase/client/coprocessor/ java/org/apache/hadoop/hbase/coprocessor/ java/org/apache/hadoop/hbase/protobuf/generated/ protobuf/
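
For reference, the generated classes in this file correspond to the protobuf definitions sketched below. The messages and service are reconstructed from the generated interfaces (field names, numbers, labels, and rpc method names are taken directly from the code); the option lines and the Client.proto import are assumptions inferred from the generated package name, outer class name, generic service stubs, and the ClientProtos.Scan dependency, not verbatim from Aggregate.proto.

// Sketch of Aggregate.proto as implied by the generated code below.
// Option lines are inferred, not verbatim.
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "AggregateProtos";
option java_generic_services = true;         // implied by the generated AggregateService classes
option java_generate_equals_and_hash = true; // implied by the generated equals()/hashCode()

import "Client.proto";  // assumed: supplies the Scan message (ClientProtos.Scan)

message AggregateArgument {
  required string interpreterClassName = 1;
  required Scan scan = 2;
  optional bytes interpreterSpecificBytes = 3;
}

message AggregateResponse {
  repeated bytes firstPart = 1;
  optional bytes secondPart = 2;
}

service AggregateService {  // rpc names assumed to match the generated Java method names
  rpc getMax (AggregateArgument) returns (AggregateResponse);
  rpc getMin (AggregateArgument) returns (AggregateResponse);
  rpc getSum (AggregateArgument) returns (AggregateResponse);
  rpc getRowNum (AggregateArgument) returns (AggregateResponse);
  rpc getAvg (AggregateArgument) returns (AggregateResponse);
  rpc getStd (AggregateArgument) returns (AggregateResponse);
  rpc getMedian (AggregateArgument) returns (AggregateResponse);
}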

Added: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java?rev=1398175&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java (added)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java Mon Oct 15 02:32:09 2012
@@ -0,0 +1,1829 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: Aggregate.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class AggregateProtos {
+  private AggregateProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface AggregateArgumentOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+    
+    // required string interpreterClassName = 1;
+    boolean hasInterpreterClassName();
+    String getInterpreterClassName();
+    
+    // required .Scan scan = 2;
+    boolean hasScan();
+    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
+    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
+    
+    // optional bytes interpreterSpecificBytes = 3;
+    boolean hasInterpreterSpecificBytes();
+    com.google.protobuf.ByteString getInterpreterSpecificBytes();
+  }
+  public static final class AggregateArgument extends
+      com.google.protobuf.GeneratedMessage
+      implements AggregateArgumentOrBuilder {
+    // Use AggregateArgument.newBuilder() to construct.
+    private AggregateArgument(Builder builder) {
+      super(builder);
+    }
+    private AggregateArgument(boolean noInit) {}
+    
+    private static final AggregateArgument defaultInstance;
+    public static AggregateArgument getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public AggregateArgument getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable;
+    }
+    
+    private int bitField0_;
+    // required string interpreterClassName = 1;
+    public static final int INTERPRETERCLASSNAME_FIELD_NUMBER = 1;
+    private java.lang.Object interpreterClassName_;
+    public boolean hasInterpreterClassName() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public String getInterpreterClassName() {
+      java.lang.Object ref = interpreterClassName_;
+      if (ref instanceof String) {
+        return (String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        String s = bs.toStringUtf8();
+        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+          interpreterClassName_ = s;
+        }
+        return s;
+      }
+    }
+    private com.google.protobuf.ByteString getInterpreterClassNameBytes() {
+      java.lang.Object ref = interpreterClassName_;
+      if (ref instanceof String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+        interpreterClassName_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+    
+    // required .Scan scan = 2;
+    public static final int SCAN_FIELD_NUMBER = 2;
+    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
+    public boolean hasScan() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+      return scan_;
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+      return scan_;
+    }
+    
+    // optional bytes interpreterSpecificBytes = 3;
+    public static final int INTERPRETERSPECIFICBYTES_FIELD_NUMBER = 3;
+    private com.google.protobuf.ByteString interpreterSpecificBytes_;
+    public boolean hasInterpreterSpecificBytes() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    public com.google.protobuf.ByteString getInterpreterSpecificBytes() {
+      return interpreterSpecificBytes_;
+    }
+    
+    private void initFields() {
+      interpreterClassName_ = "";
+      scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+      interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+      
+      if (!hasInterpreterClassName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasScan()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!getScan().isInitialized()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getInterpreterClassNameBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeMessage(2, scan_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeBytes(3, interpreterSpecificBytes_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getInterpreterClassNameBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(2, scan_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(3, interpreterSpecificBytes_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+    
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) obj;
+      
+      boolean result = true;
+      result = result && (hasInterpreterClassName() == other.hasInterpreterClassName());
+      if (hasInterpreterClassName()) {
+        result = result && getInterpreterClassName()
+            .equals(other.getInterpreterClassName());
+      }
+      result = result && (hasScan() == other.hasScan());
+      if (hasScan()) {
+        result = result && getScan()
+            .equals(other.getScan());
+      }
+      result = result && (hasInterpreterSpecificBytes() == other.hasInterpreterSpecificBytes());
+      if (hasInterpreterSpecificBytes()) {
+        result = result && getInterpreterSpecificBytes()
+            .equals(other.getInterpreterSpecificBytes());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+    
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasInterpreterClassName()) {
+        hash = (37 * hash) + INTERPRETERCLASSNAME_FIELD_NUMBER;
+        hash = (53 * hash) + getInterpreterClassName().hashCode();
+      }
+      if (hasScan()) {
+        hash = (37 * hash) + SCAN_FIELD_NUMBER;
+        hash = (53 * hash) + getScan().hashCode();
+      }
+      if (hasInterpreterSpecificBytes()) {
+        hash = (37 * hash) + INTERPRETERSPECIFICBYTES_FIELD_NUMBER;
+        hash = (53 * hash) + getInterpreterSpecificBytes().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+    
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgumentOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor;
+      }
+      
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable;
+      }
+      
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+      
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getScanFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+      
+      public Builder clear() {
+        super.clear();
+        interpreterClassName_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        if (scanBuilder_ == null) {
+          scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+        } else {
+          scanBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000002);
+        interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000004);
+        return this;
+      }
+      
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+      
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDescriptor();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument build() {
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+      
+      private org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.interpreterClassName_ = interpreterClassName_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        if (scanBuilder_ == null) {
+          result.scan_ = scan_;
+        } else {
+          result.scan_ = scanBuilder_.build();
+        }
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.interpreterSpecificBytes_ = interpreterSpecificBytes_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+      
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance()) return this;
+        if (other.hasInterpreterClassName()) {
+          setInterpreterClassName(other.getInterpreterClassName());
+        }
+        if (other.hasScan()) {
+          mergeScan(other.getScan());
+        }
+        if (other.hasInterpreterSpecificBytes()) {
+          setInterpreterSpecificBytes(other.getInterpreterSpecificBytes());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      public final boolean isInitialized() {
+        if (!hasInterpreterClassName()) {
+          
+          return false;
+        }
+        if (!hasScan()) {
+          
+          return false;
+        }
+        if (!getScan().isInitialized()) {
+          
+          return false;
+        }
+        return true;
+      }
+      
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              interpreterClassName_ = input.readBytes();
+              break;
+            }
+            case 18: {
+              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder();
+              if (hasScan()) {
+                subBuilder.mergeFrom(getScan());
+              }
+              input.readMessage(subBuilder, extensionRegistry);
+              setScan(subBuilder.buildPartial());
+              break;
+            }
+            case 26: {
+              bitField0_ |= 0x00000004;
+              interpreterSpecificBytes_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      }
+      
+      private int bitField0_;
+      
+      // required string interpreterClassName = 1;
+      private java.lang.Object interpreterClassName_ = "";
+      public boolean hasInterpreterClassName() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public String getInterpreterClassName() {
+        java.lang.Object ref = interpreterClassName_;
+        if (!(ref instanceof String)) {
+          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+          interpreterClassName_ = s;
+          return s;
+        } else {
+          return (String) ref;
+        }
+      }
+      public Builder setInterpreterClassName(String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        interpreterClassName_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearInterpreterClassName() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
+        onChanged();
+        return this;
+      }
+      void setInterpreterClassName(com.google.protobuf.ByteString value) {
+        bitField0_ |= 0x00000001;
+        interpreterClassName_ = value;
+        onChanged();
+      }
+      
+      // required .Scan scan = 2;
+      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
+      public boolean hasScan() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+        if (scanBuilder_ == null) {
+          return scan_;
+        } else {
+          return scanBuilder_.getMessage();
+        }
+      }
+      public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+        if (scanBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          scan_ = value;
+          onChanged();
+        } else {
+          scanBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000002;
+        return this;
+      }
+      public Builder setScan(
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
+        if (scanBuilder_ == null) {
+          scan_ = builderForValue.build();
+          onChanged();
+        } else {
+          scanBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000002;
+        return this;
+      }
+      public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+        if (scanBuilder_ == null) {
+          if (((bitField0_ & 0x00000002) == 0x00000002) &&
+              scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
+            scan_ =
+              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
+          } else {
+            scan_ = value;
+          }
+          onChanged();
+        } else {
+          scanBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000002;
+        return this;
+      }
+      public Builder clearScan() {
+        if (scanBuilder_ == null) {
+          scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+          onChanged();
+        } else {
+          scanBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
+        bitField0_ |= 0x00000002;
+        onChanged();
+        return getScanFieldBuilder().getBuilder();
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+        if (scanBuilder_ != null) {
+          return scanBuilder_.getMessageOrBuilder();
+        } else {
+          return scan_;
+        }
+      }
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> 
+          getScanFieldBuilder() {
+        if (scanBuilder_ == null) {
+          scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
+                  scan_,
+                  getParentForChildren(),
+                  isClean());
+          scan_ = null;
+        }
+        return scanBuilder_;
+      }
+      
+      // optional bytes interpreterSpecificBytes = 3;
+      private com.google.protobuf.ByteString interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
+      public boolean hasInterpreterSpecificBytes() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      public com.google.protobuf.ByteString getInterpreterSpecificBytes() {
+        return interpreterSpecificBytes_;
+      }
+      public Builder setInterpreterSpecificBytes(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000004;
+        interpreterSpecificBytes_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearInterpreterSpecificBytes() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes();
+        onChanged();
+        return this;
+      }
+      
+      // @@protoc_insertion_point(builder_scope:AggregateArgument)
+    }
+    
+    static {
+      defaultInstance = new AggregateArgument(true);
+      defaultInstance.initFields();
+    }
+    
+    // @@protoc_insertion_point(class_scope:AggregateArgument)
+  }
+  
+  public interface AggregateResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+    
+    // repeated bytes firstPart = 1;
+    java.util.List<com.google.protobuf.ByteString> getFirstPartList();
+    int getFirstPartCount();
+    com.google.protobuf.ByteString getFirstPart(int index);
+    
+    // optional bytes secondPart = 2;
+    boolean hasSecondPart();
+    com.google.protobuf.ByteString getSecondPart();
+  }
+  public static final class AggregateResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements AggregateResponseOrBuilder {
+    // Use AggregateResponse.newBuilder() to construct.
+    private AggregateResponse(Builder builder) {
+      super(builder);
+    }
+    private AggregateResponse(boolean noInit) {}
+    
+    private static final AggregateResponse defaultInstance;
+    public static AggregateResponse getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public AggregateResponse getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable;
+    }
+    
+    private int bitField0_;
+    // repeated bytes firstPart = 1;
+    public static final int FIRSTPART_FIELD_NUMBER = 1;
+    private java.util.List<com.google.protobuf.ByteString> firstPart_;
+    public java.util.List<com.google.protobuf.ByteString>
+        getFirstPartList() {
+      return firstPart_;
+    }
+    public int getFirstPartCount() {
+      return firstPart_.size();
+    }
+    public com.google.protobuf.ByteString getFirstPart(int index) {
+      return firstPart_.get(index);
+    }
+    
+    // optional bytes secondPart = 2;
+    public static final int SECONDPART_FIELD_NUMBER = 2;
+    private com.google.protobuf.ByteString secondPart_;
+    public boolean hasSecondPart() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public com.google.protobuf.ByteString getSecondPart() {
+      return secondPart_;
+    }
+    
+    private void initFields() {
+      firstPart_ = java.util.Collections.emptyList();
+      secondPart_ = com.google.protobuf.ByteString.EMPTY;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+      
+      memoizedIsInitialized = 1;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < firstPart_.size(); i++) {
+        output.writeBytes(1, firstPart_.get(i));
+      }
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(2, secondPart_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      {
+        int dataSize = 0;
+        for (int i = 0; i < firstPart_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeBytesSizeNoTag(firstPart_.get(i));
+        }
+        size += dataSize;
+        size += 1 * getFirstPartList().size();
+      }
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, secondPart_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+    
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) obj;
+      
+      boolean result = true;
+      result = result && getFirstPartList()
+          .equals(other.getFirstPartList());
+      result = result && (hasSecondPart() == other.hasSecondPart());
+      if (hasSecondPart()) {
+        result = result && getSecondPart()
+            .equals(other.getSecondPart());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+    
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (getFirstPartCount() > 0) {
+        hash = (37 * hash) + FIRSTPART_FIELD_NUMBER;
+        hash = (53 * hash) + getFirstPartList().hashCode();
+      }
+      if (hasSecondPart()) {
+        hash = (37 * hash) + SECONDPART_FIELD_NUMBER;
+        hash = (53 * hash) + getSecondPart().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+    
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponseOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor;
+      }
+      
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable;
+      }
+      
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+      
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+      
+      public Builder clear() {
+        super.clear();
+        firstPart_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
+        secondPart_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+      
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+      
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDescriptor();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse build() {
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+      
+      private org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          firstPart_ = java.util.Collections.unmodifiableList(firstPart_);
+          bitField0_ = (bitField0_ & ~0x00000001);
+        }
+        result.firstPart_ = firstPart_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.secondPart_ = secondPart_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+      
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()) return this;
+        if (!other.firstPart_.isEmpty()) {
+          if (firstPart_.isEmpty()) {
+            firstPart_ = other.firstPart_;
+            bitField0_ = (bitField0_ & ~0x00000001);
+          } else {
+            ensureFirstPartIsMutable();
+            firstPart_.addAll(other.firstPart_);
+          }
+          onChanged();
+        }
+        if (other.hasSecondPart()) {
+          setSecondPart(other.getSecondPart());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      public final boolean isInitialized() {
+        return true;
+      }
+      
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              ensureFirstPartIsMutable();
+              firstPart_.add(input.readBytes());
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              secondPart_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      }
+      
+      private int bitField0_;
+      
+      // repeated bytes firstPart = 1;
+      private java.util.List<com.google.protobuf.ByteString> firstPart_ = java.util.Collections.emptyList();
+      private void ensureFirstPartIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          firstPart_ = new java.util.ArrayList<com.google.protobuf.ByteString>(firstPart_);
+          bitField0_ |= 0x00000001;
+         }
+      }
+      public java.util.List<com.google.protobuf.ByteString>
+          getFirstPartList() {
+        return java.util.Collections.unmodifiableList(firstPart_);
+      }
+      public int getFirstPartCount() {
+        return firstPart_.size();
+      }
+      public com.google.protobuf.ByteString getFirstPart(int index) {
+        return firstPart_.get(index);
+      }
+      public Builder setFirstPart(
+          int index, com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureFirstPartIsMutable();
+        firstPart_.set(index, value);
+        onChanged();
+        return this;
+      }
+      public Builder addFirstPart(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureFirstPartIsMutable();
+        firstPart_.add(value);
+        onChanged();
+        return this;
+      }
+      public Builder addAllFirstPart(
+          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
+        ensureFirstPartIsMutable();
+        super.addAll(values, firstPart_);
+        onChanged();
+        return this;
+      }
+      public Builder clearFirstPart() {
+        firstPart_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
+        onChanged();
+        return this;
+      }
+      
+      // optional bytes secondPart = 2;
+      private com.google.protobuf.ByteString secondPart_ = com.google.protobuf.ByteString.EMPTY;
+      public boolean hasSecondPart() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      public com.google.protobuf.ByteString getSecondPart() {
+        return secondPart_;
+      }
+      public Builder setSecondPart(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        secondPart_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearSecondPart() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        secondPart_ = getDefaultInstance().getSecondPart();
+        onChanged();
+        return this;
+      }
+      
+      // @@protoc_insertion_point(builder_scope:AggregateResponse)
+    }
+    
+    static {
+      defaultInstance = new AggregateResponse(true);
+      defaultInstance.initFields();
+    }
+    
+    // @@protoc_insertion_point(class_scope:AggregateResponse)
+  }
+  
+  public static abstract class AggregateService
+      implements com.google.protobuf.Service {
+    protected AggregateService() {}
+    
+    public interface Interface {
+      public abstract void getMax(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+      
+      public abstract void getMin(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+      
+      public abstract void getSum(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+      
+      public abstract void getRowNum(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+      
+      public abstract void getAvg(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+      
+      public abstract void getStd(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+      
+      public abstract void getMedian(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+      
+    }
+    
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new AggregateService() {
+        @java.lang.Override
+        public void getMax(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getMax(controller, request, done);
+        }
+        
+        @java.lang.Override
+        public void getMin(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getMin(controller, request, done);
+        }
+        
+        @java.lang.Override
+        public void getSum(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getSum(controller, request, done);
+        }
+        
+        @java.lang.Override
+        public void getRowNum(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getRowNum(controller, request, done);
+        }
+        
+        @java.lang.Override
+        public void getAvg(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getAvg(controller, request, done);
+        }
+        
+        @java.lang.Override
+        public void getStd(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getStd(controller, request, done);
+        }
+        
+        @java.lang.Override
+        public void getMedian(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+          impl.getMedian(controller, request, done);
+        }
+        
+      };
+    }
+    
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+        
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+            case 1:
+              return impl.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+            case 2:
+              return impl.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+            case 3:
+              return impl.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+            case 4:
+              return impl.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+            case 5:
+              return impl.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+            case 6:
+              return impl.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+        
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+            case 2:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+            case 3:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+            case 4:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+            case 5:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+            case 6:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+        
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+            case 2:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+            case 3:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+            case 4:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+            case 5:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+            case 6:
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+        
+      };
+    }
+    
+    public abstract void getMax(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+    
+    public abstract void getMin(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+    
+    public abstract void getSum(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+    
+    public abstract void getRowNum(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+    
+    public abstract void getAvg(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+    
+    public abstract void getStd(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+    
+    public abstract void getMedian(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+    
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.getDescriptor().getServices().get(0);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+    
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 1:
+          this.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 2:
+          this.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 3:
+          this.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 4:
+          this.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 5:
+          this.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        case 6:
+          this.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+    
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+        case 2:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+        case 3:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+        case 4:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+        case 5:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+        case 6:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+    
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+        case 2:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+        case 3:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+        case 4:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+        case 5:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+        case 6:
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+    
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+    
+    public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+      
+      private final com.google.protobuf.RpcChannel channel;
+      
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+      
+      public  void getMax(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()));
+      }
+      
+      public  void getMin(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()));
+      }
+      
+      public  void getSum(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(2),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()));
+      }
+      
+      public  void getRowNum(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(3),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()));
+      }
+      
+      public  void getAvg(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(4),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()));
+      }
+      
+      public  void getStd(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(5),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()));
+      }
+      
+      public  void getMedian(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(6),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()));
+      }
+    }
+    
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+    
+    public interface BlockingInterface {
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException;
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException;
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException;
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException;
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException;
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException;
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException;
+    }
+    
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+      
+      private final com.google.protobuf.BlockingRpcChannel channel;
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance());
+      }
+      
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance());
+      }
+      
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(2),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance());
+      }
+      
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(3),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance());
+      }
+      
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(4),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance());
+      }
+      
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(5),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance());
+      }
+      
+      
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(6),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance());
+      }
+      
+    }
+  }
+  
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_AggregateArgument_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_AggregateArgument_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_AggregateResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_AggregateResponse_fieldAccessorTable;
+  
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\017Aggregate.proto\032\014Client.proto\"h\n\021Aggre" +
+      "gateArgument\022\034\n\024interpreterClassName\030\001 \002" +
+      "(\t\022\023\n\004scan\030\002 \002(\0132\005.Scan\022 \n\030interpreterSp" +
+      "ecificBytes\030\003 \001(\014\":\n\021AggregateResponse\022\021" +
+      "\n\tfirstPart\030\001 \003(\014\022\022\n\nsecondPart\030\002 \001(\0142\366\002" +
+      "\n\020AggregateService\0220\n\006getMax\022\022.Aggregate" +
+      "Argument\032\022.AggregateResponse\0220\n\006getMin\022\022" +
+      ".AggregateArgument\032\022.AggregateResponse\0220" +
+      "\n\006getSum\022\022.AggregateArgument\032\022.Aggregate" +
+      "Response\0223\n\tgetRowNum\022\022.AggregateArgumen",
+      "t\032\022.AggregateResponse\0220\n\006getAvg\022\022.Aggreg" +
+      "ateArgument\032\022.AggregateResponse\0220\n\006getSt" +
+      "d\022\022.AggregateArgument\032\022.AggregateRespons" +
+      "e\0223\n\tgetMedian\022\022.AggregateArgument\032\022.Agg" +
+      "regateResponseBE\n*org.apache.hadoop.hbas" +
+      "e.protobuf.generatedB\017AggregateProtosH\001\210" +
+      "\001\001\240\001\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_AggregateArgument_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_AggregateArgument_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_AggregateArgument_descriptor,
+              new java.lang.String[] { "InterpreterClassName", "Scan", "InterpreterSpecificBytes", },
+              org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.class,
+              org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.Builder.class);
+          internal_static_AggregateResponse_descriptor =
+            getDescriptor().getMessageTypes().get(1);
+          internal_static_AggregateResponse_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_AggregateResponse_descriptor,
+              new java.lang.String[] { "FirstPart", "SecondPart", },
+              org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class,
+              org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class);
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
+        }, assigner);
+  }
+  
+  // @@protoc_insertion_point(outer_class_scope)
+}
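
The generated Stub and BlockingStub above are the client-side entry points. Below is a minimal, hypothetical sketch of a blocking call against this service. Obtaining the BlockingRpcChannel and RpcController is assumed to be handled by the surrounding RPC framework and is out of scope here, and the final 8-byte long decoding is a placeholder (the real payload layout depends on the ColumnInterpreter in use); only the builder, stub, and accessor calls are taken from the generated API above:

import java.nio.ByteBuffer;

import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.ByteString;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;

import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

public class AggregateClientSketch {
  // channel and controller are assumed to be supplied by the surrounding
  // RPC framework; wiring them up is outside the scope of this sketch.
  public static long callGetRowNum(BlockingRpcChannel channel,
      RpcController controller, ClientProtos.Scan scan,
      String interpreterClass) throws ServiceException {
    // Build the request message defined in Aggregate.proto.
    AggregateProtos.AggregateArgument request =
        AggregateProtos.AggregateArgument.newBuilder()
            .setInterpreterClassName(interpreterClass)
            .setScan(scan)
            .build();
    // The blocking stub routes the call through the supplied channel.
    AggregateProtos.AggregateService.BlockingInterface stub =
        AggregateProtos.AggregateService.newBlockingStub(channel);
    AggregateProtos.AggregateResponse response =
        stub.getRowNum(controller, request);
    // firstPart is a repeated bytes field; how its contents are encoded
    // depends on the ColumnInterpreter. The big-endian long decoding
    // below is a placeholder assumption to keep the sketch self-contained.
    ByteString first = response.getFirstPart(0);
    return ByteBuffer.wrap(first.toByteArray()).getLong();
  }
}

The non-blocking Stub variant takes the same request and delivers the AggregateResponse through an RpcCallback instead of returning it.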

Added: hbase/trunk/hbase-server/src/main/protobuf/Aggregate.proto
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/protobuf/Aggregate.proto?rev=1398175&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/main/protobuf/Aggregate.proto (added)
+++ hbase/trunk/hbase-server/src/main/protobuf/Aggregate.proto Mon Oct 15 02:32:09 2012
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "AggregateProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "Client.proto";
+
+message AggregateArgument {
+  /** The argument passed to the AggregateService consists of three parts:
+   *  (1) the canonical class name of the ColumnInterpreter implementation,
+   *  (2) the Scan defining the query, and
+   *  (3) any bytes required to construct the ColumnInterpreter object
+   *      properly.
+   */
+  required string interpreterClassName = 1;
+  required Scan scan = 2;
+  optional bytes interpreterSpecificBytes = 3;
+}
+
+message AggregateResponse {
+  /**
+   * Each AggregateService method returns either a single value or a pair
+   * of values. When the result is a pair, both firstPart and secondPart
+   * are set; when it is a single value, secondPart is absent. Refer to
+   * the AggregateImplementation class for an overview of how
+   * AggregateResponse objects are constructed.
+   */
+  repeated bytes firstPart = 1;
+  optional bytes secondPart = 2;
+}
+
+/** Refer to the AggregateImplementation class for an overview of the 
+ *  AggregateService method implementations and their functionality.
+ */
+service AggregateService {
+  rpc getMax (AggregateArgument) returns (AggregateResponse);
+  rpc getMin (AggregateArgument) returns (AggregateResponse);
+  rpc getSum (AggregateArgument) returns (AggregateResponse);
+  rpc getRowNum (AggregateArgument) returns (AggregateResponse);
+  rpc getAvg (AggregateArgument) returns (AggregateResponse);
+  rpc getStd (AggregateArgument) returns (AggregateResponse);
+  rpc getMedian (AggregateArgument) returns (AggregateResponse);
+}
\ No newline at end of file
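
On the server side, an implementer subclasses the generated abstract AggregateService (made possible by java_generic_services above) and overrides the seven rpc methods. The following is a schematic, hypothetical sketch of one override, not the actual AggregateImplementation the comments refer to; the class name, the zero row count, and its 8-byte encoding are placeholders:

import java.nio.ByteBuffer;

import com.google.protobuf.ByteString;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;

import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos;

public abstract class AggregateServiceSketch
    extends AggregateProtos.AggregateService {

  @Override
  public void getRowNum(RpcController controller,
      AggregateProtos.AggregateArgument request,
      RpcCallback<AggregateProtos.AggregateResponse> done) {
    // A real implementation would open a scanner from request.getScan()
    // and count rows; the constant below is a placeholder so the sketch
    // stays self-contained.
    long rowCount = 0L;
    AggregateProtos.AggregateResponse response =
        AggregateProtos.AggregateResponse.newBuilder()
            // Single-valued result: only firstPart is populated and
            // secondPart is left absent, matching the .proto comment.
            .addFirstPart(ByteString.copyFrom(
                ByteBuffer.allocate(8).putLong(rowCount).array()))
            .build();
    // Generic-services style: hand the response to the callback rather
    // than returning it.
    done.run(response);
  }

  // getMax, getMin, getSum, getAvg, getStd, and getMedian remain abstract
  // here; a concrete implementation (such as HBase's
  // AggregateImplementation) overrides all seven.
}

The switch-based callMethod dispatch shown earlier in the generated class is what routes an incoming method descriptor to the matching override.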