Posted to commits@hbase.apache.org by st...@apache.org on 2017/07/07 05:01:16 UTC

[42/51] [partial] hbase git commit: Revert "HBASE-17056 Remove checked in PB generated files Selective add of dependency on" Revert for now. Build unstable and some interesting issues around CLASSPATH

http://git-wip-us.apache.org/repos/asf/hbase/blob/6786b2b6/hbase-examples/README.txt
----------------------------------------------------------------------
diff --git a/hbase-examples/README.txt b/hbase-examples/README.txt
index 22d1103..c47ed4f 100644
--- a/hbase-examples/README.txt
+++ b/hbase-examples/README.txt
@@ -63,8 +63,27 @@ Example code.
       3. Execute {./DemoClient}.
 
 ON PROTOBUFS
-This maven module has core protobuf definition files ('.protos') used by hbase
-examples. 
+This maven module has protobuf definition files ('.protos') used by hbase
+Coprocessor Endpoint examples, including tests. Coprocessor
+Endpoints are meant to be standalone, independent code, not reliant on hbase
+internals. They define their Service using protobuf. The protobuf version
+they use can be distinct from that used by HBase internally, since HBase started
+shading its protobuf references. Endpoints have no access to the shaded protobuf
+hbase uses. They do have access to the content of hbase-protocol -- the
+.protos found in here -- but avoid using these as much as you can, as they are
+liable to change.
 
-Generation of java files from protobuf .proto files included here is done as
-part of the build.
+Generation of java files from the protobuf .proto files included here is done
+apart from the build. Run the generation whenever you make changes to the .proto
+files and then check in the produced java. (The reasoning is that changes are
+infrequent, so why pay the price of generating files anew on each build?)
+
+To generate java files from protos run:
+
+ $ mvn compile -Dcompile-protobuf
+or
+ $ mvn compile -Pcompile-protobuf
+
+After you've done the above, review the result and then check in the changes (or
+post a patch on a JIRA with your definition file changes and the generated files).
+Be careful to notice files added and files removed and do the appropriate git rm/adds.
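
For illustration, here is a minimal client-side sketch of calling the RowCountService
Endpoint that the generated ExampleProtos classes (further down in this patch) describe.
It is only a sketch: it assumes the example RowCountEndpoint coprocessor is already
loaded on the table, the table name and row key are placeholders, and it uses the
non-shaded HBase client API consistent with the README text above.

  import java.io.IOException;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.client.Table;
  import org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos;
  import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
  import org.apache.hadoop.hbase.util.Bytes;

  import com.google.protobuf.ServiceException;

  public class RowCountClientSketch {
    public static void main(String[] args) throws IOException, ServiceException {
      Configuration conf = HBaseConfiguration.create();
      try (Connection connection = ConnectionFactory.createConnection(conf);
           Table table = connection.getTable(TableName.valueOf("example_table"))) {
        // Coprocessor rpc channel routed to the region hosting this (placeholder) row.
        CoprocessorRpcChannel channel = table.coprocessorService(Bytes.toBytes("some-row"));
        // Blocking stub generated by protoc from Examples.proto.
        ExampleProtos.RowCountService.BlockingInterface service =
            ExampleProtos.RowCountService.newBlockingStub(channel);
        ExampleProtos.CountResponse response =
            service.getRowCount(null, ExampleProtos.CountRequest.getDefaultInstance());
        System.out.println("rows in that region: " + response.getCount());
      }
    }
  }

On a multi-region table you would typically use the batched form,
Table.coprocessorService(Class, startKey, endKey, Batch.Call), to fan the request
out to every region and sum the per-region counts.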

http://git-wip-us.apache.org/repos/asf/hbase/blob/6786b2b6/hbase-examples/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index 5c6acfa..3a30add 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -29,7 +29,7 @@
   <artifactId>hbase-examples</artifactId>
   <name>Apache HBase - Examples</name>
   <description>Examples of HBase usage</description>
-  <!--REMOVE-->
+<!--REMOVE-->
   <build>
     <plugins>
       <plugin>
@@ -46,33 +46,20 @@
           <skipAssembly>true</skipAssembly>
         </configuration>
       </plugin>
-      <plugin>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <version>${surefire.version}</version>
-        <configuration>
-          <!-- Have to set the groups here because we only do
+        <plugin>
+            <artifactId>maven-surefire-plugin</artifactId>
+            <version>${surefire.version}</version>
+            <configuration>
+                <!-- Have to set the groups here because we only do
                     split tests in this package, so groups on live in this module -->
-          <groups>${surefire.firstPartGroups}</groups>
-        </configuration>
-      </plugin>
-      <!-- Make a jar and put the sources in the jar -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
-      </plugin>
-      <plugin>
-        <groupId>org.xolstice.maven.plugins</groupId>
-        <artifactId>protobuf-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>compile-protoc</id>
-            <phase>generate-sources</phase>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
+                <groups>${surefire.firstPartGroups}</groups>
+            </configuration>
+        </plugin>
+        <!-- Make a jar and put the sources in the jar -->
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-source-plugin</artifactId>
+        </plugin>
     </plugins>
     <pluginManagement>
       <plugins>
@@ -94,7 +81,7 @@
                     </goals>
                   </pluginExecutionFilter>
                   <action>
-                    <ignore/>
+                    <ignore></ignore>
                   </action>
                 </pluginExecution>
                 <pluginExecution>
@@ -107,7 +94,7 @@
                     </goals>
                   </pluginExecutionFilter>
                   <action>
-                    <ignore/>
+                    <ignore></ignore>
                   </action>
                 </pluginExecution>
               </pluginExecutions>
@@ -125,16 +112,16 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-common</artifactId>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-common</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-protocol</artifactId>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-protocol</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-client</artifactId>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-client</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
@@ -149,10 +136,11 @@
       <artifactId>hbase-thrift</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-testing-util</artifactId>
-      <scope>test</scope>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-testing-util</artifactId>
+        <scope>test</scope>
     </dependency>
+
     <dependency>
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
@@ -169,126 +157,152 @@
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
     </dependency>
-  </dependencies>
-  <profiles>
-    <!-- Skip the tests in this module -->
-    <profile>
-      <id>skipExamplesTests</id>
-      <activation>
-        <property>
-          <name>skipExamplesTests</name>
-        </property>
-      </activation>
-      <properties>
-        <surefire.skipFirstPart>true</surefire.skipFirstPart>
-        <surefire.skipSecondPart>true</surefire.skipSecondPart>
-      </properties>
-    </profile>
-    <!-- Profiles for building against different hadoop versions -->
-    <!-- There are a lot of common dependencies used here, should investigate
+ </dependencies>
+ <profiles>
+     <!-- Skip the tests in this module -->
+     <profile>
+         <id>skipExamplesTests</id>
+         <activation>
+             <property>
+                 <name>skipExamplesTests</name>
+             </property>
+         </activation>
+         <properties>
+             <surefire.skipFirstPart>true</surefire.skipFirstPart>
+             <surefire.skipSecondPart>true</surefire.skipSecondPart>
+         </properties>
+     </profile>
+        <profile>
+          <id>compile-protobuf</id>
+          <activation>
+            <property>
+              <name>compile-protobuf</name>
+            </property>
+          </activation>
+          <build>
+            <plugins>
+              <plugin>
+                <groupId>org.xolstice.maven.plugins</groupId>
+                <artifactId>protobuf-maven-plugin</artifactId>
+                <executions>
+                  <execution>
+                    <id>compile-protoc</id>
+                    <phase>generate-sources</phase>
+                    <goals>
+                      <goal>compile</goal>
+                    </goals>
+                  </execution>
+                </executions>
+              </plugin>
+            </plugins>
+          </build>
+        </profile>
+
+     <!-- Profiles for building against different hadoop versions -->
+     <!-- There are a lot of common dependencies used here, should investigate
 if we can combine these profiles somehow -->
-    <!-- profile for building against Hadoop 2.x.  This is the default -->
-    <profile>
-      <id>hadoop-2.0</id>
-      <activation>
-        <property>
-          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
-          <!--h2-->
-          <name>!hadoop.profile</name>
-        </property>
-      </activation>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-        </dependency>
-      </dependencies>
-      <build>
-        <plugins>
-          <plugin>
-            <artifactId>maven-dependency-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>create-mrapp-generated-classpath</id>
-                <phase>generate-test-resources</phase>
-                <goals>
-                  <goal>build-classpath</goal>
-                </goals>
-                <configuration>
-                  <!-- needed to run the unit test for DS to generate
+
+     <!-- profile for building against Hadoop 2.x.  This is the default -->
+     <profile>
+         <id>hadoop-2.0</id>
+         <activation>
+             <property>
+            <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+            <!--h2--><name>!hadoop.profile</name>
+             </property>
+         </activation>
+         <dependencies>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-mapreduce-client-core</artifactId>
+             </dependency>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-common</artifactId>
+             </dependency>
+         </dependencies>
+         <build>
+             <plugins>
+                 <plugin>
+                     <artifactId>maven-dependency-plugin</artifactId>
+                     <executions>
+                         <execution>
+                             <id>create-mrapp-generated-classpath</id>
+                             <phase>generate-test-resources</phase>
+                             <goals>
+                                 <goal>build-classpath</goal>
+                             </goals>
+                             <configuration>
+                                 <!-- needed to run the unit test for DS to generate
                                  the required classpath that is required in the env
                                  of the launch container in the mini mr/yarn cluster
                                  -->
-                  <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <!--
+                                 <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
+                             </configuration>
+                         </execution>
+                     </executions>
+                 </plugin>
+             </plugins>
+         </build>
+     </profile>
+     <!--
        profile for building against Hadoop 3.0.x. Activate using:
         mvn -Dhadoop.profile=3.0
      -->
-    <profile>
-      <id>hadoop-3.0</id>
-      <activation>
-        <property>
-          <name>hadoop.profile</name>
-          <value>3.0</value>
-        </property>
-      </activation>
-      <properties>
-        <hadoop.version>3.0-SNAPSHOT</hadoop.version>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-annotations</artifactId>
-          <exclusions>
-            <exclusion>
-              <groupId>jdk.tools</groupId>
-              <artifactId>jdk.tools</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
-        </dependency>
-      </dependencies>
-      <build>
-        <plugins>
-          <plugin>
-            <artifactId>maven-dependency-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>create-mrapp-generated-classpath</id>
-                <phase>generate-test-resources</phase>
-                <goals>
-                  <goal>build-classpath</goal>
-                </goals>
-                <configuration>
-                  <!-- needed to run the unit test for DS to generate
+     <profile>
+         <id>hadoop-3.0</id>
+         <activation>
+             <property>
+                 <name>hadoop.profile</name>
+                 <value>3.0</value>
+             </property>
+         </activation>
+         <properties>
+             <hadoop.version>3.0-SNAPSHOT</hadoop.version>
+         </properties>
+         <dependencies>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-common</artifactId>
+             </dependency>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-annotations</artifactId>
+                 <exclusions>
+                   <exclusion>
+                     <groupId>jdk.tools</groupId>
+                     <artifactId>jdk.tools</artifactId>
+                   </exclusion>
+                 </exclusions>
+             </dependency>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-minicluster</artifactId>
+             </dependency>
+         </dependencies>
+         <build>
+             <plugins>
+                 <plugin>
+                     <artifactId>maven-dependency-plugin</artifactId>
+                     <executions>
+                         <execution>
+                             <id>create-mrapp-generated-classpath</id>
+                             <phase>generate-test-resources</phase>
+                             <goals>
+                                 <goal>build-classpath</goal>
+                             </goals>
+                             <configuration>
+                                 <!-- needed to run the unit test for DS to generate
                                  the required classpath that is required in the env
                                  of the launch container in the mini mr/yarn cluster
                                  -->
-                  <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
+                                 <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
+                             </configuration>
+                         </execution>
+                     </executions>
+                 </plugin>
+             </plugins>
+         </build>
+     </profile>
   </profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/hbase/blob/6786b2b6/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java
new file mode 100644
index 0000000..b780985
--- /dev/null
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java
@@ -0,0 +1,1149 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: Examples.proto
+
+package org.apache.hadoop.hbase.coprocessor.example.generated;
+
+public final class ExampleProtos {
+  private ExampleProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface CountRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  /**
+   * Protobuf type {@code hbase.pb.CountRequest}
+   */
+  public static final class CountRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements CountRequestOrBuilder {
+    // Use CountRequest.newBuilder() to construct.
+    private CountRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private CountRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final CountRequest defaultInstance;
+    public static CountRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public CountRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private CountRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<CountRequest> PARSER =
+        new com.google.protobuf.AbstractParser<CountRequest>() {
+      public CountRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new CountRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<CountRequest> getParserForType() {
+      return PARSER;
+    }
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) obj;
+
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.CountRequest}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequestOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest build() {
+        org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest buildPartial() {
+        org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest other) {
+        if (other == org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.CountRequest)
+    }
+
+    static {
+      defaultInstance = new CountRequest(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.CountRequest)
+  }
+
+  public interface CountResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required int64 count = 1 [default = 0];
+    /**
+     * <code>required int64 count = 1 [default = 0];</code>
+     */
+    boolean hasCount();
+    /**
+     * <code>required int64 count = 1 [default = 0];</code>
+     */
+    long getCount();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.CountResponse}
+   */
+  public static final class CountResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements CountResponseOrBuilder {
+    // Use CountResponse.newBuilder() to construct.
+    private CountResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private CountResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final CountResponse defaultInstance;
+    public static CountResponse getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public CountResponse getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private CountResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              count_ = input.readInt64();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<CountResponse> PARSER =
+        new com.google.protobuf.AbstractParser<CountResponse>() {
+      public CountResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new CountResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<CountResponse> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required int64 count = 1 [default = 0];
+    public static final int COUNT_FIELD_NUMBER = 1;
+    private long count_;
+    /**
+     * <code>required int64 count = 1 [default = 0];</code>
+     */
+    public boolean hasCount() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required int64 count = 1 [default = 0];</code>
+     */
+    public long getCount() {
+      return count_;
+    }
+
+    private void initFields() {
+      count_ = 0L;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasCount()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt64(1, count_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(1, count_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) obj;
+
+      boolean result = true;
+      result = result && (hasCount() == other.hasCount());
+      if (hasCount()) {
+        result = result && (getCount()
+            == other.getCount());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasCount()) {
+        hash = (37 * hash) + COUNT_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getCount());
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.CountResponse}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponseOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        count_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse build() {
+        org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse buildPartial() {
+        org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.count_ = count_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse other) {
+        if (other == org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()) return this;
+        if (other.hasCount()) {
+          setCount(other.getCount());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasCount()) {
+          
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required int64 count = 1 [default = 0];
+      private long count_ ;
+      /**
+       * <code>required int64 count = 1 [default = 0];</code>
+       */
+      public boolean hasCount() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required int64 count = 1 [default = 0];</code>
+       */
+      public long getCount() {
+        return count_;
+      }
+      /**
+       * <code>required int64 count = 1 [default = 0];</code>
+       */
+      public Builder setCount(long value) {
+        bitField0_ |= 0x00000001;
+        count_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required int64 count = 1 [default = 0];</code>
+       */
+      public Builder clearCount() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        count_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.CountResponse)
+    }
+
+    static {
+      defaultInstance = new CountResponse(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.CountResponse)
+  }
+
+  /**
+   * Protobuf service {@code hbase.pb.RowCountService}
+   */
+  public static abstract class RowCountService
+      implements com.google.protobuf.Service {
+    protected RowCountService() {}
+
+    public interface Interface {
+      /**
+       * <code>rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);</code>
+       */
+      public abstract void getRowCount(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
+
+      /**
+       * <code>rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);</code>
+       */
+      public abstract void getKeyValueCount(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
+
+    }
+
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new RowCountService() {
+        @java.lang.Override
+        public  void getRowCount(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done) {
+          impl.getRowCount(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void getKeyValueCount(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done) {
+          impl.getKeyValueCount(controller, request, done);
+        }
+
+      };
+    }
+
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.getRowCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request);
+            case 1:
+              return impl.getKeyValueCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+      };
+    }
+
+    /**
+     * <code>rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);</code>
+     */
+    public abstract void getRowCount(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
+
+    /**
+     * <code>rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);</code>
+     */
+    public abstract void getKeyValueCount(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
+
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.getDescriptor().getServices().get(0);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.getRowCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse>specializeCallback(
+              done));
+          return;
+        case 1:
+          this.getKeyValueCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance();
+        case 1:
+          return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+
+    public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.RowCountService implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.RpcChannel channel;
+
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+
+      public  void getRowCount(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class,
+            org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()));
+      }
+
+      public  void getKeyValueCount(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class,
+            org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()));
+      }
+    }
+
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+
+    public interface BlockingInterface {
+      public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request)
+          throws com.google.protobuf.ServiceException;
+    }
+
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.BlockingRpcChannel channel;
+
+      public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance());
+      }
+
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.RowCountService)
+  }
+
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_CountRequest_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_CountRequest_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_CountResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_CountResponse_fieldAccessorTable;
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\016Examples.proto\022\010hbase.pb\"\016\n\014CountReque" +
+      "st\"!\n\rCountResponse\022\020\n\005count\030\001 \002(\003:\00102\226\001" +
+      "\n\017RowCountService\022>\n\013getRowCount\022\026.hbase" +
+      ".pb.CountRequest\032\027.hbase.pb.CountRespons" +
+      "e\022C\n\020getKeyValueCount\022\026.hbase.pb.CountRe" +
+      "quest\032\027.hbase.pb.CountResponseBN\n5org.ap" +
+      "ache.hadoop.hbase.coprocessor.example.ge" +
+      "neratedB\rExampleProtosH\001\210\001\001\240\001\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_hbase_pb_CountRequest_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_hbase_pb_CountRequest_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_CountRequest_descriptor,
+              new java.lang.String[] { });
+          internal_static_hbase_pb_CountResponse_descriptor =
+            getDescriptor().getMessageTypes().get(1);
+          internal_static_hbase_pb_CountResponse_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_CountResponse_descriptor,
+              new java.lang.String[] { "Count", });
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6786b2b6/hbase-procedure/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml
index befbf28..2fa01a1 100644
--- a/hbase-procedure/pom.xml
+++ b/hbase-procedure/pom.xml
@@ -70,10 +70,6 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hbase.thirdparty</groupId>
-      <artifactId>hbase-shaded-protobuf</artifactId>
-    </dependency>
-    <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-protocol-shaded</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/6786b2b6/hbase-protocol-shaded/README.txt
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/README.txt b/hbase-protocol-shaded/README.txt
index b0030fa..26a9665 100644
--- a/hbase-protocol-shaded/README.txt
+++ b/hbase-protocol-shaded/README.txt
@@ -1,6 +1,53 @@
+Please read carefully as the 'menu options' have changed.
+What you do in here is not what you do elsewhere to generate
+proto java files.
+
 This module has proto files used by core. These protos
 overlap with protos that are used by coprocessor endpoints
-(CPEP) in the module hbase-protocol. So core versions have
+(CPEP) in the module hbase-protocol. So the core versions have
 a different name, the generated classes are relocated
 -- i.e. shaded -- to a new location; they are moved from
 org.apache.hadoop.hbase.* to org.apache.hadoop.hbase.shaded.
+
+This module also includes the protobuf that hbase core depends
+on, again relocated to live at an offset of
+org.apache.hadoop.hbase.shaded, so as to avoid clashes with other
+versions of protobuf resident on our CLASSPATH included,
+transitively or otherwise, by dependencies: i.e. the shaded
+protobuf Message class is at
+org.apache.hadoop.hbase.shaded.com.google.protobuf.Message
+rather than at com.google.protobuf.Message.
+
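+For example (an illustrative sketch, not lifted from the actual hbase sources),
+code internal to hbase imports the relocated class:
+
+  import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
+
+while a coprocessor endpoint built against hbase-protocol keeps importing
+the unrelocated one:
+
+  import com.google.protobuf.Message;
+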
+Finally, this module also includes patches applied on top of
+protobuf to add functionality not yet in protobuf that we
+need now.
+
+If you make changes to protos, to the protobuf version or to
+the patches you want to apply to protobuf, you must rerun the
+below step and then check in what it generated:
+
+ $ mvn install -Dcompile-protobuf
+
+or
+
+ $ mvn install -Pcompile-protobuf
+
+NOTE: it is 'install' above, whereas other proto generation only needs 'compile'.
+NOTE: Unlike elsewhere, the above command intentionally does NOT install this
+module's jar into the repo. The jar made by the above is a scratch jar that is
+part of the process that gets us to a set of files to check in; it is not for
+consumption. Run mvn install without the '-Pcompile-protobuf' option to get
+this module's artifact installed in your repo!
+
+When finished, the content of src/main/java/org/apache/hadoop/hbase/shaded
+will have been updated. Make sure everything builds and then carefully
+check in the changes. Files may have been added or removed
+by the steps above.
+
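+For example, a typical regeneration session might look like the following
+(an illustrative sketch only; adjust to where your checkout lives):
+
+ $ cd hbase-protocol-shaded
+ $ mvn install -Pcompile-protobuf
+ $ git status src/main/java/org/apache/hadoop/hbase/shaded
+ $ git add -A src/main/java/org/apache/hadoop/hbase/shaded
+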
+The protobuf version used internally by hbase differs from what
+is used over in the CPEP hbase-protocol module but mvn takes care
+of ensuring we have the right protobuf in place so you don't have to.
+
+If you have patches for the protobuf, add them to the
+src/main/patches directory. They will be applied after
+protobuf is shaded and unbundled into src/main/java.

http://git-wip-us.apache.org/repos/asf/hbase/blob/6786b2b6/hbase-protocol-shaded/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index 225e9e4..d5953b6 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -32,12 +32,18 @@
   <properties>
     <maven.javadoc.skip>true</maven.javadoc.skip>
     <!--Version of protobuf that hbase uses internally (we shade our pb)
-         Must match what is out in hbase-thirdparty include.
-         (Note, there may only be a 3.3.0 protoc... no 3.3.1 protoc
            -->
-    <internal.protobuf.version>3.3.0</internal.protobuf.version>
+    <internal.protobuf.version>3.2.0</internal.protobuf.version>
+    <!--The Default target dir-->
+    <classes.dir>${project.build.directory}/classes</classes.dir>
+    <!--The Default location for sources-->
+    <sources.dir>src/main/java</sources.dir>
   </properties>
   <build>
+    <!--I want to override these in a profile so define them
+         with variables up here-->
+    <sourceDirectory>${sources.dir}</sourceDirectory>
+    <outputDirectory>${classes.dir}</outputDirectory>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
@@ -74,66 +80,6 @@
           </execution>
         </executions>
       </plugin>
-      <plugin>
-        <groupId>org.xolstice.maven.plugins</groupId>
-        <artifactId>protobuf-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>compile-protoc</id>
-            <phase>generate-sources</phase>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-            <configuration>
-              <protocArtifact>com.google.protobuf:protoc:${internal.protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
-              <attachProtoSources>false</attachProtoSources>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-shade-plugin</artifactId>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-            <configuration>
-              <minimizeJar>true</minimizeJar>
-              <shadeSourcesContent>true</shadeSourcesContent>
-              <!-- Causes an NPE until shade 3.0.1.  See MSHADE-247
-              <createSourcesJar>true</createSourcesJar>
-              -->
-              <relocations>
-                <relocation>
-                  <pattern>com.google.protobuf</pattern>
-                  <shadedPattern>org.apache.hadoop.hbase.shaded.com.google.protobuf</shadedPattern>
-                </relocation>
-              </relocations>
-              <artifactSet>
-                <excludes>
-                  <!--Exclude protobuf itself. We get a patched version from hbase-thirdparty.
-                    -->
-                  <exclude>com.google.protobuf:protobuf-java</exclude>
-                  <exclude>com.google.code.findbugs:*</exclude>
-                  <exclude>com.google.errorprone:error_prone_annotations</exclude>
-                  <exclude>com.google.j2objc:j2objc-annotations</exclude>
-                  <exclude>org.codehaus.mojo:animal-sniffer-annotations</exclude>
-                  <exclude>org.codehaus.mojo:animal-sniffer-annotations</exclude>
-                  <exclude>junit:junit</exclude>
-                  <exclude>log4j:log4j</exclude>
-                  <exclude>commons-logging:commons-logging</exclude>
-                  <exclude>org.apache.hbase:hbase-annotations</exclude>
-                  <exclude>com.github.stephenc.fingbugs:*</exclude>
-                  <exclude></exclude>
-                </excludes>
-              </artifactSet>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
     </plugins>
     <pluginManagement>
       <plugins>
@@ -203,5 +149,216 @@
         <surefire.skipFirstPart>true</surefire.skipFirstPart>
       </properties>
     </profile>
+    <profile>
+      <id>compile-protobuf</id>
+      <!--
+         Generate and shade proto files. Drops generated java files
+         under src/main/java when done. Check in the generated files so they
+         are available at build time. Run this profile/step every time you
+         change proto files or update the protobuf version.
+
+         The below does a bunch of ugly stuff. It first purges the current
+         content of the generated and shaded com.google.protobuf java files.
+         Let me say that again: we remove the java files under src/main/java
+         in the shaded dirs. We do this because later we apply patches, and
+         patches fail if they've already been applied. We also remove because we
+         overlay the shaded protobuf and, if files have been removed or added,
+         it'll be more plain if we have first done this delete.
+
+         Next up we generate protos, build a scratch jar that contains the
+         protos plus the stuff we want shaded (we have to do this because
+         shading only works on a built jar), run the shade on the jar, then
+         carefully STOP this scratch jar from being put into the local repository
+         (because it can mess up builds that come later... mvn automatically wants
+         to install an artifact into the repo per module). Finally, unpack this
+         shaded jar over the src/main/java directory, and then apply patches
+         atop this.
+
+         The result needs to be checked in.
+      -->
+      <activation>
+        <property>
+          <name>compile-protobuf</name>
+        </property>
+      </activation>
+      <properties>
+        <profile.id>compile-protobuf</profile.id>
+        <!--Directory under target to hold generated proto files-->
+        <protoc.sources.dir>${project.build.directory}/protoc-generated-sources</protoc.sources.dir>
+        <!--When doing this step, the sources.dir is pointed at generated protos, NOT src/main/java-->
+        <sources.dir>${protoc.sources.dir}</sources.dir>
+        <!--Where to compile protos into-->
+        <classes.dir>${project.build.directory}/protoc-generated-classes</classes.dir>
+        <!--When the compile for this profile runs, make sure it makes jars that
+            can be related back to this shading profile. Give them the shading profile
+            name as a prefix.
+         -->
+        <jar.finalName>${profile.id}.${project.artifactId}-${project.version}</jar.finalName>
+      </properties>
+      <build>
+        <plugins>
+          <plugin>
+            <artifactId>maven-clean-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>pre-compile-protoc</id>
+                <phase>generate-sources</phase>
+                <goals>
+                  <goal>clean</goal>
+                </goals>
+                <configuration>
+                  <filesets>
+                    <fileset>
+                      <directory>${basedir}/src/main/java/org/apache/hadoop/hbase/shaded</directory>
+                      <includes>
+                        <include>ipc/protobuf/generated/**/*.java</include>
+                        <include>protobuf/generated/**/*.java</include>
+                        <include>com/google/protobuf/**/*.java</include>
+                      </includes>
+                      <followSymlinks>false</followSymlinks>
+                    </fileset>
+                  </filesets>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.xolstice.maven.plugins</groupId>
+            <artifactId>protobuf-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>compile-protoc</id>
+                <phase>generate-sources</phase>
+                <goals>
+                  <goal>compile</goal>
+                </goals>
+                <configuration>
+                  <protocArtifact>com.google.protobuf:protoc:${internal.protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
+                  <outputDirectory>${protoc.sources.dir}</outputDirectory>
+                  <attachProtoSources>false</attachProtoSources>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-jar-plugin</artifactId>
+            <version>${maven.jar.version}</version>
+            <configuration>
+              <finalName>${jar.finalName}</finalName>
+            </configuration>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-shade-plugin</artifactId>
+            <version>${maven.shade.version}</version>
+            <executions>
+              <execution>
+                <phase>package</phase>
+                <goals>
+                  <goal>shade</goal>
+                </goals>
+                <configuration>
+                  <shadeSourcesContent>true</shadeSourcesContent>
+                  <createSourcesJar>true</createSourcesJar>
+                  <relocations>
+                    <relocation>
+                      <pattern>com.google.protobuf</pattern>
+                      <shadedPattern>org.apache.hadoop.hbase.shaded.com.google.protobuf</shadedPattern>
+                    </relocation>
+                  </relocations>
+                  <!-- What I got when I did a mvn dependency:list for this
+                   module. Exclude all but the protobuf
+                [INFO]    commons-logging:commons-logging:jar:1.2:compile
+                [INFO]    com.github.stephenc.findbugs:findbugs-annotations:jar:1.3.9-1:compile
+                [INFO]    log4j:log4j:jar:1.2.17:compile
+                [INFO]    com.google.protobuf:protobuf-java:jar:2.5.0:compile
+                [INFO]    org.hamcrest:hamcrest-core:jar:1.3:test
+                [INFO]    org.mockito:mockito-all:jar:1.10.8:test
+                [INFO]    junit:junit:jar:4.12:compile
+                [INFO]    org.apache.hbase:hbase-annotations:jar:2.0.0-SNAPSHOT:compile
+
+                  The list below must exclude all of the above except protobuf.
+              -->
+                  <artifactSet>
+                    <excludes>
+                      <exclude>commons-logging:commons-logging</exclude>
+                      <exclude>com.github.stephenc.findbugs:findbugs-annotations</exclude>
+                      <exclude>log4j:log4j</exclude>
+                      <exclude>org.hamcrest:hamcrest-core</exclude>
+                      <exclude>org.mockito:mockito-all</exclude>
+                      <exclude>junit:junit</exclude>
+                      <exclude>org.apache.hbase:hbase-annotations</exclude>
+                    </excludes>
+                  </artifactSet>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <!--Now unpack the shaded jar made above so the shaded classes
+             are available to subsequent modules-->
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-dependency-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>unpack</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>unpack</goal>
+                </goals>
+                <configuration>
+                  <artifactItems>
+                    <artifactItem>
+                      <groupId>${project.groupId}</groupId>
+                      <artifactId>${project.artifactId}</artifactId>
+                      <version>${project.version}</version>
+                      <classifier>sources</classifier>
+                      <type>jar</type>
+                      <overWrite>true</overWrite>
+                      <outputDirectory>${basedir}/src/main/java</outputDirectory>
+                      <includes>**/*.java</includes>
+                    </artifactItem>
+                  </artifactItems>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-patch-plugin</artifactId>
+            <version>${maven.patch.version}</version>
+            <configuration>
+              <!--Patches are made at top-level-->
+              <targetDirectory>${basedir}/..</targetDirectory>
+              <skipApplication>false</skipApplication>
+            </configuration>
+            <executions>
+              <execution>
+                <id>patch</id>
+                <configuration>
+                  <strip>1</strip>
+                  <patchDirectory>src/main/patches</patchDirectory>
+                  <patchTrackingFile>${project.build.directory}/patches-applied.txt</patchTrackingFile>
+                  <naturalOrderProcessing>true</naturalOrderProcessing>
+                </configuration>
+                <phase>package</phase>
+                <goals>
+                  <!--This should run after the above unpack phase-->
+                  <goal>apply</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-install-plugin</artifactId>
+            <version>${maven.install.version}</version>
+            <configuration>
+              <skip>true</skip>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
   </profiles>
 </project>