Posted to commits@hbase.apache.org by st...@apache.org on 2016/10/27 20:17:21 UTC

hbase git commit: Revert "HBASE-16952 Replace hadoop-maven-plugins with protobuf-maven-plugin for building protos" Revert premature commit

Repository: hbase
Updated Branches:
  refs/heads/master 7b74dd037 -> 738ff821d


Revert "HBASE-16952 Replace hadoop-maven-plugins with protobuf-maven-plugin for building protos"
Revert premature commit

This reverts commit d0e61b0e9ae3e998074834c500a663f9412629bc.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/738ff821
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/738ff821
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/738ff821

Branch: refs/heads/master
Commit: 738ff821dd092a1206cb39f6a024620df5710256
Parents: 7b74dd0
Author: Michael Stack <st...@apache.org>
Authored: Thu Oct 27 13:17:07 2016 -0700
Committer: Michael Stack <st...@apache.org>
Committed: Thu Oct 27 13:17:07 2016 -0700

----------------------------------------------------------------------
 hbase-endpoint/README.txt                       |  21 +-
 hbase-endpoint/pom.xml                          |  35 +-
 hbase-examples/README.txt                       |   1 +
 hbase-examples/pom.xml                          |  21 +-
 hbase-protocol-shaded/README.txt                |  32 +-
 hbase-protocol-shaded/pom.xml                   |  55 +-
 .../src/main/protobuf/CellSetMessage.proto      |  28 +
 .../src/main/protobuf/RowProcessor.proto        |  45 ++
 hbase-protocol/README.txt                       |  14 +-
 hbase-protocol/pom.xml                          |  51 +-
 .../protobuf/generated/TestProcedureProtos.java | 530 -------------------
 hbase-rest/README.txt                           |  26 -
 hbase-rest/pom.xml                              |  31 +-
 hbase-rsgroup/README.txt                        |  32 +-
 hbase-rsgroup/pom.xml                           |  28 -
 hbase-spark/README.txt                          |  21 +-
 hbase-spark/pom.xml                             |  21 +-
 pom.xml                                         |  12 +-
 18 files changed, 342 insertions(+), 662 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-endpoint/README.txt
----------------------------------------------------------------------
diff --git a/hbase-endpoint/README.txt b/hbase-endpoint/README.txt
index fac9e05..4deba00 100644
--- a/hbase-endpoint/README.txt
+++ b/hbase-endpoint/README.txt
@@ -18,16 +18,25 @@ v2.5.0 of protobuf, it is obtainable from here:
 
  https://github.com/google/protobuf/releases/tag/v2.5.0
 
-You can compile the protoc definitions by invoking maven with profile compile-protobuf
-or passing in compile-protobuf property.
+HBase uses the hadoop-maven-plugins:protoc goal to invoke the protoc command. You
+can compile the protoc definitions by invoking maven with the compile-protobuf
+profile or by passing in the compile-protobuf property.
 
- $ mvn compile -Dcompile-protobuf
+mvn compile -Dcompile-protobuf
 or
- $ mvn compile -Pcompile-protobuf
+mvn compile -Pcompile-protobuf
 
-You may also want to define protocExecutable for the protoc binary
+You may also want to define protoc.path for the protoc binary
 
- $ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc
+mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
+
+If you have added a new proto file, you should add it to the pom.xml file first.
+Other modules also support the maven profile.
 
 After you've done the above, check in your definition file changes and the
 generated files (or post a patch with both on a JIRA).
+
+NOTE: The maven protoc plugin is a little broken. It will only source one dir
+at a time. If protobuf files change, you will first have to run protoc with the
+source directory pointing back into the hbase-protocol module and then rerun it
+after editing the pom to point at this module's .proto files, as sketched below.
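
For illustration, the two-pass run that NOTE describes might look like the
following (a sketch; the protoc path and the pom edits between runs are
assumptions, not part of this commit):

 $ # pass 1: edit the pom so the last <directory> is ../hbase-protocol/src/main/protobuf
 $ mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
 $ # pass 2: restore the pom so the last <directory> is src/main/protobuf, rerun
 $ mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc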

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-endpoint/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index 16fc2ac..de05950 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -189,17 +189,44 @@
           <build>
             <plugins>
               <plugin>
-                <groupId>org.xolstice.maven.plugins</groupId>
-                <artifactId>protobuf-maven-plugin</artifactId>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-maven-plugins</artifactId>
                 <executions>
                   <execution>
                     <id>compile-protoc</id>
                     <phase>generate-sources</phase>
                     <goals>
-                      <goal>compile</goal>
+                      <goal>protoc</goal>
                     </goals>
                     <configuration>
-                       <protoSourceRoot>${basedir}/src/main/protobuf/,${basedir}/../hbase-protocol/src/main/protobuf</protoSourceRoot>
+                      <imports>
+                        <!--Reference the .proto files up in hbase-protocol so we
+                             don't have to copy them locally-->
+                        <param>${basedir}/src/main/protobuf</param>
+                        <param>${basedir}/../hbase-protocol/src/main/protobuf</param>
+                      </imports>
+                      <source>
+                        <!--The last directory wins, so for now we must manually run
+                             protoc once pointing at hbase-protocol and then again
+                             pointing at the local protobuf dir-->
+                        <directory>${basedir}/../hbase-protocol/src/main/protobuf</directory>
+                        <directory>${basedir}/src/main/protobuf</directory>
+                        <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                             We have to individually list every proto file here -->
+                        <includes>
+                          <!--CPEPs-->
+                          <include>Aggregate.proto</include>
+                          <include>BulkDelete.proto</include>
+                          <include>DummyRegionServerEndpoint.proto</include>
+                          <include>ColumnAggregationNullResponseProtocol.proto</include>
+                          <include>ColumnAggregationProtocol.proto</include>
+                          <include>ColumnAggregationWithErrorsProtocol.proto</include>
+                          <include>IncrementCounterProcessor.proto</include>
+                          <include>SecureBulkLoad.proto</include>
+                        </includes>
+                      </source>
+                      <!--<output>${project.build.directory}/generated-sources/java</output>-->
+                     <output>${basedir}/src/main/java/</output>
                     </configuration>
                   </execution>
                 </executions>
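
Note that <output> above points into src/main/java rather than
target/generated-sources, so regenerated classes land straight in the checked-in
source tree. After a run, something like the following shows what protoc touched
(illustrative):

 $ git status --short hbase-endpoint/src/main/java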

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-examples/README.txt
----------------------------------------------------------------------
diff --git a/hbase-examples/README.txt b/hbase-examples/README.txt
index 3252a80..78051a6 100644
--- a/hbase-examples/README.txt
+++ b/hbase-examples/README.txt
@@ -65,3 +65,4 @@ Example code.
 Also includes example coprocessor endpoint examples. The protobuf files are at src/main/protobuf.
 See hbase-protocol README.txt for how to generate the example RowCountService Coprocessor
 Endpoint and Aggregator examples.
+

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-examples/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index 9fcb6ce..2238857 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -179,15 +179,30 @@
           <build>
             <plugins>
               <plugin>
-                <groupId>org.xolstice.maven.plugins</groupId>
-                <artifactId>protobuf-maven-plugin</artifactId>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-maven-plugins</artifactId>
                 <executions>
                   <execution>
                     <id>compile-protoc</id>
                     <phase>generate-sources</phase>
                     <goals>
-                      <goal>compile</goal>
+                      <goal>protoc</goal>
                     </goals>
+                    <configuration>
+                      <imports>
+                        <param>${basedir}/src/main/protobuf</param>
+                      </imports>
+                      <source>
+                        <directory>${basedir}/src/main/protobuf</directory>
+                        <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                             We have to individually list every proto file here -->
+                        <includes>
+                          <include>Examples.proto</include>
+                        </includes>
+                      </source>
+                      <!--<output>${project.build.directory}/generated-sources/java</output>-->
+                     <output>${basedir}/src/main/java/</output>
+                    </configuration>
                   </execution>
                 </executions>
               </plugin>

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-protocol-shaded/README.txt
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/README.txt b/hbase-protocol-shaded/README.txt
index 3e886f4..b009643 100644
--- a/hbase-protocol-shaded/README.txt
+++ b/hbase-protocol-shaded/README.txt
@@ -1,6 +1,4 @@
 Please read carefully as the 'menu options' have changed.
-What you do in here is not what you do elsewhere to generate
-proto java files.
 
 This module has proto files used by core. These protos
 overlap with protos that are used by coprocessor endpoints
@@ -22,9 +20,26 @@ Finally, this module also includes patches applied on top of
 protobuf to add functionality not yet in protobuf that we
 need now.
 
+The shaded generated java files, including the patched protobuf
+source files, are all checked in.
+
 If you make changes to protos, to the protobuf version or to
-the patches you want to apply to protobuf, you must rerun the
-below step and then check in what it generated:
+the patches you want to apply to protobuf, you must rerun this
+step.
+
+First ensure that the appropriate protobuf protoc tool is in
+your $PATH as in:
+
+ $ export PATH=~/bin/protobuf-3.1.0/src:$PATH
+
+.. or pass -Dprotoc.path=PATH_TO_PROTOC when running
+the below mvn commands. NOTE: The protoc that we use internally
+is very likely NOT what is used over in the hbase-protocol
+module (here we'd use 3.1.0 whereas in hbase-protocol we'd
+use something older, 2.5.0). You may need to download protobuf and
+build protoc first.
+
+Run:
 
  $ mvn install -Dcompile-protobuf
 
@@ -32,20 +47,15 @@ or
 
 $ mvn install -Pcompile-protobuf
 
+to build and trigger the special generate-shaded-classes profile.
 When finished, the content of src/main/java/org/apache/hadoop/hbase/shaded
 will have been updated. Make sure everything builds and then carefully
 check in the changes. Files may have been added or removed
 by the steps above.
 
-The protobuf version used internally by hbase differs from what
-is used over in the CPEP hbase-protocol module but in here, the
-mvn takes care of ensuring we have the right protobuf in place so
-you don't have to.
-
 If you have patches for the protobuf, add them to
 src/main/patches directory. They will be applied after
 protobuf is shaded and unbundled into src/main/java.
 
 See the pom.xml under the generate-shaded-classes profile
-for more info on how this step works; it is a little involved
-and a bit messy but all in the name of saving you pain.
+for more info on how this step works.
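
Putting the steps above together, a 3.1.0 protoc setup for this module might look
like the following (a sketch, assuming a protobuf 3.1.0 source release unpacked
under ~/bin; release tarballs are at https://github.com/google/protobuf/releases):

 $ cd ~/bin/protobuf-3.1.0
 $ ./configure && make            # a git checkout needs ./autogen.sh first
 $ export PATH=~/bin/protobuf-3.1.0/src:$PATH
 $ protoc --version               # should print: libprotoc 3.1.0
 $ mvn install -Dcompile-protobuf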

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-protocol-shaded/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index d703c81..01845ae 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -40,13 +40,6 @@
     <sources.dir>src/main/java</sources.dir>
   </properties>
   <build>
-    <extensions>
-      <extension>
-        <groupId>kr.motd.maven</groupId>
-        <artifactId>os-maven-plugin</artifactId>
-        <version>1.4.0.Final</version>
-      </extension>
-    </extensions>
     <!--I want to override these in profile so define them
          with variables up here-->
     <sourceDirectory>${sources.dir}</sourceDirectory>
@@ -223,20 +216,58 @@
             </executions>
           </plugin>
           <plugin>
-            <groupId>org.xolstice.maven.plugins</groupId>
-            <artifactId>protobuf-maven-plugin</artifactId>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-maven-plugins</artifactId>
             <configuration>
-              <protocArtifact>com.google.protobuf:protoc:${internal.protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
+              <protocVersion>${internal.protobuf.version}</protocVersion>
             </configuration>
             <executions>
               <execution>
                 <id>compile-protoc</id>
                 <phase>generate-sources</phase>
                 <goals>
-                  <goal>compile</goal>
+                  <goal>protoc</goal>
                 </goals>
                 <configuration>
-                  <outputDirectory>${sources.dir}</outputDirectory>
+                  <imports>
+                    <param>${basedir}/src/main/protobuf</param>
+                  </imports>
+                  <source>
+                    <directory>${basedir}/src/main/protobuf</directory>
+                    <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                             We have to individually list every proto file here -->
+                    <includes>
+                      <include>Admin.proto</include>
+                      <include>Cell.proto</include>
+                      <include>Client.proto</include>
+                      <include>ClusterId.proto</include>
+                      <include>ClusterStatus.proto</include>
+                      <include>Comparator.proto</include>
+                      <include>Encryption.proto</include>
+                      <include>ErrorHandling.proto</include>
+                      <include>FS.proto</include>
+                      <include>Filter.proto</include>
+                      <include>HBase.proto</include>
+                      <include>HFile.proto</include>
+                      <include>LoadBalancer.proto</include>
+                      <include>MapReduce.proto</include>
+                      <include>Master.proto</include>
+                      <include>MasterProcedure.proto</include>
+                      <include>Procedure.proto</include>
+                      <include>Quota.proto</include>
+                      <include>RPC.proto</include>
+                      <include>RegionNormalizer.proto</include>
+                      <include>RegionServerStatus.proto</include>
+                      <include>Snapshot.proto</include>
+                      <include>Tracing.proto</include>
+                      <include>WAL.proto</include>
+                      <include>ZooKeeper.proto</include>
+                      <include>TestProcedure.proto</include>
+                      <include>test.proto</include>
+                      <include>test_rpc_service.proto</include>
+                    </includes>
+                  </source>
+                  <output>${sources.dir}</output>
                 </configuration>
               </execution>
             </executions>

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto b/hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto
new file mode 100644
index 0000000..4c40429
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto
@@ -0,0 +1,28 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import "CellMessage.proto";
+
+package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
+
+message CellSet {
+  message Row {
+    required bytes key = 1;
+    repeated Cell values = 2;
+  }
+  repeated Row rows = 1;
+}
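
As a rough illustration of the generated API (not part of this commit), a CellSet
could be assembled as below. The class names assume protobuf's default naming for
this file and package, the Cell setters assume the fields declared over in
CellMessage.proto, and the shaded build would relocate the com.google.protobuf
import under org.apache.hadoop.hbase.shaded:

 import com.google.protobuf.ByteString;
 import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.CellMessage.Cell;
 import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.CellSetMessage.CellSet;

 public class CellSetSketch {
   public static void main(String[] args) {
     // One cell: a column (family:qualifier), a timestamp, and a value.
     Cell cell = Cell.newBuilder()
         .setColumn(ByteString.copyFromUtf8("cf:qual"))   // assumed Cell field
         .setTimestamp(System.currentTimeMillis())        // assumed Cell field
         .setData(ByteString.copyFromUtf8("value"))       // assumed Cell field
         .build();
     // A row keyed "row1" carrying that cell, wrapped in a CellSet.
     CellSet cellSet = CellSet.newBuilder()
         .addRows(CellSet.Row.newBuilder()
             .setKey(ByteString.copyFromUtf8("row1"))
             .addValues(cell))
         .build();
     System.out.println(cellSet.toByteArray().length + " bytes on the wire");
   }
 }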

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto b/hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto
new file mode 100644
index 0000000..80fe606
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Defines a protocol to perform multi row transactions.
+ * See BaseRowProcessorEndpoint for the implementation.
+ * See HRegion#processRowsWithLocks() for details.
+ */
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.shaded.protobuf.generated";
+option java_outer_classname = "RowProcessorProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+message ProcessRequest {
+  required string row_processor_class_name = 1;
+  optional string row_processor_initializer_message_name = 2;
+  optional bytes  row_processor_initializer_message = 3;
+  optional uint64 nonce_group = 4;
+  optional uint64 nonce = 5;
+}
+
+message ProcessResponse {
+  required bytes row_processor_result = 1;
+}
+
+service RowProcessorService {
+  rpc Process(ProcessRequest) returns (ProcessResponse);
+}
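
For a feel of the client side (an illustration, not from this commit), a request
to this service would be populated roughly as below. RowProcessorProtos and its
nested builders follow from the java_outer_classname and java_package options
above; the processor class and initializer names are hypothetical, and again the
shaded build would relocate the com.google.protobuf import:

 import com.google.protobuf.ByteString;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RowProcessorProtos.ProcessRequest;

 public class ProcessRequestSketch {
   public static void main(String[] args) {
     ProcessRequest request = ProcessRequest.newBuilder()
         // Server-side RowProcessor implementation to run (hypothetical name).
         .setRowProcessorClassName("org.example.MyRowProcessor")
         // Optional serialized initializer for that processor (hypothetical).
         .setRowProcessorInitializerMessageName("MyRowProcessorInit")
         .setRowProcessorInitializerMessage(ByteString.EMPTY)
         // Nonces let the server detect retried invocations.
         .setNonceGroup(1L)
         .setNonce(42L)
         .build();
     System.out.println(request);
   }
 }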

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-protocol/README.txt
----------------------------------------------------------------------
diff --git a/hbase-protocol/README.txt b/hbase-protocol/README.txt
index 4a77bf4..a3e11a2 100644
--- a/hbase-protocol/README.txt
+++ b/hbase-protocol/README.txt
@@ -15,21 +15,23 @@ protobuf, it is obtainable from here:
 
  https://github.com/google/protobuf/releases/tag/v2.5.0
 
-To generate java files from protos run:
+HBase uses the hadoop-maven-plugins:protoc goal to invoke the protoc command. You
+can compile the protoc definitions by invoking maven with the compile-protobuf
+profile or by passing in the compile-protobuf property.
 
- $ mvn compile -Dcompile-protobuf
+mvn compile -Dcompile-protobuf
 or
- $ mvn compile -Pcompile-protobuf
+mvn compile -Pcompile-protobuf
 
-You may also want to define protocExecutable for the protoc binary
+You may also want to define protoc.path for the protoc binary
 
-mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc
+mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
 
 If you have added a new proto file, you should add it to the pom.xml file first.
 Other modules also support the maven profile.
 
 NOTE: The protoc used here is probably NOT the same as the hbase-protocol-shaded
-module uses; here we use a more palatable version -- 2.5.0 -- whereas over in
+module uses; here we use a more palatable version -- 2.5.0 -- whereas over in
 the internal hbase-protocol-shaded module, we'd use something newer. Be conscious
 of this when running your protoc, being sure to apply the appropriate version
 per module.
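
A quick sanity check that the protoc a given shell will hand this module matches
the expected 2.5.0 (illustrative output):

 $ protoc --version
 libprotoc 2.5.0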

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-protocol/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index a3d99b2..e21617b 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -159,15 +159,60 @@
           <build>
             <plugins>
               <plugin>
-                <groupId>org.xolstice.maven.plugins</groupId>
-                <artifactId>protobuf-maven-plugin</artifactId>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-maven-plugins</artifactId>
                 <executions>
                   <execution>
                     <id>compile-protoc</id>
                     <phase>generate-sources</phase>
                     <goals>
-                      <goal>compile</goal>
+                      <goal>protoc</goal>
                     </goals>
+                    <configuration>
+                      <imports>
+                        <param>${basedir}/src/main/protobuf</param>
+                      </imports>
+                      <source>
+                        <directory>${basedir}/src/main/protobuf</directory>
+                        <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                             We have to individually list every proto file here -->
+                        <includes>
+                          <include>AccessControl.proto</include>
+                          <include>Admin.proto</include>
+                          <include>Authentication.proto</include>
+                          <include>Cell.proto</include>
+                          <include>Client.proto</include>
+                          <include>ClusterId.proto</include>
+                          <include>ClusterStatus.proto</include>
+                          <include>Comparator.proto</include>
+                          <include>Encryption.proto</include>
+                          <include>ErrorHandling.proto</include>
+                          <include>FS.proto</include>
+                          <include>Filter.proto</include>
+                          <include>HBase.proto</include>
+                          <include>HFile.proto</include>
+                          <include>LoadBalancer.proto</include>
+                          <include>MapReduce.proto</include>
+                          <include>MultiRowMutation.proto</include>
+                          <include>Quota.proto</include>
+                          <include>RPC.proto</include>
+                          <include>RowProcessor.proto</include>
+                          <include>Snapshot.proto</include>
+                          <!--These two test proto files exist in shaded and non-shaded
+                               form and are used on both sides for testing-->
+                          <include>test.proto</include>
+                          <include>test_rpc_service.proto</include>
+                          <include>Tracing.proto</include>
+                          <include>VisibilityLabels.proto</include>
+                          <include>WAL.proto</include>
+                          <include>ZooKeeper.proto</include>
+                          <include>PingProtocol.proto</include>
+                        </includes>
+                      </source>
+                      <!--<output>${project.build.directory}/generated-sources/java</output>-->
+                     <output>${basedir}/src/main/java/</output>
+                    </configuration>
                   </execution>
                 </executions>
               </plugin>

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java
deleted file mode 100644
index f065550..0000000
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java
+++ /dev/null
@@ -1,530 +0,0 @@
-// Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: TestProcedure.proto
-
-package org.apache.hadoop.hbase.ipc.protobuf.generated;
-
-public final class TestProcedureProtos {
-  private TestProcedureProtos() {}
-  public static void registerAllExtensions(
-      com.google.protobuf.ExtensionRegistry registry) {
-  }
-  public interface TestTableDDLStateDataOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // required string table_name = 1;
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    boolean hasTableName();
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    java.lang.String getTableName();
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    com.google.protobuf.ByteString
-        getTableNameBytes();
-  }
-  /**
-   * Protobuf type {@code TestTableDDLStateData}
-   */
-  public static final class TestTableDDLStateData extends
-      com.google.protobuf.GeneratedMessage
-      implements TestTableDDLStateDataOrBuilder {
-    // Use TestTableDDLStateData.newBuilder() to construct.
-    private TestTableDDLStateData(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
-      super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private TestTableDDLStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final TestTableDDLStateData defaultInstance;
-    public static TestTableDDLStateData getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public TestTableDDLStateData getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private TestTableDDLStateData(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              tableName_ = input.readBytes();
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.Builder.class);
-    }
-
-    public static com.google.protobuf.Parser<TestTableDDLStateData> PARSER =
-        new com.google.protobuf.AbstractParser<TestTableDDLStateData>() {
-      public TestTableDDLStateData parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new TestTableDDLStateData(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<TestTableDDLStateData> getParserForType() {
-      return PARSER;
-    }
-
-    private int bitField0_;
-    // required string table_name = 1;
-    public static final int TABLE_NAME_FIELD_NUMBER = 1;
-    private java.lang.Object tableName_;
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    public boolean hasTableName() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    public java.lang.String getTableName() {
-      java.lang.Object ref = tableName_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        com.google.protobuf.ByteString bs = 
-            (com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          tableName_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    public com.google.protobuf.ByteString
-        getTableNameBytes() {
-      java.lang.Object ref = tableName_;
-      if (ref instanceof java.lang.String) {
-        com.google.protobuf.ByteString b = 
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        tableName_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    private void initFields() {
-      tableName_ = "";
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      if (!hasTableName()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeBytes(1, getTableNameBytes());
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(1, getTableNameBytes());
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    private static final long serialVersionUID = 0L;
-    @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    /**
-     * Protobuf type {@code TestTableDDLStateData}
-     */
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateDataOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.Builder.class);
-      }
-
-      // Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        tableName_ = "";
-        bitField0_ = (bitField0_ & ~0x00000001);
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
-      }
-
-      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.getDefaultInstance();
-      }
-
-      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData build() {
-        org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData buildPartial() {
-        org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.tableName_ = tableName_;
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) {
-          return mergeFrom((org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData other) {
-        if (other == org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.getDefaultInstance()) return this;
-        if (other.hasTableName()) {
-          bitField0_ |= 0x00000001;
-          tableName_ = other.tableName_;
-          onChanged();
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        if (!hasTableName()) {
-          
-          return false;
-        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) e.getUnfinishedMessage();
-          throw e;
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
-          }
-        }
-        return this;
-      }
-      private int bitField0_;
-
-      // required string table_name = 1;
-      private java.lang.Object tableName_ = "";
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public boolean hasTableName() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public java.lang.String getTableName() {
-        java.lang.Object ref = tableName_;
-        if (!(ref instanceof java.lang.String)) {
-          java.lang.String s = ((com.google.protobuf.ByteString) ref)
-              .toStringUtf8();
-          tableName_ = s;
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public com.google.protobuf.ByteString
-          getTableNameBytes() {
-        java.lang.Object ref = tableName_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b = 
-              com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          tableName_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
-      }
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public Builder setTableName(
-          java.lang.String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000001;
-        tableName_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public Builder clearTableName() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        tableName_ = getDefaultInstance().getTableName();
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public Builder setTableNameBytes(
-          com.google.protobuf.ByteString value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000001;
-        tableName_ = value;
-        onChanged();
-        return this;
-      }
-
-      // @@protoc_insertion_point(builder_scope:TestTableDDLStateData)
-    }
-
-    static {
-      defaultInstance = new TestTableDDLStateData(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:TestTableDDLStateData)
-  }
-
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_TestTableDDLStateData_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_TestTableDDLStateData_fieldAccessorTable;
-
-  public static com.google.protobuf.Descriptors.FileDescriptor
-      getDescriptor() {
-    return descriptor;
-  }
-  private static com.google.protobuf.Descriptors.FileDescriptor
-      descriptor;
-  static {
-    java.lang.String[] descriptorData = {
-      "\n\023TestProcedure.proto\"+\n\025TestTableDDLSta" +
-      "teData\022\022\n\ntable_name\030\001 \002(\tBH\n.org.apache" +
-      ".hadoop.hbase.ipc.protobuf.generatedB\023Te" +
-      "stProcedureProtos\210\001\001"
-    };
-    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
-      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
-        public com.google.protobuf.ExtensionRegistry assignDescriptors(
-            com.google.protobuf.Descriptors.FileDescriptor root) {
-          descriptor = root;
-          internal_static_TestTableDDLStateData_descriptor =
-            getDescriptor().getMessageTypes().get(0);
-          internal_static_TestTableDDLStateData_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_TestTableDDLStateData_descriptor,
-              new java.lang.String[] { "TableName", });
-          return null;
-        }
-      };
-    com.google.protobuf.Descriptors.FileDescriptor
-      .internalBuildGeneratedFileFrom(descriptorData,
-        new com.google.protobuf.Descriptors.FileDescriptor[] {
-        }, assigner);
-  }
-
-  // @@protoc_insertion_point(outer_class_scope)
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-rest/README.txt
----------------------------------------------------------------------
diff --git a/hbase-rest/README.txt b/hbase-rest/README.txt
deleted file mode 100644
index 402f7df..0000000
--- a/hbase-rest/README.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-This maven module has the protobuf definition files used by REST.
-
-The produced java classes are generated and then checked in. The reasoning is
-that they change infrequently.
-
-To regenerate the classes after making definition file changes, in here or over
-in hbase-protocol since we source some of those protos in this package, ensure
-first that the protobuf protoc tool is in your $PATH. You may need to download
-it and build it first; it is part of the protobuf package. For example, if using
-v2.5.0 of protobuf, it is obtainable from here:
-
- https://github.com/google/protobuf/releases/tag/v2.5.0
-
-You can compile the protoc definitions by invoking maven with profile compile-protobuf
-or passing in compile-protobuf property.
-
- $ mvn compile -Dcompile-protobuf
-or
- $ mvn compile -Pcompile-protobuf
-
-You may also want to define protocExecutable for the protoc binary
-
- $ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc
-
-After you've done the above, check it in and then check it in (or post a patch
-on a JIRA with your definition file changes and the generated files).

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-rest/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index 53fbad4..8a270d7 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -353,15 +353,40 @@
           <build>
             <plugins>
               <plugin>
-                <groupId>org.xolstice.maven.plugins</groupId>
-                <artifactId>protobuf-maven-plugin</artifactId>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-maven-plugins</artifactId>
                 <executions>
                   <execution>
                     <id>compile-protoc</id>
                     <phase>generate-sources</phase>
                     <goals>
-                      <goal>compile</goal>
+                      <goal>protoc</goal>
                     </goals>
+                    <configuration>
+                      <imports>
+                        <param>${basedir}/src/main/protobuf</param>
+                      </imports>
+                      <source>
+                        <directory>${basedir}/src/main/protobuf</directory>
+                        <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                             We have to individually list every proto file here -->
+                        <includes>
+                          <include>CellMessage.proto</include>
+                          <include>CellSetMessage.proto</include>
+                          <include>ColumnSchemaMessage.proto</include>
+                          <include>NamespacePropertiesMessage.proto</include>
+                          <include>NamespacesMessage.proto</include>
+                          <include>ScannerMessage.proto</include>
+                          <include>StorageClusterStatusMessage.proto</include>
+                          <include>TableInfoMessage.proto</include>
+                          <include>TableListMessage.proto</include>
+                          <include>TableSchemaMessage.proto</include>
+                          <include>VersionMessage.proto</include>
+                        </includes>
+                      </source>
+                      <!--<output>${project.build.directory}/generated-sources/java</output>-->
+                     <output>${basedir}/src/main/java/</output>
+                    </configuration>
                   </execution>
                 </executions>
               </plugin>

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-rsgroup/README.txt
----------------------------------------------------------------------
diff --git a/hbase-rsgroup/README.txt b/hbase-rsgroup/README.txt
index 0491a2a..1e247a9 100644
--- a/hbase-rsgroup/README.txt
+++ b/hbase-rsgroup/README.txt
@@ -1,26 +1,30 @@
-This maven module has the protobuf definition files used by regionserver grouping.
+These are the protobuf definition files used by the region grouping feature.
+The protos here are used by the region grouping coprocessor endpoints.
 
-The produced java classes are generated and then checked in. The reasoning is
-that they change infrequently.
+The produced java classes are generated and then checked in. The reasoning
+is that they change infrequently and it saves generating anew on each build.
 
-To regenerate the classes after making definition file changes, in here or over
-in hbase-protocol since we source some of those protos in this package, ensure
-first that the protobuf protoc tool is in your $PATH. You may need to download
-it and build it first; it is part of the protobuf package. For example, if using
-v2.5.0 of protobuf, it is obtainable from here:
+To regenerate the classes after making definition file changes, ensure first that
+the protobuf protoc tool is in your $PATH. You may need to download it and build
+it first; it's part of the protobuf package. For example, if using v2.5.0 of
+protobuf, it is obtainable from here:
 
  https://github.com/google/protobuf/releases/tag/v2.5.0
 
-You can compile the protoc definitions by invoking maven with profile compile-protobuf
-or passing in compile-protobuf property.
+HBase uses the hadoop-maven-plugins:protoc goal to invoke the protoc command. You
+can compile the protoc definitions by invoking maven with the compile-protobuf
+profile or by passing in the compile-protobuf property.
 
- $ mvn compile -Dcompile-protobuf
+mvn compile -Dcompile-protobuf
 or
- $ mvn compile -Pcompile-protobuf
+mvn compile -Pcompile-protobuf
 
-You may also want to define protocExecutable for the protoc binary
+You may also want to define protoc.path for the protoc binary
 
- $ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc
+mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
+
+If you have added a new proto file, you should add it to the pom.xml file first.
+Other modules also support the maven profile.
 
 After you've done the above, check in your definition file changes and the
 generated files (or post a patch with both on a JIRA).

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-rsgroup/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml
index 7534a65..58802cf 100644
--- a/hbase-rsgroup/pom.xml
+++ b/hbase-rsgroup/pom.xml
@@ -226,34 +226,6 @@
         <surefire.skipFirstPart>true</surefire.skipFirstPart>
       </properties>
     </profile>
-    <profile>
-      <id>compile-protobuf</id>
-      <activation>
-        <property>
-          <name>compile-protobuf</name>
-        </property>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.xolstice.maven.plugins</groupId>
-            <artifactId>protobuf-maven-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>compile-protoc</id>
-                <phase>generate-sources</phase>
-                <goals>
-                  <goal>compile</goal>
-                </goals>
-                <configuration>
-                  <protoSourceRoot>${basedir}/src/main/protobuf/,${basedir}/../hbase-protocol/src/main/protobuf</protoSourceRoot>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-        </profile>
 
     <!-- profile against Hadoop 2.x: This is the default. -->
     <profile>

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-spark/README.txt
----------------------------------------------------------------------
diff --git a/hbase-spark/README.txt b/hbase-spark/README.txt
index a60a964..5569924 100644
--- a/hbase-spark/README.txt
+++ b/hbase-spark/README.txt
@@ -11,16 +11,25 @@ v2.5.0 of protobuf, it is obtainable from here:
 
  https://github.com/google/protobuf/releases/tag/v2.5.0
 
-You can compile the protoc definitions by invoking maven with profile compile-protobuf
-or passing in compile-protobuf property.
+HBase uses the hadoop-maven-plugins:protoc goal to invoke the protoc command. You
+can compile the protoc definitions by invoking maven with the compile-protobuf
+profile or by passing in the compile-protobuf property.
 
- $ mvn compile -Dcompile-protobuf
+mvn compile -Dcompile-protobuf
 or
- $ mvn compile -Pcompile-protobuf
+mvn compile -Pcompile-protobuf
 
-You may also want to define protocExecutable for the protoc binary
+You may also want to define protoc.path for the protoc binary
 
- $ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc
+mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
+
+If you have added a new proto file, you should add it to the pom.xml file first.
+Other modules also support the maven profile.
 
 After you've done the above, check in your definition file changes and the
 generated files (or post a patch with both on a JIRA).
+
+NOTE: The maven protoc plugin is a little broken. It will only source one dir
+at a time. If protobuf files change, you will first have to run protoc with the
+source directory pointing back into the hbase-protocol module and then rerun it
+after editing the pom to point at this module's .proto files.

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/hbase-spark/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml
index 0a96f9c..a60b384 100644
--- a/hbase-spark/pom.xml
+++ b/hbase-spark/pom.xml
@@ -716,15 +716,30 @@
           <build>
             <plugins>
               <plugin>
-                <groupId>org.xolstice.maven.plugins</groupId>
-                <artifactId>protobuf-maven-plugin</artifactId>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-maven-plugins</artifactId>
                 <executions>
                   <execution>
                     <id>compile-protoc</id>
                     <phase>generate-sources</phase>
                     <goals>
-                      <goal>compile</goal>
+                      <goal>protoc</goal>
                     </goals>
+                    <configuration>
+                      <imports>
+                        <param>${basedir}/src/main/protobuf</param>
+                      </imports>
+                      <source>
+                        <directory>${basedir}/src/main/protobuf</directory>
+                        <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                             We have to individually list every proto file here -->
+                        <includes>
+                          <include>SparkFilter.proto</include>
+                        </includes>
+                      </source>
+                      <!--<output>${project.build.directory}/generated-sources/java</output>-->
+                     <output>${basedir}/src/main/java/</output>
+                    </configuration>
                   </execution>
                 </executions>
               </plugin>

http://git-wip-us.apache.org/repos/asf/hbase/blob/738ff821/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 38c8978..ad77f05 100644
--- a/pom.xml
+++ b/pom.xml
@@ -811,13 +811,12 @@
           </configuration>
         </plugin>
         <plugin>
-          <groupId>org.xolstice.maven.plugins</groupId>
-          <artifactId>protobuf-maven-plugin</artifactId>
-          <version>${protobuf.plugin.version}</version>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-maven-plugins</artifactId>
+          <version>${hadoop-two.version}</version>
           <configuration>
-            <protoSourceRoot>${basedir}/src/main/protobuf/</protoSourceRoot>
-            <outputDirectory>${basedir}/src/main/java/</outputDirectory>
-            <clearOutputDirectory>false</clearOutputDirectory>
+            <protocVersion>${protobuf.version}</protocVersion>
+            <protocCommand>${protoc.path}</protocCommand>
           </configuration>
         </plugin>
         <plugin>
@@ -1216,7 +1215,6 @@
     <log4j.version>1.2.17</log4j.version>
     <mockito-all.version>1.10.8</mockito-all.version>
     <protobuf.version>2.5.0</protobuf.version>
-    <protobuf.plugin.version>0.5.0</protobuf.plugin.version>
     <thrift.path>thrift</thrift.path>
     <thrift.version>0.9.3</thrift.version>
     <zookeeper.version>3.4.8</zookeeper.version>
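
To double-check which version the build will hand to <protocVersion>, maven's
help plugin can echo the property (illustrative; the value prints among the usual
[INFO] log lines):

 $ mvn help:evaluate -Dexpression=protobuf.version
 ...
 2.5.0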