Posted to commits@hbase.apache.org by st...@apache.org on 2013/07/10 18:31:09 UTC

svn commit: r1501835 [1/8] - in /hbase/branches/0.95: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ hbase-protocol/src/main/protobuf/ hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ hbase-server/src/test/ja...

Author: stack
Date: Wed Jul 10 16:31:08 2013
New Revision: 1501835

URL: http://svn.apache.org/r1501835
Log:
HBASE-8915 protobuf message style
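
The change renames protobuf fields from lowerCamelCase to lower_underscore and service methods to UpperCamelCase, then regenerates the Java classes. A minimal before/after sketch (illustrative only, not verbatim from the modified Admin.proto, which contains the full message set) using the GetRegionInfoResponse message and its AdminService rpc:

    // Before: lowerCamelCase field names, lowerCamelCase rpc names
    message GetRegionInfoResponse {
      required RegionInfo regionInfo = 1;              // RegionInfo comes from hbase.proto
      optional CompactionState compactionState = 2;    // nested enum, values unchanged by this patch
    }
    service AdminService {
      rpc getRegionInfo(GetRegionInfoRequest) returns (GetRegionInfoResponse);
    }

    // After: lower_underscore field names, UpperCamelCase rpc names.
    // Field tags 1 and 2 are untouched, so the wire format stays compatible.
    message GetRegionInfoResponse {
      required RegionInfo region_info = 1;
      optional CompactionState compaction_state = 2;
    }
    service AdminService {
      rpc GetRegionInfo(GetRegionInfoRequest) returns (GetRegionInfoResponse);
    }

Because protoc derives Java accessor names by camel-casing the field name, getters such as getRegionInfo() and getCompactionState() keep their names; only generated constants change (for example REGIONINFO_FIELD_NUMBER becomes REGION_INFO_FIELD_NUMBER), as the diff below shows.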

Modified:
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/CellProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/AccessControl.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/Admin.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/Aggregate.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/Authentication.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/Cell.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/Client.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/ClusterId.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/ClusterStatus.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/Comparator.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/ErrorHandling.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/Filter.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/HFile.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/LoadBalancer.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/Master.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/MasterAdmin.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/MasterMonitor.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/MultiRowMutation.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/RPC.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/RegionServerStatus.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/RowProcessor.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/Tracing.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/WAL.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/ZooKeeper.proto
    hbase/branches/0.95/hbase-protocol/src/main/protobuf/hbase.proto
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
    hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java

Modified: hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java?rev=1501835&r1=1501834&r2=1501835&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java Wed Jul 10 16:31:08 2013
@@ -6422,11 +6422,11 @@ public final class AccessControlProtos {
       "Permission\":\n\027CheckPermissionsRequest\022\037\n" +
       "\npermission\030\001 \003(\0132\013.Permission\"\032\n\030CheckP" +
       "ermissionsResponse2\373\001\n\024AccessControlServ" +
-      "ice\022&\n\005grant\022\r.GrantRequest\032\016.GrantRespo",
-      "nse\022)\n\006revoke\022\016.RevokeRequest\032\017.RevokeRe" +
-      "sponse\022G\n\022getUserPermissions\022\027.UserPermi" +
+      "ice\022&\n\005Grant\022\r.GrantRequest\032\016.GrantRespo",
+      "nse\022)\n\006Revoke\022\016.RevokeRequest\032\017.RevokeRe" +
+      "sponse\022G\n\022GetUserPermissions\022\027.UserPermi" +
       "ssionsRequest\032\030.UserPermissionsResponse\022" +
-      "G\n\020checkPermissions\022\030.CheckPermissionsRe" +
+      "G\n\020CheckPermissions\022\030.CheckPermissionsRe" +
       "quest\032\031.CheckPermissionsResponseBI\n*org." +
       "apache.hadoop.hbase.protobuf.generatedB\023" +
       "AccessControlProtosH\001\210\001\001\240\001\001"

Modified: hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java?rev=1501835&r1=1501834&r2=1501835&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java Wed Jul 10 16:31:08 2013
@@ -16,7 +16,7 @@ public final class AdminProtos {
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
     
-    // optional bool compactionState = 2;
+    // optional bool compaction_state = 2;
     boolean hasCompactionState();
     boolean getCompactionState();
   }
@@ -62,8 +62,8 @@ public final class AdminProtos {
       return region_;
     }
     
-    // optional bool compactionState = 2;
-    public static final int COMPACTIONSTATE_FIELD_NUMBER = 2;
+    // optional bool compaction_state = 2;
+    public static final int COMPACTION_STATE_FIELD_NUMBER = 2;
     private boolean compactionState_;
     public boolean hasCompactionState() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -166,7 +166,7 @@ public final class AdminProtos {
         hash = (53 * hash) + getRegion().hashCode();
       }
       if (hasCompactionState()) {
-        hash = (37 * hash) + COMPACTIONSTATE_FIELD_NUMBER;
+        hash = (37 * hash) + COMPACTION_STATE_FIELD_NUMBER;
         hash = (53 * hash) + hashBoolean(getCompactionState());
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -515,7 +515,7 @@ public final class AdminProtos {
         return regionBuilder_;
       }
       
-      // optional bool compactionState = 2;
+      // optional bool compaction_state = 2;
       private boolean compactionState_ ;
       public boolean hasCompactionState() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -550,12 +550,12 @@ public final class AdminProtos {
   public interface GetRegionInfoResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // required .RegionInfo regionInfo = 1;
+    // required .RegionInfo region_info = 1;
     boolean hasRegionInfo();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder();
     
-    // optional .GetRegionInfoResponse.CompactionState compactionState = 2;
+    // optional .GetRegionInfoResponse.CompactionState compaction_state = 2;
     boolean hasCompactionState();
     org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState();
   }
@@ -663,8 +663,8 @@ public final class AdminProtos {
     }
     
     private int bitField0_;
-    // required .RegionInfo regionInfo = 1;
-    public static final int REGIONINFO_FIELD_NUMBER = 1;
+    // required .RegionInfo region_info = 1;
+    public static final int REGION_INFO_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_;
     public boolean hasRegionInfo() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
@@ -676,8 +676,8 @@ public final class AdminProtos {
       return regionInfo_;
     }
     
-    // optional .GetRegionInfoResponse.CompactionState compactionState = 2;
-    public static final int COMPACTIONSTATE_FIELD_NUMBER = 2;
+    // optional .GetRegionInfoResponse.CompactionState compaction_state = 2;
+    public static final int COMPACTION_STATE_FIELD_NUMBER = 2;
     private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_;
     public boolean hasCompactionState() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -776,11 +776,11 @@ public final class AdminProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasRegionInfo()) {
-        hash = (37 * hash) + REGIONINFO_FIELD_NUMBER;
+        hash = (37 * hash) + REGION_INFO_FIELD_NUMBER;
         hash = (53 * hash) + getRegionInfo().hashCode();
       }
       if (hasCompactionState()) {
-        hash = (37 * hash) + COMPACTIONSTATE_FIELD_NUMBER;
+        hash = (37 * hash) + COMPACTION_STATE_FIELD_NUMBER;
         hash = (53 * hash) + hashEnum(getCompactionState());
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -1045,7 +1045,7 @@ public final class AdminProtos {
       
       private int bitField0_;
       
-      // required .RegionInfo regionInfo = 1;
+      // required .RegionInfo region_info = 1;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
@@ -1135,7 +1135,7 @@ public final class AdminProtos {
         return regionInfoBuilder_;
       }
       
-      // optional .GetRegionInfoResponse.CompactionState compactionState = 2;
+      // optional .GetRegionInfoResponse.CompactionState compaction_state = 2;
       private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
       public boolean hasCompactionState() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -1757,7 +1757,7 @@ public final class AdminProtos {
   public interface GetStoreFileResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // repeated string storeFile = 1;
+    // repeated string store_file = 1;
     java.util.List<String> getStoreFileList();
     int getStoreFileCount();
     String getStoreFile(int index);
@@ -1790,8 +1790,8 @@ public final class AdminProtos {
       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable;
     }
     
-    // repeated string storeFile = 1;
-    public static final int STOREFILE_FIELD_NUMBER = 1;
+    // repeated string store_file = 1;
+    public static final int STORE_FILE_FIELD_NUMBER = 1;
     private com.google.protobuf.LazyStringList storeFile_;
     public java.util.List<String>
         getStoreFileList() {
@@ -1875,7 +1875,7 @@ public final class AdminProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (getStoreFileCount() > 0) {
-        hash = (37 * hash) + STOREFILE_FIELD_NUMBER;
+        hash = (37 * hash) + STORE_FILE_FIELD_NUMBER;
         hash = (53 * hash) + getStoreFileList().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -2106,7 +2106,7 @@ public final class AdminProtos {
       
       private int bitField0_;
       
-      // repeated string storeFile = 1;
+      // repeated string store_file = 1;
       private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
       private void ensureStoreFileIsMutable() {
         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -2476,7 +2476,7 @@ public final class AdminProtos {
   public interface GetOnlineRegionResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // repeated .RegionInfo regionInfo = 1;
+    // repeated .RegionInfo region_info = 1;
     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> 
         getRegionInfoList();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);
@@ -2514,8 +2514,8 @@ public final class AdminProtos {
       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable;
     }
     
-    // repeated .RegionInfo regionInfo = 1;
-    public static final int REGIONINFO_FIELD_NUMBER = 1;
+    // repeated .RegionInfo region_info = 1;
+    public static final int REGION_INFO_FIELD_NUMBER = 1;
     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_;
     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
       return regionInfo_;
@@ -2607,7 +2607,7 @@ public final class AdminProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (getRegionInfoCount() > 0) {
-        hash = (37 * hash) + REGIONINFO_FIELD_NUMBER;
+        hash = (37 * hash) + REGION_INFO_FIELD_NUMBER;
         hash = (53 * hash) + getRegionInfoList().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -2869,7 +2869,7 @@ public final class AdminProtos {
       
       private int bitField0_;
       
-      // repeated .RegionInfo regionInfo = 1;
+      // repeated .RegionInfo region_info = 1;
       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_ =
         java.util.Collections.emptyList();
       private void ensureRegionInfoIsMutable() {
@@ -3069,7 +3069,7 @@ public final class AdminProtos {
   public interface OpenRegionRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
+    // repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;
     java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> 
         getOpenInfoList();
     org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index);
@@ -3115,11 +3115,11 @@ public final class AdminProtos {
       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion();
       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder();
       
-      // optional uint32 versionOfOfflineNode = 2;
+      // optional uint32 version_of_offline_node = 2;
       boolean hasVersionOfOfflineNode();
       int getVersionOfOfflineNode();
       
-      // repeated .ServerName favoredNodes = 3;
+      // repeated .ServerName favored_nodes = 3;
       java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> 
           getFavoredNodesList();
       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNodes(int index);
@@ -3171,8 +3171,8 @@ public final class AdminProtos {
         return region_;
       }
       
-      // optional uint32 versionOfOfflineNode = 2;
-      public static final int VERSIONOFOFFLINENODE_FIELD_NUMBER = 2;
+      // optional uint32 version_of_offline_node = 2;
+      public static final int VERSION_OF_OFFLINE_NODE_FIELD_NUMBER = 2;
       private int versionOfOfflineNode_;
       public boolean hasVersionOfOfflineNode() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -3181,8 +3181,8 @@ public final class AdminProtos {
         return versionOfOfflineNode_;
       }
       
-      // repeated .ServerName favoredNodes = 3;
-      public static final int FAVOREDNODES_FIELD_NUMBER = 3;
+      // repeated .ServerName favored_nodes = 3;
+      public static final int FAVORED_NODES_FIELD_NUMBER = 3;
       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNodes_;
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodesList() {
         return favoredNodes_;
@@ -3312,11 +3312,11 @@ public final class AdminProtos {
           hash = (53 * hash) + getRegion().hashCode();
         }
         if (hasVersionOfOfflineNode()) {
-          hash = (37 * hash) + VERSIONOFOFFLINENODE_FIELD_NUMBER;
+          hash = (37 * hash) + VERSION_OF_OFFLINE_NODE_FIELD_NUMBER;
           hash = (53 * hash) + getVersionOfOfflineNode();
         }
         if (getFavoredNodesCount() > 0) {
-          hash = (37 * hash) + FAVOREDNODES_FIELD_NUMBER;
+          hash = (37 * hash) + FAVORED_NODES_FIELD_NUMBER;
           hash = (53 * hash) + getFavoredNodesList().hashCode();
         }
         hash = (29 * hash) + getUnknownFields().hashCode();
@@ -3719,7 +3719,7 @@ public final class AdminProtos {
           return regionBuilder_;
         }
         
-        // optional uint32 versionOfOfflineNode = 2;
+        // optional uint32 version_of_offline_node = 2;
         private int versionOfOfflineNode_ ;
         public boolean hasVersionOfOfflineNode() {
           return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -3740,7 +3740,7 @@ public final class AdminProtos {
           return this;
         }
         
-        // repeated .ServerName favoredNodes = 3;
+        // repeated .ServerName favored_nodes = 3;
         private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNodes_ =
           java.util.Collections.emptyList();
         private void ensureFavoredNodesIsMutable() {
@@ -3937,8 +3937,8 @@ public final class AdminProtos {
       // @@protoc_insertion_point(class_scope:OpenRegionRequest.RegionOpenInfo)
     }
     
-    // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
-    public static final int OPENINFO_FIELD_NUMBER = 1;
+    // repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;
+    public static final int OPEN_INFO_FIELD_NUMBER = 1;
     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> openInfo_;
     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> getOpenInfoList() {
       return openInfo_;
@@ -4030,7 +4030,7 @@ public final class AdminProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (getOpenInfoCount() > 0) {
-        hash = (37 * hash) + OPENINFO_FIELD_NUMBER;
+        hash = (37 * hash) + OPEN_INFO_FIELD_NUMBER;
         hash = (53 * hash) + getOpenInfoList().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -4292,7 +4292,7 @@ public final class AdminProtos {
       
       private int bitField0_;
       
-      // repeated .OpenRegionRequest.RegionOpenInfo openInfo = 1;
+      // repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;
       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> openInfo_ =
         java.util.Collections.emptyList();
       private void ensureOpenInfoIsMutable() {
@@ -4492,7 +4492,7 @@ public final class AdminProtos {
   public interface OpenRegionResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // repeated .OpenRegionResponse.RegionOpeningState openingState = 1;
+    // repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;
     java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList();
     int getOpeningStateCount();
     org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index);
@@ -4597,8 +4597,8 @@ public final class AdminProtos {
       // @@protoc_insertion_point(enum_scope:OpenRegionResponse.RegionOpeningState)
     }
     
-    // repeated .OpenRegionResponse.RegionOpeningState openingState = 1;
-    public static final int OPENINGSTATE_FIELD_NUMBER = 1;
+    // repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;
+    public static final int OPENING_STATE_FIELD_NUMBER = 1;
     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> openingState_;
     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList() {
       return openingState_;
@@ -4681,7 +4681,7 @@ public final class AdminProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (getOpeningStateCount() > 0) {
-        hash = (37 * hash) + OPENINGSTATE_FIELD_NUMBER;
+        hash = (37 * hash) + OPENING_STATE_FIELD_NUMBER;
         hash = (53 * hash) + hashEnumList(getOpeningStateList());
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -4931,7 +4931,7 @@ public final class AdminProtos {
       
       private int bitField0_;
       
-      // repeated .OpenRegionResponse.RegionOpeningState openingState = 1;
+      // repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;
       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> openingState_ =
         java.util.Collections.emptyList();
       private void ensureOpeningStateIsMutable() {
@@ -5001,15 +5001,15 @@ public final class AdminProtos {
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
     
-    // optional uint32 versionOfClosingNode = 2;
+    // optional uint32 version_of_closing_node = 2;
     boolean hasVersionOfClosingNode();
     int getVersionOfClosingNode();
     
-    // optional bool transitionInZK = 3 [default = true];
+    // optional bool transition_in_ZK = 3 [default = true];
     boolean hasTransitionInZK();
     boolean getTransitionInZK();
     
-    // optional .ServerName destinationServer = 4;
+    // optional .ServerName destination_server = 4;
     boolean hasDestinationServer();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder();
@@ -5056,8 +5056,8 @@ public final class AdminProtos {
       return region_;
     }
     
-    // optional uint32 versionOfClosingNode = 2;
-    public static final int VERSIONOFCLOSINGNODE_FIELD_NUMBER = 2;
+    // optional uint32 version_of_closing_node = 2;
+    public static final int VERSION_OF_CLOSING_NODE_FIELD_NUMBER = 2;
     private int versionOfClosingNode_;
     public boolean hasVersionOfClosingNode() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -5066,8 +5066,8 @@ public final class AdminProtos {
       return versionOfClosingNode_;
     }
     
-    // optional bool transitionInZK = 3 [default = true];
-    public static final int TRANSITIONINZK_FIELD_NUMBER = 3;
+    // optional bool transition_in_ZK = 3 [default = true];
+    public static final int TRANSITION_IN_ZK_FIELD_NUMBER = 3;
     private boolean transitionInZK_;
     public boolean hasTransitionInZK() {
       return ((bitField0_ & 0x00000004) == 0x00000004);
@@ -5076,8 +5076,8 @@ public final class AdminProtos {
       return transitionInZK_;
     }
     
-    // optional .ServerName destinationServer = 4;
-    public static final int DESTINATIONSERVER_FIELD_NUMBER = 4;
+    // optional .ServerName destination_server = 4;
+    public static final int DESTINATION_SERVER_FIELD_NUMBER = 4;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_;
     public boolean hasDestinationServer() {
       return ((bitField0_ & 0x00000008) == 0x00000008);
@@ -5215,15 +5215,15 @@ public final class AdminProtos {
         hash = (53 * hash) + getRegion().hashCode();
       }
       if (hasVersionOfClosingNode()) {
-        hash = (37 * hash) + VERSIONOFCLOSINGNODE_FIELD_NUMBER;
+        hash = (37 * hash) + VERSION_OF_CLOSING_NODE_FIELD_NUMBER;
         hash = (53 * hash) + getVersionOfClosingNode();
       }
       if (hasTransitionInZK()) {
-        hash = (37 * hash) + TRANSITIONINZK_FIELD_NUMBER;
+        hash = (37 * hash) + TRANSITION_IN_ZK_FIELD_NUMBER;
         hash = (53 * hash) + hashBoolean(getTransitionInZK());
       }
       if (hasDestinationServer()) {
-        hash = (37 * hash) + DESTINATIONSERVER_FIELD_NUMBER;
+        hash = (37 * hash) + DESTINATION_SERVER_FIELD_NUMBER;
         hash = (53 * hash) + getDestinationServer().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -5619,7 +5619,7 @@ public final class AdminProtos {
         return regionBuilder_;
       }
       
-      // optional uint32 versionOfClosingNode = 2;
+      // optional uint32 version_of_closing_node = 2;
       private int versionOfClosingNode_ ;
       public boolean hasVersionOfClosingNode() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -5640,7 +5640,7 @@ public final class AdminProtos {
         return this;
       }
       
-      // optional bool transitionInZK = 3 [default = true];
+      // optional bool transition_in_ZK = 3 [default = true];
       private boolean transitionInZK_ = true;
       public boolean hasTransitionInZK() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
@@ -5661,7 +5661,7 @@ public final class AdminProtos {
         return this;
       }
       
-      // optional .ServerName destinationServer = 4;
+      // optional .ServerName destination_server = 4;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destinationServerBuilder_;
@@ -6149,7 +6149,7 @@ public final class AdminProtos {
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
     
-    // optional uint64 ifOlderThanTs = 2;
+    // optional uint64 if_older_than_ts = 2;
     boolean hasIfOlderThanTs();
     long getIfOlderThanTs();
   }
@@ -6195,8 +6195,8 @@ public final class AdminProtos {
       return region_;
     }
     
-    // optional uint64 ifOlderThanTs = 2;
-    public static final int IFOLDERTHANTS_FIELD_NUMBER = 2;
+    // optional uint64 if_older_than_ts = 2;
+    public static final int IF_OLDER_THAN_TS_FIELD_NUMBER = 2;
     private long ifOlderThanTs_;
     public boolean hasIfOlderThanTs() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -6299,7 +6299,7 @@ public final class AdminProtos {
         hash = (53 * hash) + getRegion().hashCode();
       }
       if (hasIfOlderThanTs()) {
-        hash = (37 * hash) + IFOLDERTHANTS_FIELD_NUMBER;
+        hash = (37 * hash) + IF_OLDER_THAN_TS_FIELD_NUMBER;
         hash = (53 * hash) + hashLong(getIfOlderThanTs());
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -6648,7 +6648,7 @@ public final class AdminProtos {
         return regionBuilder_;
       }
       
-      // optional uint64 ifOlderThanTs = 2;
+      // optional uint64 if_older_than_ts = 2;
       private long ifOlderThanTs_ ;
       public boolean hasIfOlderThanTs() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -6683,7 +6683,7 @@ public final class AdminProtos {
   public interface FlushRegionResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // required uint64 lastFlushTime = 1;
+    // required uint64 last_flush_time = 1;
     boolean hasLastFlushTime();
     long getLastFlushTime();
     
@@ -6720,8 +6720,8 @@ public final class AdminProtos {
     }
     
     private int bitField0_;
-    // required uint64 lastFlushTime = 1;
-    public static final int LASTFLUSHTIME_FIELD_NUMBER = 1;
+    // required uint64 last_flush_time = 1;
+    public static final int LAST_FLUSH_TIME_FIELD_NUMBER = 1;
     private long lastFlushTime_;
     public boolean hasLastFlushTime() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
@@ -6826,7 +6826,7 @@ public final class AdminProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasLastFlushTime()) {
-        hash = (37 * hash) + LASTFLUSHTIME_FIELD_NUMBER;
+        hash = (37 * hash) + LAST_FLUSH_TIME_FIELD_NUMBER;
         hash = (53 * hash) + hashLong(getLastFlushTime());
       }
       if (hasFlushed()) {
@@ -7072,7 +7072,7 @@ public final class AdminProtos {
       
       private int bitField0_;
       
-      // required uint64 lastFlushTime = 1;
+      // required uint64 last_flush_time = 1;
       private long lastFlushTime_ ;
       public boolean hasLastFlushTime() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
@@ -7133,7 +7133,7 @@ public final class AdminProtos {
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
     
-    // optional bytes splitPoint = 2;
+    // optional bytes split_point = 2;
     boolean hasSplitPoint();
     com.google.protobuf.ByteString getSplitPoint();
   }
@@ -7179,8 +7179,8 @@ public final class AdminProtos {
       return region_;
     }
     
-    // optional bytes splitPoint = 2;
-    public static final int SPLITPOINT_FIELD_NUMBER = 2;
+    // optional bytes split_point = 2;
+    public static final int SPLIT_POINT_FIELD_NUMBER = 2;
     private com.google.protobuf.ByteString splitPoint_;
     public boolean hasSplitPoint() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -7283,7 +7283,7 @@ public final class AdminProtos {
         hash = (53 * hash) + getRegion().hashCode();
       }
       if (hasSplitPoint()) {
-        hash = (37 * hash) + SPLITPOINT_FIELD_NUMBER;
+        hash = (37 * hash) + SPLIT_POINT_FIELD_NUMBER;
         hash = (53 * hash) + getSplitPoint().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -7632,7 +7632,7 @@ public final class AdminProtos {
         return regionBuilder_;
       }
       
-      // optional bytes splitPoint = 2;
+      // optional bytes split_point = 2;
       private com.google.protobuf.ByteString splitPoint_ = com.google.protobuf.ByteString.EMPTY;
       public boolean hasSplitPoint() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -8878,12 +8878,12 @@ public final class AdminProtos {
   public interface MergeRegionsRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // required .RegionSpecifier regionA = 1;
+    // required .RegionSpecifier region_a = 1;
     boolean hasRegionA();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder();
     
-    // required .RegionSpecifier regionB = 2;
+    // required .RegionSpecifier region_b = 2;
     boolean hasRegionB();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder();
@@ -8921,8 +8921,8 @@ public final class AdminProtos {
     }
     
     private int bitField0_;
-    // required .RegionSpecifier regionA = 1;
-    public static final int REGIONA_FIELD_NUMBER = 1;
+    // required .RegionSpecifier region_a = 1;
+    public static final int REGION_A_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_;
     public boolean hasRegionA() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
@@ -8934,8 +8934,8 @@ public final class AdminProtos {
       return regionA_;
     }
     
-    // required .RegionSpecifier regionB = 2;
-    public static final int REGIONB_FIELD_NUMBER = 2;
+    // required .RegionSpecifier region_b = 2;
+    public static final int REGION_B_FIELD_NUMBER = 2;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_;
     public boolean hasRegionB() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -9068,11 +9068,11 @@ public final class AdminProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasRegionA()) {
-        hash = (37 * hash) + REGIONA_FIELD_NUMBER;
+        hash = (37 * hash) + REGION_A_FIELD_NUMBER;
         hash = (53 * hash) + getRegionA().hashCode();
       }
       if (hasRegionB()) {
-        hash = (37 * hash) + REGIONB_FIELD_NUMBER;
+        hash = (37 * hash) + REGION_B_FIELD_NUMBER;
         hash = (53 * hash) + getRegionB().hashCode();
       }
       if (hasForcible()) {
@@ -9370,7 +9370,7 @@ public final class AdminProtos {
       
       private int bitField0_;
       
-      // required .RegionSpecifier regionA = 1;
+      // required .RegionSpecifier region_a = 1;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionABuilder_;
@@ -9460,7 +9460,7 @@ public final class AdminProtos {
         return regionABuilder_;
       }
       
-      // required .RegionSpecifier regionB = 2;
+      // required .RegionSpecifier region_b = 2;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBBuilder_;
@@ -9890,12 +9890,12 @@ public final class AdminProtos {
     org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getKey();
     org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder getKeyOrBuilder();
     
-    // repeated bytes keyValueBytes = 2;
+    // repeated bytes key_value_bytes = 2;
     java.util.List<com.google.protobuf.ByteString> getKeyValueBytesList();
     int getKeyValueBytesCount();
     com.google.protobuf.ByteString getKeyValueBytes(int index);
     
-    // optional int32 associatedCellCount = 3;
+    // optional int32 associated_cell_count = 3;
     boolean hasAssociatedCellCount();
     int getAssociatedCellCount();
   }
@@ -9941,8 +9941,8 @@ public final class AdminProtos {
       return key_;
     }
     
-    // repeated bytes keyValueBytes = 2;
-    public static final int KEYVALUEBYTES_FIELD_NUMBER = 2;
+    // repeated bytes key_value_bytes = 2;
+    public static final int KEY_VALUE_BYTES_FIELD_NUMBER = 2;
     private java.util.List<com.google.protobuf.ByteString> keyValueBytes_;
     public java.util.List<com.google.protobuf.ByteString>
         getKeyValueBytesList() {
@@ -9955,8 +9955,8 @@ public final class AdminProtos {
       return keyValueBytes_.get(index);
     }
     
-    // optional int32 associatedCellCount = 3;
-    public static final int ASSOCIATEDCELLCOUNT_FIELD_NUMBER = 3;
+    // optional int32 associated_cell_count = 3;
+    public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 3;
     private int associatedCellCount_;
     public boolean hasAssociatedCellCount() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -10074,11 +10074,11 @@ public final class AdminProtos {
         hash = (53 * hash) + getKey().hashCode();
       }
       if (getKeyValueBytesCount() > 0) {
-        hash = (37 * hash) + KEYVALUEBYTES_FIELD_NUMBER;
+        hash = (37 * hash) + KEY_VALUE_BYTES_FIELD_NUMBER;
         hash = (53 * hash) + getKeyValueBytesList().hashCode();
       }
       if (hasAssociatedCellCount()) {
-        hash = (37 * hash) + ASSOCIATEDCELLCOUNT_FIELD_NUMBER;
+        hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER;
         hash = (53 * hash) + getAssociatedCellCount();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -10449,7 +10449,7 @@ public final class AdminProtos {
         return keyBuilder_;
       }
       
-      // repeated bytes keyValueBytes = 2;
+      // repeated bytes key_value_bytes = 2;
       private java.util.List<com.google.protobuf.ByteString> keyValueBytes_ = java.util.Collections.emptyList();;
       private void ensureKeyValueBytesIsMutable() {
         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
@@ -10500,7 +10500,7 @@ public final class AdminProtos {
         return this;
       }
       
-      // optional int32 associatedCellCount = 3;
+      // optional int32 associated_cell_count = 3;
       private int associatedCellCount_ ;
       public boolean hasAssociatedCellCount() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
@@ -11728,7 +11728,7 @@ public final class AdminProtos {
   public interface RollWALWriterResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // repeated bytes regionToFlush = 1;
+    // repeated bytes region_to_flush = 1;
     java.util.List<com.google.protobuf.ByteString> getRegionToFlushList();
     int getRegionToFlushCount();
     com.google.protobuf.ByteString getRegionToFlush(int index);
@@ -11761,8 +11761,8 @@ public final class AdminProtos {
       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable;
     }
     
-    // repeated bytes regionToFlush = 1;
-    public static final int REGIONTOFLUSH_FIELD_NUMBER = 1;
+    // repeated bytes region_to_flush = 1;
+    public static final int REGION_TO_FLUSH_FIELD_NUMBER = 1;
     private java.util.List<com.google.protobuf.ByteString> regionToFlush_;
     public java.util.List<com.google.protobuf.ByteString>
         getRegionToFlushList() {
@@ -11846,7 +11846,7 @@ public final class AdminProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (getRegionToFlushCount() > 0) {
-        hash = (37 * hash) + REGIONTOFLUSH_FIELD_NUMBER;
+        hash = (37 * hash) + REGION_TO_FLUSH_FIELD_NUMBER;
         hash = (53 * hash) + getRegionToFlushList().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -12076,7 +12076,7 @@ public final class AdminProtos {
       
       private int bitField0_;
       
-      // repeated bytes regionToFlush = 1;
+      // repeated bytes region_to_flush = 1;
       private java.util.List<com.google.protobuf.ByteString> regionToFlush_ = java.util.Collections.emptyList();;
       private void ensureRegionToFlushIsMutable() {
         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -13157,12 +13157,12 @@ public final class AdminProtos {
   public interface ServerInfoOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // required .ServerName serverName = 1;
+    // required .ServerName server_name = 1;
     boolean hasServerName();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName();
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder();
     
-    // optional uint32 webuiPort = 2;
+    // optional uint32 webui_port = 2;
     boolean hasWebuiPort();
     int getWebuiPort();
   }
@@ -13195,8 +13195,8 @@ public final class AdminProtos {
     }
     
     private int bitField0_;
-    // required .ServerName serverName = 1;
-    public static final int SERVERNAME_FIELD_NUMBER = 1;
+    // required .ServerName server_name = 1;
+    public static final int SERVER_NAME_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_;
     public boolean hasServerName() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
@@ -13208,8 +13208,8 @@ public final class AdminProtos {
       return serverName_;
     }
     
-    // optional uint32 webuiPort = 2;
-    public static final int WEBUIPORT_FIELD_NUMBER = 2;
+    // optional uint32 webui_port = 2;
+    public static final int WEBUI_PORT_FIELD_NUMBER = 2;
     private int webuiPort_;
     public boolean hasWebuiPort() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -13308,11 +13308,11 @@ public final class AdminProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasServerName()) {
-        hash = (37 * hash) + SERVERNAME_FIELD_NUMBER;
+        hash = (37 * hash) + SERVER_NAME_FIELD_NUMBER;
         hash = (53 * hash) + getServerName().hashCode();
       }
       if (hasWebuiPort()) {
-        hash = (37 * hash) + WEBUIPORT_FIELD_NUMBER;
+        hash = (37 * hash) + WEBUI_PORT_FIELD_NUMBER;
         hash = (53 * hash) + getWebuiPort();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -13571,7 +13571,7 @@ public final class AdminProtos {
       
       private int bitField0_;
       
-      // required .ServerName serverName = 1;
+      // required .ServerName server_name = 1;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_;
@@ -13661,7 +13661,7 @@ public final class AdminProtos {
         return serverNameBuilder_;
       }
       
-      // optional uint32 webuiPort = 2;
+      // optional uint32 webui_port = 2;
       private int webuiPort_ ;
       public boolean hasWebuiPort() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -13696,7 +13696,7 @@ public final class AdminProtos {
   public interface GetServerInfoResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // required .ServerInfo serverInfo = 1;
+    // required .ServerInfo server_info = 1;
     boolean hasServerInfo();
     org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo getServerInfo();
     org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder getServerInfoOrBuilder();
@@ -13730,8 +13730,8 @@ public final class AdminProtos {
     }
     
     private int bitField0_;
-    // required .ServerInfo serverInfo = 1;
-    public static final int SERVERINFO_FIELD_NUMBER = 1;
+    // required .ServerInfo server_info = 1;
+    public static final int SERVER_INFO_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_;
     public boolean hasServerInfo() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
@@ -13820,7 +13820,7 @@ public final class AdminProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasServerInfo()) {
-        hash = (37 * hash) + SERVERINFO_FIELD_NUMBER;
+        hash = (37 * hash) + SERVER_INFO_FIELD_NUMBER;
         hash = (53 * hash) + getServerInfo().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -14065,7 +14065,7 @@ public final class AdminProtos {
       
       private int bitField0_;
       
-      // required .ServerInfo serverInfo = 1;
+      // required .ServerInfo server_info = 1;
       private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo serverInfo_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfoOrBuilder> serverInfoBuilder_;
@@ -15387,79 +15387,80 @@ public final class AdminProtos {
   static {
     java.lang.String[] descriptorData = {
       "\n\013Admin.proto\032\014Client.proto\032\013hbase.proto" +
-      "\032\tWAL.proto\"Q\n\024GetRegionInfoRequest\022 \n\006r" +
-      "egion\030\001 \002(\0132\020.RegionSpecifier\022\027\n\017compact" +
-      "ionState\030\002 \001(\010\"\301\001\n\025GetRegionInfoResponse" +
-      "\022\037\n\nregionInfo\030\001 \002(\0132\013.RegionInfo\022?\n\017com" +
-      "pactionState\030\002 \001(\0162&.GetRegionInfoRespon" +
-      "se.CompactionState\"F\n\017CompactionState\022\010\n" +
-      "\004NONE\020\000\022\t\n\005MINOR\020\001\022\t\n\005MAJOR\020\002\022\023\n\017MAJOR_A" +
-      "ND_MINOR\020\003\"G\n\023GetStoreFileRequest\022 \n\006reg" +
-      "ion\030\001 \002(\0132\020.RegionSpecifier\022\016\n\006family\030\002 ",
-      "\003(\014\")\n\024GetStoreFileResponse\022\021\n\tstoreFile" +
-      "\030\001 \003(\t\"\030\n\026GetOnlineRegionRequest\":\n\027GetO" +
-      "nlineRegionResponse\022\037\n\nregionInfo\030\001 \003(\0132" +
-      "\013.RegionInfo\"\270\001\n\021OpenRegionRequest\0223\n\010op" +
-      "enInfo\030\001 \003(\0132!.OpenRegionRequest.RegionO" +
-      "penInfo\032n\n\016RegionOpenInfo\022\033\n\006region\030\001 \002(" +
-      "\0132\013.RegionInfo\022\034\n\024versionOfOfflineNode\030\002" +
-      " \001(\r\022!\n\014favoredNodes\030\003 \003(\0132\013.ServerName\"" +
-      "\234\001\n\022OpenRegionResponse\022<\n\014openingState\030\001" +
-      " \003(\0162&.OpenRegionResponse.RegionOpeningS",
-      "tate\"H\n\022RegionOpeningState\022\n\n\006OPENED\020\000\022\022" +
-      "\n\016ALREADY_OPENED\020\001\022\022\n\016FAILED_OPENING\020\002\"\232" +
-      "\001\n\022CloseRegionRequest\022 \n\006region\030\001 \002(\0132\020." +
-      "RegionSpecifier\022\034\n\024versionOfClosingNode\030" +
-      "\002 \001(\r\022\034\n\016transitionInZK\030\003 \001(\010:\004true\022&\n\021d" +
-      "estinationServer\030\004 \001(\0132\013.ServerName\"%\n\023C" +
-      "loseRegionResponse\022\016\n\006closed\030\001 \002(\010\"M\n\022Fl" +
-      "ushRegionRequest\022 \n\006region\030\001 \002(\0132\020.Regio" +
-      "nSpecifier\022\025\n\rifOlderThanTs\030\002 \001(\004\"=\n\023Flu" +
-      "shRegionResponse\022\025\n\rlastFlushTime\030\001 \002(\004\022",
-      "\017\n\007flushed\030\002 \001(\010\"J\n\022SplitRegionRequest\022 " +
-      "\n\006region\030\001 \002(\0132\020.RegionSpecifier\022\022\n\nspli" +
-      "tPoint\030\002 \001(\014\"\025\n\023SplitRegionResponse\"W\n\024C" +
-      "ompactRegionRequest\022 \n\006region\030\001 \002(\0132\020.Re" +
-      "gionSpecifier\022\r\n\005major\030\002 \001(\010\022\016\n\006family\030\003" +
-      " \001(\014\"\027\n\025CompactRegionResponse\"t\n\023MergeRe" +
-      "gionsRequest\022!\n\007regionA\030\001 \002(\0132\020.RegionSp" +
-      "ecifier\022!\n\007regionB\030\002 \002(\0132\020.RegionSpecifi" +
-      "er\022\027\n\010forcible\030\003 \001(\010:\005false\"\026\n\024MergeRegi" +
-      "onsResponse\"T\n\010WALEntry\022\024\n\003key\030\001 \002(\0132\007.W",
-      "ALKey\022\025\n\rkeyValueBytes\030\002 \003(\014\022\033\n\023associat" +
-      "edCellCount\030\003 \001(\005\"4\n\030ReplicateWALEntryRe" +
-      "quest\022\030\n\005entry\030\001 \003(\0132\t.WALEntry\"\033\n\031Repli" +
-      "cateWALEntryResponse\"\026\n\024RollWALWriterReq" +
-      "uest\".\n\025RollWALWriterResponse\022\025\n\rregionT" +
-      "oFlush\030\001 \003(\014\"#\n\021StopServerRequest\022\016\n\006rea" +
-      "son\030\001 \002(\t\"\024\n\022StopServerResponse\"\026\n\024GetSe" +
-      "rverInfoRequest\"@\n\nServerInfo\022\037\n\nserverN" +
-      "ame\030\001 \002(\0132\013.ServerName\022\021\n\twebuiPort\030\002 \001(" +
-      "\r\"8\n\025GetServerInfoResponse\022\037\n\nserverInfo",
-      "\030\001 \002(\0132\013.ServerInfo2\337\006\n\014AdminService\022>\n\r" +
-      "getRegionInfo\022\025.GetRegionInfoRequest\032\026.G" +
-      "etRegionInfoResponse\022;\n\014getStoreFile\022\024.G" +
-      "etStoreFileRequest\032\025.GetStoreFileRespons" +
-      "e\022D\n\017getOnlineRegion\022\027.GetOnlineRegionRe" +
-      "quest\032\030.GetOnlineRegionResponse\0225\n\nopenR" +
-      "egion\022\022.OpenRegionRequest\032\023.OpenRegionRe" +
-      "sponse\0228\n\013closeRegion\022\023.CloseRegionReque" +
-      "st\032\024.CloseRegionResponse\0228\n\013flushRegion\022" +
-      "\023.FlushRegionRequest\032\024.FlushRegionRespon",
-      "se\0228\n\013splitRegion\022\023.SplitRegionRequest\032\024" +
-      ".SplitRegionResponse\022>\n\rcompactRegion\022\025." +
-      "CompactRegionRequest\032\026.CompactRegionResp" +
-      "onse\022;\n\014mergeRegions\022\024.MergeRegionsReque" +
-      "st\032\025.MergeRegionsResponse\022J\n\021replicateWA" +
-      "LEntry\022\031.ReplicateWALEntryRequest\032\032.Repl" +
-      "icateWALEntryResponse\022\'\n\006replay\022\r.MultiR" +
-      "equest\032\016.MultiResponse\022>\n\rrollWALWriter\022" +
-      "\025.RollWALWriterRequest\032\026.RollWALWriterRe" +
-      "sponse\022>\n\rgetServerInfo\022\025.GetServerInfoR",
-      "equest\032\026.GetServerInfoResponse\0225\n\nstopSe" +
-      "rver\022\022.StopServerRequest\032\023.StopServerRes" +
-      "ponseBA\n*org.apache.hadoop.hbase.protobu" +
-      "f.generatedB\013AdminProtosH\001\210\001\001\240\001\001"
+      "\032\tWAL.proto\"R\n\024GetRegionInfoRequest\022 \n\006r" +
+      "egion\030\001 \002(\0132\020.RegionSpecifier\022\030\n\020compact" +
+      "ion_state\030\002 \001(\010\"\303\001\n\025GetRegionInfoRespons" +
+      "e\022 \n\013region_info\030\001 \002(\0132\013.RegionInfo\022@\n\020c" +
+      "ompaction_state\030\002 \001(\0162&.GetRegionInfoRes" +
+      "ponse.CompactionState\"F\n\017CompactionState" +
+      "\022\010\n\004NONE\020\000\022\t\n\005MINOR\020\001\022\t\n\005MAJOR\020\002\022\023\n\017MAJO" +
+      "R_AND_MINOR\020\003\"G\n\023GetStoreFileRequest\022 \n\006" +
+      "region\030\001 \002(\0132\020.RegionSpecifier\022\016\n\006family",
+      "\030\002 \003(\014\"*\n\024GetStoreFileResponse\022\022\n\nstore_" +
+      "file\030\001 \003(\t\"\030\n\026GetOnlineRegionRequest\";\n\027" +
+      "GetOnlineRegionResponse\022 \n\013region_info\030\001" +
+      " \003(\0132\013.RegionInfo\"\275\001\n\021OpenRegionRequest\022" +
+      "4\n\topen_info\030\001 \003(\0132!.OpenRegionRequest.R" +
+      "egionOpenInfo\032r\n\016RegionOpenInfo\022\033\n\006regio" +
+      "n\030\001 \002(\0132\013.RegionInfo\022\037\n\027version_of_offli" +
+      "ne_node\030\002 \001(\r\022\"\n\rfavored_nodes\030\003 \003(\0132\013.S" +
+      "erverName\"\235\001\n\022OpenRegionResponse\022=\n\ropen" +
+      "ing_state\030\001 \003(\0162&.OpenRegionResponse.Reg",
+      "ionOpeningState\"H\n\022RegionOpeningState\022\n\n" +
+      "\006OPENED\020\000\022\022\n\016ALREADY_OPENED\020\001\022\022\n\016FAILED_" +
+      "OPENING\020\002\"\240\001\n\022CloseRegionRequest\022 \n\006regi" +
+      "on\030\001 \002(\0132\020.RegionSpecifier\022\037\n\027version_of" +
+      "_closing_node\030\002 \001(\r\022\036\n\020transition_in_ZK\030" +
+      "\003 \001(\010:\004true\022\'\n\022destination_server\030\004 \001(\0132" +
+      "\013.ServerName\"%\n\023CloseRegionResponse\022\016\n\006c" +
+      "losed\030\001 \002(\010\"P\n\022FlushRegionRequest\022 \n\006reg" +
+      "ion\030\001 \002(\0132\020.RegionSpecifier\022\030\n\020if_older_" +
+      "than_ts\030\002 \001(\004\"?\n\023FlushRegionResponse\022\027\n\017",
+      "last_flush_time\030\001 \002(\004\022\017\n\007flushed\030\002 \001(\010\"K" +
+      "\n\022SplitRegionRequest\022 \n\006region\030\001 \002(\0132\020.R" +
+      "egionSpecifier\022\023\n\013split_point\030\002 \001(\014\"\025\n\023S" +
+      "plitRegionResponse\"W\n\024CompactRegionReque" +
+      "st\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\r\n\005" +
+      "major\030\002 \001(\010\022\016\n\006family\030\003 \001(\014\"\027\n\025CompactRe" +
+      "gionResponse\"v\n\023MergeRegionsRequest\022\"\n\010r" +
+      "egion_a\030\001 \002(\0132\020.RegionSpecifier\022\"\n\010regio" +
+      "n_b\030\002 \002(\0132\020.RegionSpecifier\022\027\n\010forcible\030" +
+      "\003 \001(\010:\005false\"\026\n\024MergeRegionsResponse\"X\n\010",
+      "WALEntry\022\024\n\003key\030\001 \002(\0132\007.WALKey\022\027\n\017key_va" +
+      "lue_bytes\030\002 \003(\014\022\035\n\025associated_cell_count" +
+      "\030\003 \001(\005\"4\n\030ReplicateWALEntryRequest\022\030\n\005en" +
+      "try\030\001 \003(\0132\t.WALEntry\"\033\n\031ReplicateWALEntr" +
+      "yResponse\"\026\n\024RollWALWriterRequest\"0\n\025Rol" +
+      "lWALWriterResponse\022\027\n\017region_to_flush\030\001 " +
+      "\003(\014\"#\n\021StopServerRequest\022\016\n\006reason\030\001 \002(\t" +
+      "\"\024\n\022StopServerResponse\"\026\n\024GetServerInfoR" +
+      "equest\"B\n\nServerInfo\022 \n\013server_name\030\001 \002(" +
+      "\0132\013.ServerName\022\022\n\nwebui_port\030\002 \001(\r\"9\n\025Ge",
+      "tServerInfoResponse\022 \n\013server_info\030\001 \002(\013" +
+      "2\013.ServerInfo2\337\006\n\014AdminService\022>\n\rGetReg" +
+      "ionInfo\022\025.GetRegionInfoRequest\032\026.GetRegi" +
+      "onInfoResponse\022;\n\014GetStoreFile\022\024.GetStor" +
+      "eFileRequest\032\025.GetStoreFileResponse\022D\n\017G" +
+      "etOnlineRegion\022\027.GetOnlineRegionRequest\032" +
+      "\030.GetOnlineRegionResponse\0225\n\nOpenRegion\022" +
+      "\022.OpenRegionRequest\032\023.OpenRegionResponse" +
+      "\0228\n\013CloseRegion\022\023.CloseRegionRequest\032\024.C" +
+      "loseRegionResponse\0228\n\013FlushRegion\022\023.Flus",
+      "hRegionRequest\032\024.FlushRegionResponse\0228\n\013" +
+      "SplitRegion\022\023.SplitRegionRequest\032\024.Split" +
+      "RegionResponse\022>\n\rCompactRegion\022\025.Compac" +
+      "tRegionRequest\032\026.CompactRegionResponse\022;" +
+      "\n\014MergeRegions\022\024.MergeRegionsRequest\032\025.M" +
+      "ergeRegionsResponse\022J\n\021ReplicateWALEntry" +
+      "\022\031.ReplicateWALEntryRequest\032\032.ReplicateW" +
+      "ALEntryResponse\022\'\n\006Replay\022\r.MultiRequest" +
+      "\032\016.MultiResponse\022>\n\rRollWALWriter\022\025.Roll" +
+      "WALWriterRequest\032\026.RollWALWriterResponse",
+      "\022>\n\rGetServerInfo\022\025.GetServerInfoRequest" +
+      "\032\026.GetServerInfoResponse\0225\n\nStopServer\022\022" +
+      ".StopServerRequest\032\023.StopServerResponseB" +
+      "A\n*org.apache.hadoop.hbase.protobuf.gene" +
+      "ratedB\013AdminProtosH\001\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {

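The rename from camelCase to snake_case field names above changes only the .proto source, the descriptor strings, and the generated FIELD_NUMBER constants; the field numbers themselves are untouched, so the wire format is unchanged and the camelCase Java accessors keep their old names. A minimal sketch (not part of this commit; the encoded region name value is a placeholder) of building one of the renamed AdminProtos messages:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

    public class GetRegionInfoRequestExample {
      public static void main(String[] args) {
        // The builders expose camelCase setters derived from the snake_case proto
        // field names (compaction_state -> setCompactionState), so callers are unchanged.
        HBaseProtos.RegionSpecifier region = HBaseProtos.RegionSpecifier.newBuilder()
            .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
            .setValue(ByteString.copyFromUtf8("1588230740"))  // placeholder encoded region name
            .build();
        AdminProtos.GetRegionInfoRequest request = AdminProtos.GetRegionInfoRequest.newBuilder()
            .setRegion(region)
            .setCompactionState(true)  // also report the region's compaction state
            .build();
        System.out.println(request);
      }
    }
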
Modified: hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java?rev=1501835&r1=1501834&r2=1501835&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java Wed Jul 10 16:31:08 2013
@@ -11,7 +11,7 @@ public final class AggregateProtos {
   public interface AggregateArgumentOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // required string interpreterClassName = 1;
+    // required string interpreter_class_name = 1;
     boolean hasInterpreterClassName();
     String getInterpreterClassName();
     
@@ -20,7 +20,7 @@ public final class AggregateProtos {
     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
     
-    // optional bytes interpreterSpecificBytes = 3;
+    // optional bytes interpreter_specific_bytes = 3;
     boolean hasInterpreterSpecificBytes();
     com.google.protobuf.ByteString getInterpreterSpecificBytes();
   }
@@ -53,8 +53,8 @@ public final class AggregateProtos {
     }
     
     private int bitField0_;
-    // required string interpreterClassName = 1;
-    public static final int INTERPRETERCLASSNAME_FIELD_NUMBER = 1;
+    // required string interpreter_class_name = 1;
+    public static final int INTERPRETER_CLASS_NAME_FIELD_NUMBER = 1;
     private java.lang.Object interpreterClassName_;
     public boolean hasInterpreterClassName() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
@@ -98,8 +98,8 @@ public final class AggregateProtos {
       return scan_;
     }
     
-    // optional bytes interpreterSpecificBytes = 3;
-    public static final int INTERPRETERSPECIFICBYTES_FIELD_NUMBER = 3;
+    // optional bytes interpreter_specific_bytes = 3;
+    public static final int INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER = 3;
     private com.google.protobuf.ByteString interpreterSpecificBytes_;
     public boolean hasInterpreterSpecificBytes() {
       return ((bitField0_ & 0x00000004) == 0x00000004);
@@ -215,7 +215,7 @@ public final class AggregateProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasInterpreterClassName()) {
-        hash = (37 * hash) + INTERPRETERCLASSNAME_FIELD_NUMBER;
+        hash = (37 * hash) + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
         hash = (53 * hash) + getInterpreterClassName().hashCode();
       }
       if (hasScan()) {
@@ -223,7 +223,7 @@ public final class AggregateProtos {
         hash = (53 * hash) + getScan().hashCode();
       }
       if (hasInterpreterSpecificBytes()) {
-        hash = (37 * hash) + INTERPRETERSPECIFICBYTES_FIELD_NUMBER;
+        hash = (37 * hash) + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
         hash = (53 * hash) + getInterpreterSpecificBytes().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -500,7 +500,7 @@ public final class AggregateProtos {
       
       private int bitField0_;
       
-      // required string interpreterClassName = 1;
+      // required string interpreter_class_name = 1;
       private java.lang.Object interpreterClassName_ = "";
       public boolean hasInterpreterClassName() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
@@ -626,7 +626,7 @@ public final class AggregateProtos {
         return scanBuilder_;
       }
       
-      // optional bytes interpreterSpecificBytes = 3;
+      // optional bytes interpreter_specific_bytes = 3;
       private com.google.protobuf.ByteString interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
       public boolean hasInterpreterSpecificBytes() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
@@ -664,12 +664,12 @@ public final class AggregateProtos {
   public interface AggregateResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // repeated bytes firstPart = 1;
+    // repeated bytes first_part = 1;
     java.util.List<com.google.protobuf.ByteString> getFirstPartList();
     int getFirstPartCount();
     com.google.protobuf.ByteString getFirstPart(int index);
     
-    // optional bytes secondPart = 2;
+    // optional bytes second_part = 2;
     boolean hasSecondPart();
     com.google.protobuf.ByteString getSecondPart();
   }
@@ -702,8 +702,8 @@ public final class AggregateProtos {
     }
     
     private int bitField0_;
-    // repeated bytes firstPart = 1;
-    public static final int FIRSTPART_FIELD_NUMBER = 1;
+    // repeated bytes first_part = 1;
+    public static final int FIRST_PART_FIELD_NUMBER = 1;
     private java.util.List<com.google.protobuf.ByteString> firstPart_;
     public java.util.List<com.google.protobuf.ByteString>
         getFirstPartList() {
@@ -716,8 +716,8 @@ public final class AggregateProtos {
       return firstPart_.get(index);
     }
     
-    // optional bytes secondPart = 2;
-    public static final int SECONDPART_FIELD_NUMBER = 2;
+    // optional bytes second_part = 2;
+    public static final int SECOND_PART_FIELD_NUMBER = 2;
     private com.google.protobuf.ByteString secondPart_;
     public boolean hasSecondPart() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
@@ -810,11 +810,11 @@ public final class AggregateProtos {
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (getFirstPartCount() > 0) {
-        hash = (37 * hash) + FIRSTPART_FIELD_NUMBER;
+        hash = (37 * hash) + FIRST_PART_FIELD_NUMBER;
         hash = (53 * hash) + getFirstPartList().hashCode();
       }
       if (hasSecondPart()) {
-        hash = (37 * hash) + SECONDPART_FIELD_NUMBER;
+        hash = (37 * hash) + SECOND_PART_FIELD_NUMBER;
         hash = (53 * hash) + getSecondPart().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -1060,7 +1060,7 @@ public final class AggregateProtos {
       
       private int bitField0_;
       
-      // repeated bytes firstPart = 1;
+      // repeated bytes first_part = 1;
       private java.util.List<com.google.protobuf.ByteString> firstPart_ = java.util.Collections.emptyList();;
       private void ensureFirstPartIsMutable() {
         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -1111,7 +1111,7 @@ public final class AggregateProtos {
         return this;
       }
       
-      // optional bytes secondPart = 2;
+      // optional bytes second_part = 2;
       private com.google.protobuf.ByteString secondPart_ = com.google.protobuf.ByteString.EMPTY;
       public boolean hasSecondPart() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -1776,23 +1776,23 @@ public final class AggregateProtos {
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\017Aggregate.proto\032\014Client.proto\"h\n\021Aggre" +
-      "gateArgument\022\034\n\024interpreterClassName\030\001 \002" +
-      "(\t\022\023\n\004scan\030\002 \002(\0132\005.Scan\022 \n\030interpreterSp" +
-      "ecificBytes\030\003 \001(\014\":\n\021AggregateResponse\022\021" +
-      "\n\tfirstPart\030\001 \003(\014\022\022\n\nsecondPart\030\002 \001(\0142\366\002" +
-      "\n\020AggregateService\0220\n\006getMax\022\022.Aggregate" +
-      "Argument\032\022.AggregateResponse\0220\n\006getMin\022\022" +
-      ".AggregateArgument\032\022.AggregateResponse\0220" +
-      "\n\006getSum\022\022.AggregateArgument\032\022.Aggregate" +
-      "Response\0223\n\tgetRowNum\022\022.AggregateArgumen",
-      "t\032\022.AggregateResponse\0220\n\006getAvg\022\022.Aggreg" +
-      "ateArgument\032\022.AggregateResponse\0220\n\006getSt" +
-      "d\022\022.AggregateArgument\032\022.AggregateRespons" +
-      "e\0223\n\tgetMedian\022\022.AggregateArgument\032\022.Agg" +
-      "regateResponseBE\n*org.apache.hadoop.hbas" +
-      "e.protobuf.generatedB\017AggregateProtosH\001\210" +
-      "\001\001\240\001\001"
+      "\n\017Aggregate.proto\032\014Client.proto\"l\n\021Aggre" +
+      "gateArgument\022\036\n\026interpreter_class_name\030\001" +
+      " \002(\t\022\023\n\004scan\030\002 \002(\0132\005.Scan\022\"\n\032interpreter" +
+      "_specific_bytes\030\003 \001(\014\"<\n\021AggregateRespon" +
+      "se\022\022\n\nfirst_part\030\001 \003(\014\022\023\n\013second_part\030\002 " +
+      "\001(\0142\366\002\n\020AggregateService\0220\n\006GetMax\022\022.Agg" +
+      "regateArgument\032\022.AggregateResponse\0220\n\006Ge" +
+      "tMin\022\022.AggregateArgument\032\022.AggregateResp" +
+      "onse\0220\n\006GetSum\022\022.AggregateArgument\032\022.Agg" +
+      "regateResponse\0223\n\tGetRowNum\022\022.AggregateA",
+      "rgument\032\022.AggregateResponse\0220\n\006GetAvg\022\022." +
+      "AggregateArgument\032\022.AggregateResponse\0220\n" +
+      "\006GetStd\022\022.AggregateArgument\032\022.AggregateR" +
+      "esponse\0223\n\tGetMedian\022\022.AggregateArgument" +
+      "\032\022.AggregateResponseBE\n*org.apache.hadoo" +
+      "p.hbase.protobuf.generatedB\017AggregatePro" +
+      "tosH\001\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {

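In Aggregate.proto the new descriptor also capitalizes the RPC method names (getMax becomes GetMax, and so on) alongside the snake_case field renames; the generated Java message API is unaffected. A minimal sketch (not part of this commit; the interpreter class name is a placeholder) of building the renamed AggregateArgument message:

    import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class AggregateArgumentExample {
      public static void main(String[] args) {
        // interpreter_class_name and interpreter_specific_bytes still surface as
        // setInterpreterClassName(...) and setInterpreterSpecificBytes(...) on the builder.
        AggregateProtos.AggregateArgument arg = AggregateProtos.AggregateArgument.newBuilder()
            .setInterpreterClassName("my.custom.ColumnInterpreter")  // placeholder interpreter class
            .setScan(ClientProtos.Scan.getDefaultInstance())         // empty Scan; its fields are optional
            .build();
        System.out.println(arg.getInterpreterClassName());
      }
    }
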
Modified: hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java?rev=1501835&r1=1501834&r2=1501835&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java Wed Jul 10 16:31:08 2013
@@ -15,7 +15,7 @@ public final class AuthenticationProtos 
     boolean hasId();
     int getId();
     
-    // required int64 expirationDate = 2;
+    // required int64 expiration_date = 2;
     boolean hasExpirationDate();
     long getExpirationDate();
     
@@ -62,8 +62,8 @@ public final class AuthenticationProtos 
       return id_;
     }
     
-    // required int64 expirationDate = 2;
-    public static final int EXPIRATIONDATE_FIELD_NUMBER = 2;
+    // required int64 expiration_date = 2;
+    public static final int EXPIRATION_DATE_FIELD_NUMBER = 2;
     private long expirationDate_;
     public boolean hasExpirationDate() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -193,7 +193,7 @@ public final class AuthenticationProtos 
         hash = (53 * hash) + getId();
       }
       if (hasExpirationDate()) {
-        hash = (37 * hash) + EXPIRATIONDATE_FIELD_NUMBER;
+        hash = (37 * hash) + EXPIRATION_DATE_FIELD_NUMBER;
         hash = (53 * hash) + hashLong(getExpirationDate());
       }
       if (hasKey()) {
@@ -482,7 +482,7 @@ public final class AuthenticationProtos 
         return this;
       }
       
-      // required int64 expirationDate = 2;
+      // required int64 expiration_date = 2;
       private long expirationDate_ ;
       public boolean hasExpirationDate() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -549,19 +549,19 @@ public final class AuthenticationProtos 
     boolean hasUsername();
     com.google.protobuf.ByteString getUsername();
     
-    // required int32 keyId = 3;
+    // required int32 key_id = 3;
     boolean hasKeyId();
     int getKeyId();
     
-    // optional int64 issueDate = 4;
+    // optional int64 issue_date = 4;
     boolean hasIssueDate();
     long getIssueDate();
     
-    // optional int64 expirationDate = 5;
+    // optional int64 expiration_date = 5;
     boolean hasExpirationDate();
     long getExpirationDate();
     
-    // optional int64 sequenceNumber = 6;
+    // optional int64 sequence_number = 6;
     boolean hasSequenceNumber();
     long getSequenceNumber();
   }
@@ -680,8 +680,8 @@ public final class AuthenticationProtos 
       return username_;
     }
     
-    // required int32 keyId = 3;
-    public static final int KEYID_FIELD_NUMBER = 3;
+    // required int32 key_id = 3;
+    public static final int KEY_ID_FIELD_NUMBER = 3;
     private int keyId_;
     public boolean hasKeyId() {
       return ((bitField0_ & 0x00000004) == 0x00000004);
@@ -690,8 +690,8 @@ public final class AuthenticationProtos 
       return keyId_;
     }
     
-    // optional int64 issueDate = 4;
-    public static final int ISSUEDATE_FIELD_NUMBER = 4;
+    // optional int64 issue_date = 4;
+    public static final int ISSUE_DATE_FIELD_NUMBER = 4;
     private long issueDate_;
     public boolean hasIssueDate() {
       return ((bitField0_ & 0x00000008) == 0x00000008);
@@ -700,8 +700,8 @@ public final class AuthenticationProtos 
       return issueDate_;
     }
     
-    // optional int64 expirationDate = 5;
-    public static final int EXPIRATIONDATE_FIELD_NUMBER = 5;
+    // optional int64 expiration_date = 5;
+    public static final int EXPIRATION_DATE_FIELD_NUMBER = 5;
     private long expirationDate_;
     public boolean hasExpirationDate() {
       return ((bitField0_ & 0x00000010) == 0x00000010);
@@ -710,8 +710,8 @@ public final class AuthenticationProtos 
       return expirationDate_;
     }
     
-    // optional int64 sequenceNumber = 6;
-    public static final int SEQUENCENUMBER_FIELD_NUMBER = 6;
+    // optional int64 sequence_number = 6;
+    public static final int SEQUENCE_NUMBER_FIELD_NUMBER = 6;
     private long sequenceNumber_;
     public boolean hasSequenceNumber() {
       return ((bitField0_ & 0x00000020) == 0x00000020);
@@ -874,19 +874,19 @@ public final class AuthenticationProtos 
         hash = (53 * hash) + getUsername().hashCode();
       }
       if (hasKeyId()) {
-        hash = (37 * hash) + KEYID_FIELD_NUMBER;
+        hash = (37 * hash) + KEY_ID_FIELD_NUMBER;
         hash = (53 * hash) + getKeyId();
       }
       if (hasIssueDate()) {
-        hash = (37 * hash) + ISSUEDATE_FIELD_NUMBER;
+        hash = (37 * hash) + ISSUE_DATE_FIELD_NUMBER;
         hash = (53 * hash) + hashLong(getIssueDate());
       }
       if (hasExpirationDate()) {
-        hash = (37 * hash) + EXPIRATIONDATE_FIELD_NUMBER;
+        hash = (37 * hash) + EXPIRATION_DATE_FIELD_NUMBER;
         hash = (53 * hash) + hashLong(getExpirationDate());
       }
       if (hasSequenceNumber()) {
-        hash = (37 * hash) + SEQUENCENUMBER_FIELD_NUMBER;
+        hash = (37 * hash) + SEQUENCE_NUMBER_FIELD_NUMBER;
         hash = (53 * hash) + hashLong(getSequenceNumber());
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -1246,7 +1246,7 @@ public final class AuthenticationProtos 
         return this;
       }
       
-      // required int32 keyId = 3;
+      // required int32 key_id = 3;
       private int keyId_ ;
       public boolean hasKeyId() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
@@ -1267,7 +1267,7 @@ public final class AuthenticationProtos 
         return this;
       }
       
-      // optional int64 issueDate = 4;
+      // optional int64 issue_date = 4;
       private long issueDate_ ;
       public boolean hasIssueDate() {
         return ((bitField0_ & 0x00000008) == 0x00000008);
@@ -1288,7 +1288,7 @@ public final class AuthenticationProtos 
         return this;
       }
       
-      // optional int64 expirationDate = 5;
+      // optional int64 expiration_date = 5;
       private long expirationDate_ ;
       public boolean hasExpirationDate() {
         return ((bitField0_ & 0x00000010) == 0x00000010);
@@ -1309,7 +1309,7 @@ public final class AuthenticationProtos 
         return this;
       }
       
-      // optional int64 sequenceNumber = 6;
+      // optional int64 sequence_number = 6;
       private long sequenceNumber_ ;
       public boolean hasSequenceNumber() {
         return ((bitField0_ & 0x00000020) == 0x00000020);
@@ -2917,7 +2917,7 @@ public final class AuthenticationProtos 
     boolean hasUsername();
     String getUsername();
     
-    // optional string authMethod = 2;
+    // optional string auth_method = 2;
     boolean hasAuthMethod();
     String getAuthMethod();
   }
@@ -2982,8 +2982,8 @@ public final class AuthenticationProtos 
       }
     }
     
-    // optional string authMethod = 2;
-    public static final int AUTHMETHOD_FIELD_NUMBER = 2;
+    // optional string auth_method = 2;
+    public static final int AUTH_METHOD_FIELD_NUMBER = 2;
     private java.lang.Object authMethod_;
     public boolean hasAuthMethod() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -3100,7 +3100,7 @@ public final class AuthenticationProtos 
         hash = (53 * hash) + getUsername().hashCode();
       }
       if (hasAuthMethod()) {
-        hash = (37 * hash) + AUTHMETHOD_FIELD_NUMBER;
+        hash = (37 * hash) + AUTH_METHOD_FIELD_NUMBER;
         hash = (53 * hash) + getAuthMethod().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
@@ -3374,7 +3374,7 @@ public final class AuthenticationProtos 
         onChanged();
       }
       
-      // optional string authMethod = 2;
+      // optional string auth_method = 2;
       private java.lang.Object authMethod_ = "";
       public boolean hasAuthMethod() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
@@ -3431,7 +3431,7 @@ public final class AuthenticationProtos 
           org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse> done);
       
-      public abstract void whoami(
+      public abstract void whoAmI(
           com.google.protobuf.RpcController controller,
           org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse> done);
@@ -3450,11 +3450,11 @@ public final class AuthenticationProtos 
         }
         
         @java.lang.Override
-        public  void whoami(
+        public  void whoAmI(
             com.google.protobuf.RpcController controller,
             org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest request,
             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse> done) {
-          impl.whoami(controller, request, done);
+          impl.whoAmI(controller, request, done);
         }
         
       };
@@ -3482,7 +3482,7 @@ public final class AuthenticationProtos 
             case 0:
               return impl.getAuthenticationToken(controller, (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest)request);
             case 1:
-              return impl.whoami(controller, (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest)request);
+              return impl.whoAmI(controller, (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest)request);
             default:
               throw new java.lang.AssertionError("Can't get here.");
           }
@@ -3532,7 +3532,7 @@ public final class AuthenticationProtos 
         org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request,
         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse> done);
     
-    public abstract void whoami(
+    public abstract void whoAmI(
         com.google.protobuf.RpcController controller,
         org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest request,
         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse> done);
@@ -3565,7 +3565,7 @@ public final class AuthenticationProtos 
               done));
           return;
         case 1:
-          this.whoami(controller, (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest)request,
+          this.whoAmI(controller, (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest)request,
             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse>specializeCallback(
               done));
           return;
@@ -3641,7 +3641,7 @@ public final class AuthenticationProtos 
             org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance()));
       }
       
-      public  void whoami(
+      public  void whoAmI(
           com.google.protobuf.RpcController controller,
           org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse> done) {
@@ -3668,7 +3668,7 @@ public final class AuthenticationProtos 
           org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request)
           throws com.google.protobuf.ServiceException;
       
-      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse whoami(
+      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse whoAmI(
           com.google.protobuf.RpcController controller,
           org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest request)
           throws com.google.protobuf.ServiceException;
@@ -3693,7 +3693,7 @@ public final class AuthenticationProtos 
       }
       
       
-      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse whoami(
+      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse whoAmI(
           com.google.protobuf.RpcController controller,
           org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest request)
           throws com.google.protobuf.ServiceException {
@@ -3751,23 +3751,23 @@ public final class AuthenticationProtos 
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\024Authentication.proto\"D\n\021Authentication" +
-      "Key\022\n\n\002id\030\001 \002(\005\022\026\n\016expirationDate\030\002 \002(\003\022" +
-      "\013\n\003key\030\003 \002(\014\"\270\001\n\017TokenIdentifier\022#\n\004kind" +
-      "\030\001 \002(\0162\025.TokenIdentifier.Kind\022\020\n\010usernam" +
-      "e\030\002 \002(\014\022\r\n\005keyId\030\003 \002(\005\022\021\n\tissueDate\030\004 \001(" +
-      "\003\022\026\n\016expirationDate\030\005 \001(\003\022\026\n\016sequenceNum" +
-      "ber\030\006 \001(\003\"\034\n\004Kind\022\024\n\020HBASE_AUTH_TOKEN\020\000\"" +
-      ">\n\005Token\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010password" +
-      "\030\002 \001(\014\022\017\n\007service\030\003 \001(\014\"\016\n\014TokenRequest\"" +
-      "&\n\rTokenResponse\022\025\n\005token\030\001 \001(\0132\006.Token\"",
-      "\017\n\rWhoAmIRequest\"6\n\016WhoAmIResponse\022\020\n\010us" +
-      "ername\030\001 \001(\t\022\022\n\nauthMethod\030\002 \001(\t2{\n\025Auth" +
-      "enticationService\0227\n\026getAuthenticationTo" +
-      "ken\022\r.TokenRequest\032\016.TokenResponse\022)\n\006wh" +
-      "oami\022\016.WhoAmIRequest\032\017.WhoAmIResponseBJ\n" +
-      "*org.apache.hadoop.hbase.protobuf.genera" +
-      "tedB\024AuthenticationProtosH\001\210\001\001\240\001\001"
+      "\n\024Authentication.proto\"E\n\021Authentication" +
+      "Key\022\n\n\002id\030\001 \002(\005\022\027\n\017expiration_date\030\002 \002(\003" +
+      "\022\013\n\003key\030\003 \002(\014\"\274\001\n\017TokenIdentifier\022#\n\004kin" +
+      "d\030\001 \002(\0162\025.TokenIdentifier.Kind\022\020\n\010userna" +
+      "me\030\002 \002(\014\022\016\n\006key_id\030\003 \002(\005\022\022\n\nissue_date\030\004" +
+      " \001(\003\022\027\n\017expiration_date\030\005 \001(\003\022\027\n\017sequenc" +
+      "e_number\030\006 \001(\003\"\034\n\004Kind\022\024\n\020HBASE_AUTH_TOK" +
+      "EN\020\000\">\n\005Token\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010pas" +
+      "sword\030\002 \001(\014\022\017\n\007service\030\003 \001(\014\"\016\n\014TokenReq" +
+      "uest\"&\n\rTokenResponse\022\025\n\005token\030\001 \001(\0132\006.T",
+      "oken\"\017\n\rWhoAmIRequest\"7\n\016WhoAmIResponse\022" +
+      "\020\n\010username\030\001 \001(\t\022\023\n\013auth_method\030\002 \001(\t2{" +
+      "\n\025AuthenticationService\0227\n\026GetAuthentica" +
+      "tionToken\022\r.TokenRequest\032\016.TokenResponse" +
+      "\022)\n\006WhoAmI\022\016.WhoAmIRequest\032\017.WhoAmIRespo" +
+      "nseBJ\n*org.apache.hadoop.hbase.protobuf." +
+      "generatedB\024AuthenticationProtosH\001\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {

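For AuthenticationProtos the rename also reaches the generated service API: the RPC is now whoAmI rather than whoami, so server-side implementations must override the new method name. A minimal sketch, assuming a server-side implementor of the generated service (the username and auth method values are placeholders):

    import com.google.protobuf.RpcCallback;
    import com.google.protobuf.RpcController;
    import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;

    public class ExampleAuthenticationService extends AuthenticationProtos.AuthenticationService {
      @Override
      public void getAuthenticationToken(RpcController controller,
          AuthenticationProtos.TokenRequest request,
          RpcCallback<AuthenticationProtos.TokenResponse> done) {
        // Return an empty response; a real implementation would mint a delegation token here.
        done.run(AuthenticationProtos.TokenResponse.getDefaultInstance());
      }

      @Override
      public void whoAmI(RpcController controller,  // was whoami before this change
          AuthenticationProtos.WhoAmIRequest request,
          RpcCallback<AuthenticationProtos.WhoAmIResponse> done) {
        done.run(AuthenticationProtos.WhoAmIResponse.newBuilder()
            .setUsername("hbase")       // placeholder caller identity
            .setAuthMethod("KERBEROS")  // placeholder auth method
            .build());
      }
    }
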
Modified: hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/CellProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/CellProtos.java?rev=1501835&r1=1501834&r2=1501835&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/CellProtos.java (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/CellProtos.java Wed Jul 10 16:31:08 2013
@@ -108,7 +108,7 @@ public final class CellProtos {
     boolean hasTimestamp();
     long getTimestamp();
     
-    // optional .CellType cellType = 5;
+    // optional .CellType cell_type = 5;
     boolean hasCellType();
     org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType getCellType();
     
@@ -185,8 +185,8 @@ public final class CellProtos {
       return timestamp_;
     }
     
-    // optional .CellType cellType = 5;
-    public static final int CELLTYPE_FIELD_NUMBER = 5;
+    // optional .CellType cell_type = 5;
+    public static final int CELL_TYPE_FIELD_NUMBER = 5;
     private org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType cellType_;
     public boolean hasCellType() {
       return ((bitField0_ & 0x00000010) == 0x00000010);
@@ -355,7 +355,7 @@ public final class CellProtos {
         hash = (53 * hash) + hashLong(getTimestamp());
       }
       if (hasCellType()) {
-        hash = (37 * hash) + CELLTYPE_FIELD_NUMBER;
+        hash = (37 * hash) + CELL_TYPE_FIELD_NUMBER;
         hash = (53 * hash) + hashEnum(getCellType());
       }
       if (hasValue()) {
@@ -752,7 +752,7 @@ public final class CellProtos {
         return this;
       }
       
-      // optional .CellType cellType = 5;
+      // optional .CellType cell_type = 5;
       private org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType cellType_ = org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType.MINIMUM;
       public boolean hasCellType() {
         return ((bitField0_ & 0x00000010) == 0x00000010);
@@ -830,7 +830,7 @@ public final class CellProtos {
     boolean hasTimestamp();
     long getTimestamp();
     
-    // optional .CellType keyType = 5;
+    // optional .CellType key_type = 5;
     boolean hasKeyType();
     org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType getKeyType();
     
@@ -907,8 +907,8 @@ public final class CellProtos {
       return timestamp_;
     }
     
-    // optional .CellType keyType = 5;
-    public static final int KEYTYPE_FIELD_NUMBER = 5;
+    // optional .CellType key_type = 5;
+    public static final int KEY_TYPE_FIELD_NUMBER = 5;
     private org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType keyType_;
     public boolean hasKeyType() {
       return ((bitField0_ & 0x00000010) == 0x00000010);
@@ -1089,7 +1089,7 @@ public final class CellProtos {
         hash = (53 * hash) + hashLong(getTimestamp());
       }
       if (hasKeyType()) {
-        hash = (37 * hash) + KEYTYPE_FIELD_NUMBER;
+        hash = (37 * hash) + KEY_TYPE_FIELD_NUMBER;
         hash = (53 * hash) + hashEnum(getKeyType());
       }
       if (hasValue()) {
@@ -1498,7 +1498,7 @@ public final class CellProtos {
         return this;
       }
       
-      // optional .CellType keyType = 5;
+      // optional .CellType key_type = 5;
       private org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType keyType_ = org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType.MINIMUM;
       public boolean hasKeyType() {
         return ((bitField0_ & 0x00000010) == 0x00000010);
@@ -1576,17 +1576,17 @@ public final class CellProtos {
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\nCell.proto\"u\n\004Cell\022\013\n\003row\030\001 \001(\014\022\016\n\006fam" +
+      "\n\nCell.proto\"v\n\004Cell\022\013\n\003row\030\001 \001(\014\022\016\n\006fam" +
       "ily\030\002 \001(\014\022\021\n\tqualifier\030\003 \001(\014\022\021\n\ttimestam" +
-      "p\030\004 \001(\004\022\033\n\010cellType\030\005 \001(\0162\t.CellType\022\r\n\005" +
-      "value\030\006 \001(\014\"x\n\010KeyValue\022\013\n\003row\030\001 \002(\014\022\016\n\006" +
-      "family\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022\021\n\ttimes" +
-      "tamp\030\004 \001(\004\022\032\n\007keyType\030\005 \001(\0162\t.CellType\022\r" +
-      "\n\005value\030\006 \001(\014*`\n\010CellType\022\013\n\007MINIMUM\020\000\022\007" +
-      "\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDELETE_COLUMN\020\014\022\021" +
-      "\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIMUM\020\377\001B=\n*org.a" +
-      "pache.hadoop.hbase.protobuf.generatedB\nC",
-      "ellProtosH\001\240\001\001"
+      "p\030\004 \001(\004\022\034\n\tcell_type\030\005 \001(\0162\t.CellType\022\r\n" +
+      "\005value\030\006 \001(\014\"y\n\010KeyValue\022\013\n\003row\030\001 \002(\014\022\016\n" +
+      "\006family\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022\021\n\ttime" +
+      "stamp\030\004 \001(\004\022\033\n\010key_type\030\005 \001(\0162\t.CellType" +
+      "\022\r\n\005value\030\006 \001(\014*`\n\010CellType\022\013\n\007MINIMUM\020\000" +
+      "\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDELETE_COLUMN\020\014" +
+      "\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIMUM\020\377\001B=\n*org" +
+      ".apache.hadoop.hbase.protobuf.generatedB",
+      "\nCellProtosH\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {