Posted to commits@hbase.apache.org by st...@apache.org on 2016/07/21 17:20:43 UTC

[20/39] hbase git commit: HBASE-16263 Move all to do w/ protobuf -- *.proto files and generated classes -- under hbase-protocol

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/ColumnAggregationNullResponseProtocol.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/ColumnAggregationNullResponseProtocol.proto b/hbase-protocol/src/main/protobuf/ColumnAggregationNullResponseProtocol.proto
new file mode 100644
index 0000000..b4dc01e
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/ColumnAggregationNullResponseProtocol.proto
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Coprocessor test
+option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
+option java_outer_classname = "ColumnAggregationWithNullResponseProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+// use unique names for messages in ColumnAggregationXXX.protos due to a bug in
+// protoc or hadoop's protoc compiler.
+message ColumnAggregationNullResponseSumRequest {
+  required bytes family = 1;
+  optional bytes qualifier = 2;
+}
+
+message ColumnAggregationNullResponseSumResponse {
+  optional int64 sum = 1;
+}
+
+service ColumnAggregationServiceNullResponse {
+  rpc sum(ColumnAggregationNullResponseSumRequest)
+    returns(ColumnAggregationNullResponseSumResponse);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/ColumnAggregationProtocol.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/ColumnAggregationProtocol.proto b/hbase-protocol/src/main/protobuf/ColumnAggregationProtocol.proto
new file mode 100644
index 0000000..ad1acda
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/ColumnAggregationProtocol.proto
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Coprocessor test
+option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
+option java_outer_classname = "ColumnAggregationProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+message SumRequest {
+  required bytes family = 1;
+  optional bytes qualifier = 2;
+}
+
+message SumResponse {
+  required int64 sum = 1;
+}
+
+service ColumnAggregationService {
+  rpc sum(SumRequest) returns(SumResponse);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/ColumnAggregationWithErrorsProtocol.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/ColumnAggregationWithErrorsProtocol.proto b/hbase-protocol/src/main/protobuf/ColumnAggregationWithErrorsProtocol.proto
new file mode 100644
index 0000000..7808949
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/ColumnAggregationWithErrorsProtocol.proto
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Coprocessor test
+option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
+option java_outer_classname = "ColumnAggregationWithErrorsProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+// use unique names for messages in ColumnAggregationXXX.protos due to a bug in
+// protoc or hadoop's protoc compiler.
+message ColumnAggregationWithErrorsSumRequest {
+  required bytes family = 1;
+  optional bytes qualifier = 2;
+}
+
+message ColumnAggregationWithErrorsSumResponse {
+  required int64 sum = 1;
+}
+
+service ColumnAggregationServiceWithErrors {
+  rpc sum(ColumnAggregationWithErrorsSumRequest)
+    returns(ColumnAggregationWithErrorsSumResponse);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/ColumnSchemaMessage.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/ColumnSchemaMessage.proto b/hbase-protocol/src/main/protobuf/ColumnSchemaMessage.proto
new file mode 100644
index 0000000..05e33b6
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/ColumnSchemaMessage.proto
@@ -0,0 +1,31 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.hadoop.hbase.rest.protobuf.generated;
+
+message ColumnSchema {
+  optional string name = 1;
+  message Attribute {
+    required string name = 1;
+    required string value = 2;
+  }
+  repeated Attribute attrs = 2;
+  // optional helpful encodings of commonly used attributes
+  optional int32 ttl = 3;
+  optional int32 maxVersions = 4;
+  optional string compression = 5;
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/DummyRegionServerEndpoint.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/DummyRegionServerEndpoint.proto b/hbase-protocol/src/main/protobuf/DummyRegionServerEndpoint.proto
new file mode 100644
index 0000000..539f7da
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/DummyRegionServerEndpoint.proto
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package hbase.test.pb;
+
+// Coprocessor test
+option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
+option java_outer_classname = "DummyRegionServerEndpointProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+message DummyRequest {
+}
+
+message DummyResponse {
+  required string value = 1;
+}
+
+service DummyService {
+  rpc dummyCall(DummyRequest) returns(DummyResponse);
+  rpc dummyThrow(DummyRequest) returns(DummyResponse);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/Examples.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/Examples.proto b/hbase-protocol/src/main/protobuf/Examples.proto
new file mode 100644
index 0000000..ed9ed07
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/Examples.proto
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.coprocessor.example.generated";
+option java_outer_classname = "ExampleProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+message CountRequest {
+}
+
+message CountResponse {
+  required int64 count = 1 [default = 0];
+}
+
+service RowCountService {
+  rpc getRowCount(CountRequest)
+    returns (CountResponse);
+  rpc getKeyValueCount(CountRequest)
+    returns (CountResponse);
+}
\ No newline at end of file
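
For reference, a minimal client-side sketch of invoking the RowCountService endpoint defined above, assuming the HBase 1.x-era coprocessor client API (Table.coprocessorService with ServerRpcController and BlockingRpcCallback); the table name "t1" and connection setup are illustrative:

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;

public class RowCountClient {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("t1"))) {
      final ExampleProtos.CountRequest request =
          ExampleProtos.CountRequest.getDefaultInstance();
      // One call per region; each region returns its own partial count.
      Map<byte[], Long> results = table.coprocessorService(
          ExampleProtos.RowCountService.class,
          null, null,  // null start/end key = the whole table
          new Batch.Call<ExampleProtos.RowCountService, Long>() {
            @Override
            public Long call(ExampleProtos.RowCountService counter) throws IOException {
              ServerRpcController controller = new ServerRpcController();
              BlockingRpcCallback<ExampleProtos.CountResponse> callback =
                  new BlockingRpcCallback<ExampleProtos.CountResponse>();
              counter.getRowCount(controller, request, callback);
              ExampleProtos.CountResponse response = callback.get();
              if (controller.failedOnException()) {
                throw controller.getFailedOn();
              }
              return response.getCount();
            }
          });
      long total = 0;
      for (Long regionCount : results.values()) {
        total += regionCount;
      }
      System.out.println("row count = " + total);
    }
  }
}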

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/IncrementCounterProcessor.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/IncrementCounterProcessor.proto b/hbase-protocol/src/main/protobuf/IncrementCounterProcessor.proto
new file mode 100644
index 0000000..b8c77ca
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/IncrementCounterProcessor.proto
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
+option java_outer_classname = "IncrementCounterProcessorTestProtos";
+option java_generate_equals_and_hash = true;
+
+message IncCounterProcessorRequest {
+  required bytes row = 1;
+  required int32 counter = 2;
+}
+
+message IncCounterProcessorResponse {
+  required int32 response = 1;
+}
+
+message FriendsOfFriendsProcessorRequest {
+  required bytes person = 1;
+  required bytes row = 2;
+  repeated string result = 3;
+}
+
+message FriendsOfFriendsProcessorResponse {
+  repeated string result = 1;
+}
+
+message RowSwapProcessorRequest {
+  required bytes row1 = 1;
+  required bytes row2 = 2;
+}
+
+message RowSwapProcessorResponse {
+}
+
+message TimeoutProcessorRequest {
+  required bytes row = 1;
+}
+
+message TimeoutProcessorResponse {
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/NamespacePropertiesMessage.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/NamespacePropertiesMessage.proto b/hbase-protocol/src/main/protobuf/NamespacePropertiesMessage.proto
new file mode 100644
index 0000000..fbecb71
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/NamespacePropertiesMessage.proto
@@ -0,0 +1,26 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.hadoop.hbase.rest.protobuf.generated;
+
+message NamespaceProperties {
+  message Property {
+    required string key = 1;
+    required string value = 2;
+  }
+  repeated Property props = 1;
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/NamespacesMessage.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/NamespacesMessage.proto b/hbase-protocol/src/main/protobuf/NamespacesMessage.proto
new file mode 100644
index 0000000..2c5cbb3
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/NamespacesMessage.proto
@@ -0,0 +1,22 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.hadoop.hbase.rest.protobuf.generated;
+
+message Namespaces {
+	repeated string namespace = 1;
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/PingProtocol.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/PingProtocol.proto b/hbase-protocol/src/main/protobuf/PingProtocol.proto
new file mode 100644
index 0000000..ef63ee0
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/PingProtocol.proto
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Coprocessor test
+option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
+option java_outer_classname = "PingProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+message PingRequest {
+}
+
+message PingResponse {
+  required string pong = 1;
+}
+
+message CountRequest {
+}
+
+message CountResponse {
+  required int32 count = 1;
+}
+
+message IncrementCountRequest {
+  required int32 diff = 1;
+}
+
+message IncrementCountResponse {
+  required int32 count = 1;
+}
+
+message HelloRequest {
+  optional string name = 1;
+}
+
+message HelloResponse {
+  optional string response = 1;
+}
+
+message NoopRequest {
+}
+
+message NoopResponse {
+}
+
+service PingService {
+  rpc ping(PingRequest) returns(PingResponse);
+  rpc count(CountRequest) returns(CountResponse);
+  rpc increment(IncrementCountRequest) returns(IncrementCountResponse);
+  rpc hello(HelloRequest) returns(HelloResponse);
+  rpc noop(NoopRequest) returns(NoopResponse);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/ScannerMessage.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/ScannerMessage.proto b/hbase-protocol/src/main/protobuf/ScannerMessage.proto
new file mode 100644
index 0000000..185eac6
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/ScannerMessage.proto
@@ -0,0 +1,32 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.hadoop.hbase.rest.protobuf.generated;
+
+message Scanner {
+  optional bytes startRow = 1;
+  optional bytes endRow = 2;
+  repeated bytes columns = 3;
+  optional int32 batch = 4;
+  optional int64 startTime = 5;
+  optional int64 endTime = 6;
+  optional int32 maxVersions = 7;
+  optional string filter = 8;
+  optional int32 caching = 9;     // specifies REST scanner caching
+  repeated string labels = 10;
+  optional bool cacheBlocks = 11; // server side block caching hint
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/SparkFilter.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/SparkFilter.proto b/hbase-protocol/src/main/protobuf/SparkFilter.proto
new file mode 100644
index 0000000..e16c551
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/SparkFilter.proto
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers that are used for Spark filters
+// over in the hbase-spark module
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.spark.protobuf.generated";
+option java_outer_classname = "SparkFilterProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+message SQLPredicatePushDownCellToColumnMapping {
+  required bytes column_family = 1;
+  required bytes qualifier = 2;
+  required string column_name = 3;
+}
+
+message SQLPredicatePushDownFilter {
+  required string dynamic_logic_expression = 1;
+  repeated bytes value_from_query_array = 2;
+  repeated SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;
+  optional string encoderClassName = 4;
+}
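
A sketch of assembling this filter message with the generated builders; the expression, family/qualifier, and value below are illustrative, not taken from the hbase-spark code:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos;

public class SparkFilterMessageExample {
  // Build and serialize a push-down filter message; field values are illustrative.
  public static byte[] buildFilterBytes() {
    SparkFilterProtos.SQLPredicatePushDownFilter filter =
        SparkFilterProtos.SQLPredicatePushDownFilter.newBuilder()
            .setDynamicLogicExpression("( col0 > value0 )")
            .addValueFromQueryArray(ByteString.copyFromUtf8("10"))
            .addCellToColumnMapping(
                SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.newBuilder()
                    .setColumnFamily(ByteString.copyFromUtf8("cf"))
                    .setQualifier(ByteString.copyFromUtf8("a"))
                    .setColumnName("col0"))
            .build();
    return filter.toByteArray();  // protobuf wire bytes
  }
}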

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/StorageClusterStatusMessage.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/StorageClusterStatusMessage.proto b/hbase-protocol/src/main/protobuf/StorageClusterStatusMessage.proto
new file mode 100644
index 0000000..34dc1c3
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/StorageClusterStatusMessage.proto
@@ -0,0 +1,51 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.hadoop.hbase.rest.protobuf.generated;
+
+message StorageClusterStatus {
+  message Region {
+    required bytes name = 1;
+    optional int32 stores = 2;
+    optional int32 storefiles = 3;
+    optional int32 storefileSizeMB = 4;
+    optional int32 memstoreSizeMB = 5;
+    optional int32 storefileIndexSizeMB = 6;
+    optional int64 readRequestsCount = 7;
+    optional int64 writeRequestsCount = 8;
+    optional int32 rootIndexSizeKB = 9;
+    optional int32 totalStaticIndexSizeKB = 10;
+    optional int32 totalStaticBloomSizeKB = 11;
+    optional int64 totalCompactingKVs = 12;
+    optional int64 currentCompactedKVs = 13;
+  }
+  message Node {
+    required string name = 1;    // name:port
+    optional int64 startCode = 2;
+    optional int64 requests = 3;
+    optional int32 heapSizeMB = 4;
+    optional int32 maxHeapSizeMB = 5;
+    repeated Region regions = 6;
+  }
+  // node status
+  repeated Node liveNodes = 1;
+  repeated string deadNodes = 2;
+  // summary statistics
+  optional int32 regions = 3; 
+  optional int64 requests = 4;
+  optional double averageLoad = 5;
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/TableInfoMessage.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/TableInfoMessage.proto b/hbase-protocol/src/main/protobuf/TableInfoMessage.proto
new file mode 100644
index 0000000..674499c
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/TableInfoMessage.proto
@@ -0,0 +1,30 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.hadoop.hbase.rest.protobuf.generated;
+
+message TableInfo {
+  required string name = 1;
+  message Region {
+    required string name = 1;
+    optional bytes startKey = 2;
+    optional bytes endKey = 3;
+    optional int64 id = 4;
+    optional string location = 5;
+  }
+  repeated Region regions = 2;
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/TableListMessage.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/TableListMessage.proto b/hbase-protocol/src/main/protobuf/TableListMessage.proto
new file mode 100644
index 0000000..fbd76ea
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/TableListMessage.proto
@@ -0,0 +1,22 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.hadoop.hbase.rest.protobuf.generated;
+
+message TableList {
+  repeated string name = 1;
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/TableSchemaMessage.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/TableSchemaMessage.proto b/hbase-protocol/src/main/protobuf/TableSchemaMessage.proto
new file mode 100644
index 0000000..47a4da5
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/TableSchemaMessage.proto
@@ -0,0 +1,33 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import "ColumnSchemaMessage.proto";
+
+package org.apache.hadoop.hbase.rest.protobuf.generated;
+
+message TableSchema {
+  optional string name = 1;
+  message Attribute {
+    required string name = 1;
+    required string value = 2;
+  }  
+  repeated Attribute attrs = 2;
+  repeated ColumnSchema columns = 3;
+  // optional helpful encodings of commonly used attributes
+  optional bool inMemory = 4;
+  optional bool readOnly = 5;
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/TestProcedure.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/TestProcedure.proto b/hbase-protocol/src/main/protobuf/TestProcedure.proto
new file mode 100644
index 0000000..de74f36
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/TestProcedure.proto
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+option java_package = "org.apache.hadoop.hbase.ipc.protobuf.generated";
+option java_outer_classname = "TestProcedureProtos";
+option java_generic_services = true;
+
+message TestTableDDLStateData {
+  required string table_name = 1;
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/VersionMessage.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/VersionMessage.proto b/hbase-protocol/src/main/protobuf/VersionMessage.proto
new file mode 100644
index 0000000..cc107b3
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/VersionMessage.proto
@@ -0,0 +1,26 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.hadoop.hbase.rest.protobuf.generated;
+
+message Version {
+  optional string restVersion = 1;
+  optional string jvmVersion = 2;
+  optional string osVersion = 3;
+  optional string serverVersion = 4;
+  optional string jerseyVersion = 5;
+}
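
On the consuming side, the generated classes parse straight from the wire format; a minimal sketch for the Version message, where the byte[] source (for example, a protobuf-encoded REST response body) is up to the caller:

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage;

public class VersionDecodeExample {
  // Decode a protobuf-encoded Version message and pull out one field.
  public static String restVersion(byte[] body) throws InvalidProtocolBufferException {
    VersionMessage.Version version = VersionMessage.Version.parseFrom(body);
    return version.hasRestVersion() ? version.getRestVersion() : "unknown";
  }
}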

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/test.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/test.proto b/hbase-protocol/src/main/protobuf/test.proto
new file mode 100644
index 0000000..566b04b
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/test.proto
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hbase.ipc.protobuf.generated";
+option java_outer_classname = "TestProtos";
+option java_generate_equals_and_hash = true;
+
+message EmptyRequestProto {
+}
+
+message EmptyResponseProto {
+}
+
+message EchoRequestProto {
+  required string message = 1;
+}
+
+message EchoResponseProto {
+  required string message = 1;
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-protocol/src/main/protobuf/test_rpc_service.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/test_rpc_service.proto b/hbase-protocol/src/main/protobuf/test_rpc_service.proto
new file mode 100644
index 0000000..4ed0380
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/test_rpc_service.proto
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+option java_package = "org.apache.hadoop.hbase.ipc.protobuf.generated";
+option java_outer_classname = "TestRpcServiceProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+import "test.proto";
+
+
+/**
+ * A protobuf service for use in tests
+ */
+service TestProtobufRpcProto {
+  rpc ping(EmptyRequestProto) returns (EmptyResponseProto);
+  rpc echo(EchoRequestProto) returns (EchoResponseProto);
+  rpc error(EmptyRequestProto) returns (EmptyResponseProto);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-rest/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index a66be01..2e1b245 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -188,6 +188,11 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-protocol</artifactId>
+      <type>jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-annotations</artifactId>
       <exclusions>
         <exclusion>
@@ -334,56 +339,6 @@
         <surefire.skipSecondPart>true</surefire.skipSecondPart>
       </properties>
     </profile>
-    <profile>
-      <id>compile-protobuf</id>
-      <activation>
-        <property>
-          <name>compile-protobuf</name>
-        </property>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-maven-plugins</artifactId>
-            <executions>
-              <execution>
-                <id>compile-protoc</id>
-                <phase>generate-sources</phase>
-                <goals>
-                  <goal>protoc</goal>
-                </goals>
-                <configuration>
-                  <imports>
-                    <param>${basedir}/src/main/resources/org/apache/hadoop/hbase/rest/protobuf
-                    </param>
-                  </imports>
-                  <source>
-                    <!-- These should be under src/main/protobuf -->
-                    <directory>${basedir}/src/main/resources/org/apache/hadoop/hbase/rest/protobuf
-                    </directory>
-                    <includes>
-                      <include>CellMessage.proto</include>
-                      <include>CellSetMessage.proto</include>
-                      <include>ColumnSchemaMessage.proto</include>
-                      <include>NamespacePropertiesMessage.proto</include>
-                      <include>NamespacesMessage.proto</include>
-                      <include>ScannerMessage.proto</include>
-                      <include>StorageClusterStatusMessage.proto</include>
-                      <include>TableInfoMessage.proto</include>
-                      <include>TableListMessage.proto</include>
-                      <include>TableSchemaMessage.proto</include>
-                      <include>VersionMessage.proto</include>
-                    </includes>
-                  </source>
-                  <output>${basedir}/src/main/java/</output>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
     <!-- Hadoop-specific dependencies -->
     <!-- profile for building against Hadoop 2.0.x
          This is the default.

http://git-wip-us.apache.org/repos/asf/hbase/blob/9d740f7b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java
deleted file mode 100644
index 4c859e1..0000000
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java
+++ /dev/null
@@ -1,731 +0,0 @@
-// Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: CellMessage.proto
-
-package org.apache.hadoop.hbase.rest.protobuf.generated;
-
-public final class CellMessage {
-  private CellMessage() {}
-  public static void registerAllExtensions(
-      com.google.protobuf.ExtensionRegistry registry) {
-  }
-  public interface CellOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // optional bytes row = 1;
-    /**
-     * <code>optional bytes row = 1;</code>
-     *
-     * <pre>
-     * unused if Cell is in a CellSet
-     * </pre>
-     */
-    boolean hasRow();
-    /**
-     * <code>optional bytes row = 1;</code>
-     *
-     * <pre>
-     * unused if Cell is in a CellSet
-     * </pre>
-     */
-    com.google.protobuf.ByteString getRow();
-
-    // optional bytes column = 2;
-    /**
-     * <code>optional bytes column = 2;</code>
-     */
-    boolean hasColumn();
-    /**
-     * <code>optional bytes column = 2;</code>
-     */
-    com.google.protobuf.ByteString getColumn();
-
-    // optional int64 timestamp = 3;
-    /**
-     * <code>optional int64 timestamp = 3;</code>
-     */
-    boolean hasTimestamp();
-    /**
-     * <code>optional int64 timestamp = 3;</code>
-     */
-    long getTimestamp();
-
-    // optional bytes data = 4;
-    /**
-     * <code>optional bytes data = 4;</code>
-     */
-    boolean hasData();
-    /**
-     * <code>optional bytes data = 4;</code>
-     */
-    com.google.protobuf.ByteString getData();
-  }
-  /**
-   * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Cell}
-   */
-  public static final class Cell extends
-      com.google.protobuf.GeneratedMessage
-      implements CellOrBuilder {
-    // Use Cell.newBuilder() to construct.
-    private Cell(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
-      super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private Cell(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final Cell defaultInstance;
-    public static Cell getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public Cell getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private Cell(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              row_ = input.readBytes();
-              break;
-            }
-            case 18: {
-              bitField0_ |= 0x00000002;
-              column_ = input.readBytes();
-              break;
-            }
-            case 24: {
-              bitField0_ |= 0x00000004;
-              timestamp_ = input.readInt64();
-              break;
-            }
-            case 34: {
-              bitField0_ |= 0x00000008;
-              data_ = input.readBytes();
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
-    }
-
-    public static com.google.protobuf.Parser<Cell> PARSER =
-        new com.google.protobuf.AbstractParser<Cell>() {
-      public Cell parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new Cell(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<Cell> getParserForType() {
-      return PARSER;
-    }
-
-    private int bitField0_;
-    // optional bytes row = 1;
-    public static final int ROW_FIELD_NUMBER = 1;
-    private com.google.protobuf.ByteString row_;
-    /**
-     * <code>optional bytes row = 1;</code>
-     *
-     * <pre>
-     * unused if Cell is in a CellSet
-     * </pre>
-     */
-    public boolean hasRow() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * <code>optional bytes row = 1;</code>
-     *
-     * <pre>
-     * unused if Cell is in a CellSet
-     * </pre>
-     */
-    public com.google.protobuf.ByteString getRow() {
-      return row_;
-    }
-
-    // optional bytes column = 2;
-    public static final int COLUMN_FIELD_NUMBER = 2;
-    private com.google.protobuf.ByteString column_;
-    /**
-     * <code>optional bytes column = 2;</code>
-     */
-    public boolean hasColumn() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    /**
-     * <code>optional bytes column = 2;</code>
-     */
-    public com.google.protobuf.ByteString getColumn() {
-      return column_;
-    }
-
-    // optional int64 timestamp = 3;
-    public static final int TIMESTAMP_FIELD_NUMBER = 3;
-    private long timestamp_;
-    /**
-     * <code>optional int64 timestamp = 3;</code>
-     */
-    public boolean hasTimestamp() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
-    }
-    /**
-     * <code>optional int64 timestamp = 3;</code>
-     */
-    public long getTimestamp() {
-      return timestamp_;
-    }
-
-    // optional bytes data = 4;
-    public static final int DATA_FIELD_NUMBER = 4;
-    private com.google.protobuf.ByteString data_;
-    /**
-     * <code>optional bytes data = 4;</code>
-     */
-    public boolean hasData() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
-    }
-    /**
-     * <code>optional bytes data = 4;</code>
-     */
-    public com.google.protobuf.ByteString getData() {
-      return data_;
-    }
-
-    private void initFields() {
-      row_ = com.google.protobuf.ByteString.EMPTY;
-      column_ = com.google.protobuf.ByteString.EMPTY;
-      timestamp_ = 0L;
-      data_ = com.google.protobuf.ByteString.EMPTY;
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeBytes(1, row_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeBytes(2, column_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeInt64(3, timestamp_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeBytes(4, data_);
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(1, row_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(2, column_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt64Size(3, timestamp_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(4, data_);
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    private static final long serialVersionUID = 0L;
-    @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input);
-    }
-    public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
-    }
-    public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input);
-    }
-    public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    /**
-     * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Cell}
-     */
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.CellOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
-      }
-
-      // Construct using org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        row_ = com.google.protobuf.ByteString.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000001);
-        column_ = com.google.protobuf.ByteString.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000002);
-        timestamp_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000004);
-        data_ = com.google.protobuf.ByteString.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000008);
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
-      }
-
-      public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.getDefaultInstance();
-      }
-
-      public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell build() {
-        org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell buildPartial() {
-        org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell result = new org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.row_ = row_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        result.column_ = column_;
-        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-          to_bitField0_ |= 0x00000004;
-        }
-        result.timestamp_ = timestamp_;
-        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-          to_bitField0_ |= 0x00000008;
-        }
-        result.data_ = data_;
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell) {
-          return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell other) {
-        if (other == org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.getDefaultInstance()) return this;
-        if (other.hasRow()) {
-          setRow(other.getRow());
-        }
-        if (other.hasColumn()) {
-          setColumn(other.getColumn());
-        }
-        if (other.hasTimestamp()) {
-          setTimestamp(other.getTimestamp());
-        }
-        if (other.hasData()) {
-          setData(other.getData());
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell) e.getUnfinishedMessage();
-          throw e;
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
-          }
-        }
-        return this;
-      }
-      private int bitField0_;
-
-      // optional bytes row = 1;
-      private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
-      /**
-       * <code>optional bytes row = 1;</code>
-       *
-       * <pre>
-       * unused if Cell is in a CellSet
-       * </pre>
-       */
-      public boolean hasRow() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      /**
-       * <code>optional bytes row = 1;</code>
-       *
-       * <pre>
-       * unused if Cell is in a CellSet
-       * </pre>
-       */
-      public com.google.protobuf.ByteString getRow() {
-        return row_;
-      }
-      /**
-       * <code>optional bytes row = 1;</code>
-       *
-       * <pre>
-       * unused if Cell is in a CellSet
-       * </pre>
-       */
-      public Builder setRow(com.google.protobuf.ByteString value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000001;
-        row_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional bytes row = 1;</code>
-       *
-       * <pre>
-       * unused if Cell is in a CellSet
-       * </pre>
-       */
-      public Builder clearRow() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        row_ = getDefaultInstance().getRow();
-        onChanged();
-        return this;
-      }
-
-      // optional bytes column = 2;
-      private com.google.protobuf.ByteString column_ = com.google.protobuf.ByteString.EMPTY;
-      /**
-       * <code>optional bytes column = 2;</code>
-       */
-      public boolean hasColumn() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      /**
-       * <code>optional bytes column = 2;</code>
-       */
-      public com.google.protobuf.ByteString getColumn() {
-        return column_;
-      }
-      /**
-       * <code>optional bytes column = 2;</code>
-       */
-      public Builder setColumn(com.google.protobuf.ByteString value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000002;
-        column_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional bytes column = 2;</code>
-       */
-      public Builder clearColumn() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        column_ = getDefaultInstance().getColumn();
-        onChanged();
-        return this;
-      }
-
-      // optional int64 timestamp = 3;
-      private long timestamp_ ;
-      /**
-       * <code>optional int64 timestamp = 3;</code>
-       */
-      public boolean hasTimestamp() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      /**
-       * <code>optional int64 timestamp = 3;</code>
-       */
-      public long getTimestamp() {
-        return timestamp_;
-      }
-      /**
-       * <code>optional int64 timestamp = 3;</code>
-       */
-      public Builder setTimestamp(long value) {
-        bitField0_ |= 0x00000004;
-        timestamp_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional int64 timestamp = 3;</code>
-       */
-      public Builder clearTimestamp() {
-        bitField0_ = (bitField0_ & ~0x00000004);
-        timestamp_ = 0L;
-        onChanged();
-        return this;
-      }
-
-      // optional bytes data = 4;
-      private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
-      /**
-       * <code>optional bytes data = 4;</code>
-       */
-      public boolean hasData() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
-      }
-      /**
-       * <code>optional bytes data = 4;</code>
-       */
-      public com.google.protobuf.ByteString getData() {
-        return data_;
-      }
-      /**
-       * <code>optional bytes data = 4;</code>
-       */
-      public Builder setData(com.google.protobuf.ByteString value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000008;
-        data_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional bytes data = 4;</code>
-       */
-      public Builder clearData() {
-        bitField0_ = (bitField0_ & ~0x00000008);
-        data_ = getDefaultInstance().getData();
-        onChanged();
-        return this;
-      }
-
-      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Cell)
-    }
-
-    static {
-      defaultInstance = new Cell(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Cell)
-  }
-
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable;
-
-  public static com.google.protobuf.Descriptors.FileDescriptor
-      getDescriptor() {
-    return descriptor;
-  }
-  private static com.google.protobuf.Descriptors.FileDescriptor
-      descriptor;
-  static {
-    java.lang.String[] descriptorData = {
-      "\n\021CellMessage.proto\022/org.apache.hadoop.h" +
-      "base.rest.protobuf.generated\"D\n\004Cell\022\013\n\003" +
-      "row\030\001 \001(\014\022\016\n\006column\030\002 \001(\014\022\021\n\ttimestamp\030\003" +
-      " \001(\003\022\014\n\004data\030\004 \001(\014"
-    };
-    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
-      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
-        public com.google.protobuf.ExtensionRegistry assignDescriptors(
-            com.google.protobuf.Descriptors.FileDescriptor root) {
-          descriptor = root;
-          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor =
-            getDescriptor().getMessageTypes().get(0);
-          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor,
-              new java.lang.String[] { "Row", "Column", "Timestamp", "Data", });
-          return null;
-        }
-      };
-    com.google.protobuf.Descriptors.FileDescriptor
-      .internalBuildGeneratedFileFrom(descriptorData,
-        new com.google.protobuf.Descriptors.FileDescriptor[] {
-        }, assigner);
-  }
-
-  // @@protoc_insertion_point(outer_class_scope)
-}
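For context, a minimal sketch of how the generated Builder deleted above is used to construct and serialize a Cell message under the old org.apache.hadoop.hbase.rest.protobuf.generated package; the row/column/timestamp/data values are hypothetical examples, and toByteArray() comes from the standard protobuf runtime rather than the diff itself:

  import com.google.protobuf.ByteString;
  import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage;

  // Build a REST Cell message with the generated setters shown in the diff above.
  CellMessage.Cell cell = CellMessage.Cell.newBuilder()
      .setRow(ByteString.copyFromUtf8("row1"))        // optional bytes row = 1
      .setColumn(ByteString.copyFromUtf8("cf:qual"))  // optional bytes column = 2
      .setTimestamp(1469121643000L)                   // optional int64 timestamp = 3
      .setData(ByteString.copyFromUtf8("value"))      // optional bytes data = 4
      .build();

  // Serialize to the wire format used by the REST protobuf encoding.
  byte[] wire = cell.toByteArray();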