Posted to commits@hbase.apache.org by ap...@apache.org on 2017/08/10 01:20:48 UTC
[01/11] hbase git commit: HBASE-18431 Mitigate compatibility concerns between branch-1.3 and branch-1.4
Repository: hbase
Updated Branches:
refs/heads/branch-1 036fce3cf -> 685ab1906
refs/heads/branch-1.4 8d826b8fc -> b6ff1d5e8
refs/heads/branch-2 f855b5165 -> a90217555
refs/heads/master 794a3b104 -> d0941127d
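This patch reverts two class relocations made during branch-1.4 development that broke compatibility with coprocessors compiled against branch-1.3: SnapshotDescription moves back from SnapshotProtos to HBaseProtos, and DeleteTracker moves back from the regionserver.querymatcher subpackage to regionserver. Both types appear in the stock 1.x coprocessor hooks, which is why their package matters downstream. A sketch of the kind of observer this protects, assuming the usual 1.x MasterObserver/RegionObserver signatures (the class itself is hypothetical):

import java.io.IOException;

import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.coprocessor.BaseMasterAndRegionObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;

public class CompatObserver extends BaseMasterAndRegionObserver {
  @Override
  public void preSnapshot(ObserverContext<MasterCoprocessorEnvironment> ctx,
      SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException {
    // Resolves only while SnapshotDescription stays in HBaseProtos,
    // its branch-1.3 location.
  }

  @Override
  public DeleteTracker postInstantiateDeleteTracker(
      ObserverContext<RegionCoprocessorEnvironment> ctx, DeleteTracker delTracker)
      throws IOException {
    // Likewise, this hook's signature pins DeleteTracker to
    // org.apache.hadoop.hbase.regionserver.
    return delTracker;
  }
}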
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index c216995..32f62ee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStor
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest;
@@ -185,7 +186,6 @@ import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.Repor
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.RSRpcServices;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.User;
@@ -903,7 +903,7 @@ public class MasterRpcServices extends RSRpcServices
List<TableName> tableNameList = null;
if (req.getTableNamesCount() > 0) {
tableNameList = new ArrayList<TableName>(req.getTableNamesCount());
- for (HBaseProtos.TableName tableNamePB: req.getTableNamesList()) {
+ for (TableProtos.TableName tableNamePB: req.getTableNamesList()) {
tableNameList.add(ProtobufUtil.toTableName(tableNamePB));
}
}
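Alongside the snapshot moves, references to the protobuf TableName message now go through the TableProtos holder rather than HBaseProtos, as the hunk above shows. A small sketch of building the relocated message and converting it back to a client-side TableName (hypothetical standalone class; TableName.valueOf is the stock client API, and ProtobufUtil.toTableName performs the same conversion in the hunk above):

import com.google.protobuf.ByteString;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

public class TableNamePbExample {
  public static void main(String[] args) {
    // Build the relocated protobuf message directly.
    TableProtos.TableName pb = TableProtos.TableName.newBuilder()
        .setNamespace(ByteString.copyFromUtf8("default"))
        .setQualifier(ByteString.copyFromUtf8("t1"))
        .build();
    // Convert back to the client-side TableName.
    TableName tn = TableName.valueOf(pb.getNamespace().toByteArray(),
        pb.getQualifier().toByteArray());
    System.out.println(tn);
  }
}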
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
index 0f1f495..2f769f3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.master;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
/**
* Watch the current snapshot under process
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
index ff59ea1..6f8bcd4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.master.SnapshotSentinel;
import org.apache.hadoop.hbase.master.handler.CreateTableHandler;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
index 8e40a7d..b87c826 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.master.MasterServices;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.util.FSUtils;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
index fa4245a..81a3bb4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.procedure.Procedure;
import org.apache.hadoop.hbase.procedure.ProcedureCoordinator;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
import org.apache.hadoop.hbase.util.Pair;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
index 73b2198..bb54fc3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.master.MasterServices;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
index 80f151d..1185073 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.master.SnapshotSentinel;
import org.apache.hadoop.hbase.master.handler.TableEventHandler;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
index 4e0181f..3efe984 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
@@ -62,8 +62,8 @@ import org.apache.hadoop.hbase.procedure.ProcedureCoordinatorRpcs;
import org.apache.hadoop.hbase.procedure.ZKProcedureCoordinatorRpcs;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.hadoop.hbase.quotas.QuotaExceededException;
import org.apache.hadoop.hbase.security.AccessDeniedException;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
index 7b45610..39387cb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.master.TableLockManager;
import org.apache.hadoop.hbase.master.TableLockManager.TableLock;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
new file mode 100644
index 0000000..6ebc912
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
+
+/**
+ * This interface is used for the tracking and enforcement of Deletes during the course of a Get or
+ * Scan operation.
+ * <p>
+ * This class is utilized through three methods:
+ * <ul>
+ * <li>{@link #add} when encountering a Delete</li>
+ * <li>{@link #isDeleted} when checking if a Put KeyValue has been deleted</li>
+ * <li>{@link #update} when reaching the end of a StoreFile</li>
+ * </ul>
+ */
+@InterfaceAudience.Private
+public interface DeleteTracker {
+
+ /**
+ * Add the specified cell to the list of deletes to check against for this row operation.
+ * <p>
+ * This is called when a Delete is encountered in a StoreFile.
+ * @param cell - the delete cell
+ */
+ void add(Cell cell);
+
+ /**
+ * Check if the specified cell buffer has been deleted by a previously seen delete.
+ * @param cell - current cell to check if deleted by a previously seen delete
+ * @return deleteResult The result tells whether the KeyValue is deleted and why
+ */
+ DeleteResult isDeleted(Cell cell);
+
+ /**
+ * @return true if there are no current deletes, false otherwise
+ */
+ boolean isEmpty();
+
+ /**
+ * Called at the end of every StoreFile.
+ * <p>
+ * Many optimized implementations of Trackers will require an update when the end of each
+ * StoreFile is reached.
+ */
+ void update();
+
+ /**
+ * Called between rows.
+ * <p>
+ * This clears everything as if a new DeleteTracker was instantiated.
+ */
+ void reset();
+
+ /**
+ * Return codes for comparison of two Deletes.
+ * <p>
+ * The codes tell the merging function what to do.
+ * <p>
+ * INCLUDE means add the specified Delete to the merged list. NEXT means move to the next element
+ * in the specified list(s).
+ */
+ enum DeleteCompare {
+ INCLUDE_OLD_NEXT_OLD,
+ INCLUDE_OLD_NEXT_BOTH,
+ INCLUDE_NEW_NEXT_NEW,
+ INCLUDE_NEW_NEXT_BOTH,
+ NEXT_OLD,
+ NEXT_NEW
+ }
+
+ /**
+ * Return codes for a delete result. The codes tell the ScanQueryMatcher whether the KeyValue is
+ * deleted and why; based on the result, the ScanQueryMatcher decides its next operation.
+ */
+ enum DeleteResult {
+ FAMILY_DELETED, // The KeyValue is deleted by a delete family.
+ FAMILY_VERSION_DELETED, // The KeyValue is deleted by a delete family version.
+ COLUMN_DELETED, // The KeyValue is deleted by a delete column.
+ VERSION_DELETED, // The KeyValue is deleted by a version delete.
+ NOT_DELETED
+ }
+
+}
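The interface above is exercised through the add/isDeleted/update/reset lifecycle its javadoc describes. A minimal no-op implementation, purely illustrative and not part of the patch (HBase's real single-family tracker is ScanDeleteTracker), makes that contract concrete:

// Hypothetical illustration only; not part of this commit.
package org.apache.hadoop.hbase.regionserver;

import org.apache.hadoop.hbase.Cell;

public class NoOpDeleteTracker implements DeleteTracker {
  @Override
  public void add(Cell cell) {
    // A real tracker records the delete's type, qualifier, and timestamp here.
  }

  @Override
  public DeleteResult isDeleted(Cell cell) {
    // With no recorded deletes, every cell survives.
    return DeleteResult.NOT_DELETED;
  }

  @Override
  public boolean isEmpty() {
    return true; // nothing is ever tracked
  }

  @Override
  public void update() {
    // Called at each StoreFile boundary; no per-file state to refresh.
  }

  @Override
  public void reset() {
    // Called between rows; no per-row state to clear.
  }
}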
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index d6ad5a4..4a4723d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -156,7 +156,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 96855b8..dbe8521 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -73,9 +73,9 @@ import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.Region.Operation;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/CompactionScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/CompactionScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/CompactionScanQueryMatcher.java
index 49e5f42..0d083b4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/CompactionScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/CompactionScanQueryMatcher.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
import org.apache.hadoop.hbase.regionserver.ScanType;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.java
deleted file mode 100644
index 4e1ba4e..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.regionserver.querymatcher;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
-
-/**
- * This interface is used for the tracking and enforcement of Deletes during the course of a Get or
- * Scan operation.
- * <p>
- * This class is utilized through three methods:
- * <ul>
- * <li>{@link #add} when encountering a Delete</li>
- * <li>{@link #isDeleted} when checking if a Put KeyValue has been deleted</li>
- * <li>{@link #update} when reaching the end of a StoreFile</li>
- * </ul>
- */
-@InterfaceAudience.Private
-public interface DeleteTracker {
-
- /**
- * Add the specified cell to the list of deletes to check against for this row operation.
- * <p>
- * This is called when a Delete is encountered in a StoreFile.
- * @param cell - the delete cell
- */
- void add(Cell cell);
-
- /**
- * Check if the specified cell buffer has been deleted by a previously seen delete.
- * @param cell - current cell to check if deleted by a previously seen delete
- * @return deleteResult The result tells whether the KeyValue is deleted and why
- */
- DeleteResult isDeleted(Cell cell);
-
- /**
- * @return true if there are no current delete, false otherwise
- */
- boolean isEmpty();
-
- /**
- * Called at the end of every StoreFile.
- * <p>
- * Many optimized implementations of Trackers will require an update at when the end of each
- * StoreFile is reached.
- */
- void update();
-
- /**
- * Called between rows.
- * <p>
- * This clears everything as if a new DeleteTracker was instantiated.
- */
- void reset();
-
- /**
- * Return codes for comparison of two Deletes.
- * <p>
- * The codes tell the merging function what to do.
- * <p>
- * INCLUDE means add the specified Delete to the merged list. NEXT means move to the next element
- * in the specified list(s).
- */
- enum DeleteCompare {
- INCLUDE_OLD_NEXT_OLD,
- INCLUDE_OLD_NEXT_BOTH,
- INCLUDE_NEW_NEXT_NEW,
- INCLUDE_NEW_NEXT_BOTH,
- NEXT_OLD,
- NEXT_NEW
- }
-
- /**
- * Returns codes for delete result. The codes tell the ScanQueryMatcher whether the kv is deleted
- * and why. Based on the delete result, the ScanQueryMatcher will decide the next operation
- */
- enum DeleteResult {
- FAMILY_DELETED, // The KeyValue is deleted by a delete family.
- FAMILY_VERSION_DELETED, // The KeyValue is deleted by a delete family version.
- COLUMN_DELETED, // The KeyValue is deleted by a delete column.
- VERSION_DELETED, // The KeyValue is deleted by a version delete.
- NOT_DELETED
- }
-
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/DropDeletesCompactionScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/DropDeletesCompactionScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/DropDeletesCompactionScanQueryMatcher.java
index b20abee..ad946e0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/DropDeletesCompactionScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/DropDeletesCompactionScanQueryMatcher.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver.querymatcher;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/LegacyScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/LegacyScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/LegacyScanQueryMatcher.java
index 2074450..b253a52 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/LegacyScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/LegacyScanQueryMatcher.java
@@ -32,10 +32,11 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.io.TimeRange;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
import org.apache.hadoop.hbase.regionserver.ScanType;
-import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/MajorCompactionScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/MajorCompactionScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/MajorCompactionScanQueryMatcher.java
index 9830fae..5c8db05 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/MajorCompactionScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/MajorCompactionScanQueryMatcher.java
@@ -22,6 +22,7 @@ import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/MinorCompactionScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/MinorCompactionScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/MinorCompactionScanQueryMatcher.java
index 385fc18..4aacbaa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/MinorCompactionScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/MinorCompactionScanQueryMatcher.java
@@ -22,6 +22,7 @@ import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.java
index 8f5059f..512f1a8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanDeleteTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanDeleteTracker.java
index f3552c3..f51404f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanDeleteTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanDeleteTracker.java
@@ -22,6 +22,7 @@ import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java
index 76dcad1..45ca5ac 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java
@@ -26,10 +26,11 @@ import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
-import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult;
/**
* A query matcher that is specifically designed for the scan case.
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.java
index 9ae3975..b78ee05 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.java
@@ -22,6 +22,7 @@ import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
index af2a496..5b2eb8a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.procedure.ProcedureMember;
import org.apache.hadoop.hbase.procedure.Subprocedure;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.Region.FlushResult;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
index 99a0261..0f9ac19 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.procedure.RegionServerProcedureManager;
import org.apache.hadoop.hbase.procedure.Subprocedure;
import org.apache.hadoop.hbase.procedure.SubprocedureFactory;
import org.apache.hadoop.hbase.procedure.ZKProcedureMemberRpcs;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 722d9eb..dd15c3c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -91,10 +91,10 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.Region;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
index ac9dc79..ed41371 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
@@ -90,13 +90,13 @@ import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.Visibil
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService;
import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.OperationStatus;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
-import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.Superusers;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
index e9ca7dc..c2da8e0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
@@ -23,7 +23,7 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import java.util.Arrays;
import java.util.Locale;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 6ec0b90..97d0603 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.io.FileLink;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.io.WALLink;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.util.FSUtils;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index 39887c6..47b3c34 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
index b735d64..cdc1b1f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.AccessControlLists;
import org.apache.hadoop.hbase.security.access.TablePermission;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index bd3d0c2..e4c4c59 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index e4cac95..8e5b21d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -42,8 +42,8 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.regionserver.HRegion;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
index 89d0ba2..a5afb91 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
index 82ec282..3b918a0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.ByteStringer;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
index 0f09f06..56699d6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.HFileLink;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
index f5a0d9e..6099a21 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
@@ -24,7 +24,7 @@
import="org.apache.hadoop.hbase.client.HConnectionManager"
import="org.apache.hadoop.hbase.master.HMaster"
import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
- import="org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription"
+ import="org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription"
import="org.apache.hadoop.util.StringUtils"
import="org.apache.hadoop.hbase.TableName"
import="org.apache.hadoop.hbase.HBaseConfiguration" %>
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
index 289a72c..dceed8e 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
@@ -27,7 +27,7 @@
import="org.apache.hadoop.hbase.HBaseConfiguration"
import="org.apache.hadoop.hbase.client.Admin"
import="org.apache.hadoop.hbase.master.HMaster"
- import="org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription"
+ import="org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription"
import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
import="org.apache.hadoop.hbase.TableName"
import="org.apache.hadoop.util.StringUtils" %>
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
index 85c3ddf..541cb89 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
index 0851267..54745f5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
@@ -59,10 +59,10 @@ import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
index 4514ae2..7b5c415 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
index e29bef8..7d1595e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
@@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;
+import org.apache.hadoop.hbase.protobuf.generated.TableProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.MD5Hash;
@@ -240,7 +240,7 @@ public class TestHRegionInfo {
// test convert RegionInfo without replicaId
RegionInfo info = RegionInfo.newBuilder()
- .setTableName(HBaseProtos.TableName.newBuilder()
+ .setTableName(TableProtos.TableName.newBuilder()
.setQualifier(ByteString.copyFrom(tableName.getQualifier()))
.setNamespace(ByteString.copyFrom(tableName.getNamespace()))
.build())
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
index fce35bd..8c93520 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.regionserver.querymatcher;
import static org.junit.Assert.*;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index 51aeff8..095e610 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.ProcedureInfo;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
index 23e5f3f..e64c6c8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
@@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
index 06ec4a0..9847bc4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
@@ -59,7 +59,7 @@ import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
index c2139d9..a3f5382 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils.SnapshotMock;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
index 9289a02..667c015 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
index 442ac0d..c9c4202 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
@@ -23,7 +23,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
index 8746ea3..2a5bd76 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.io.HFileLink;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java
index 01452eb..8ebeb97 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.TestTableName;
import org.junit.After;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
index 68377de..6583b2c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.junit.After;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
index 461ea98..076ce40 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
@@ -27,8 +27,8 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
[08/11] hbase git commit: HBASE-18248 Warn if monitored RPC task has
been tied up beyond a configurable threshold
Posted by ap...@apache.org.
HBASE-18248 Warn if monitored RPC task has been tied up beyond a configurable threshold
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d0941127
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d0941127
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d0941127
Branch: refs/heads/master
Commit: d0941127d424bc76688aec7952388f858d917b14
Parents: 794a3b1
Author: Andrew Purtell <ap...@apache.org>
Authored: Wed Aug 9 18:11:28 2017 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Wed Aug 9 18:16:38 2017 -0700
----------------------------------------------------------------------
.../monitoring/MonitoredRPCHandlerImpl.java | 8 +-
.../hadoop/hbase/monitoring/MonitoredTask.java | 2 +
.../hbase/monitoring/MonitoredTaskImpl.java | 16 +++-
.../hadoop/hbase/monitoring/TaskMonitor.java | 88 +++++++++++++++++---
.../hbase/monitoring/TestTaskMonitor.java | 44 +++++++---
5 files changed, 130 insertions(+), 28 deletions(-)
----------------------------------------------------------------------
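For orientation before the diffs: this patch replaces the hardcoded MAX_TASKS and EXPIRATION_TIME constants with configuration keys and adds a daemon monitor thread that periodically warns about long-running RPC tasks. Below is a minimal sketch of the new surface, closely modeled on the testWarnStuckTasks case added at the end of this message. It assumes the sketch lives in the org.apache.hadoop.hbase.monitoring package (the TaskMonitor(Configuration) constructor is package-private); the interval values are illustrative, not the defaults.

package org.apache.hadoop.hbase.monitoring;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

public class TaskMonitorWarnSketch {
  public static void main(String[] args) throws InterruptedException {
    Configuration conf = new Configuration();
    // Illustrative values: warn about RPC tasks running longer than 60s,
    // with the monitor thread waking up every 10s.
    conf.setLong(TaskMonitor.RPC_WARN_TIME_KEY, 60 * 1000L);
    conf.setLong(TaskMonitor.MONITOR_INTERVAL_KEY, 10 * 1000L);
    TaskMonitor tm = new TaskMonitor(conf);

    MonitoredRPCHandler status = tm.createRPCStatus("example RPC");
    status.setRPC("exampleMethod", new Object[0], EnvironmentEdgeManager.currentTime());

    Thread.sleep(2 * 60 * 1000L);
    // While the task stayed RUNNING past the warn threshold, the monitor
    // thread logged "Task may be stuck: ..." and advanced the task's warn
    // time, so the warning repeats roughly once per rpc.warn.time interval.
    tm.shutdown(); // interrupts the daemon monitor thread
  }
}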
http://git-wip-us.apache.org/repos/asf/hbase/blob/d0941127/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
index b49df28..3ebe3b7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
@@ -251,6 +251,12 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
if (getState() != State.RUNNING) {
return super.toString();
}
- return super.toString() + ", rpcMethod=" + getRPC();
+ return super.toString()
+ + ", queuetimems=" + getRPCQueueTime()
+ + ", starttimems=" + getRPCStartTime()
+ + ", clientaddress=" + clientAddress
+ + ", remoteport=" + remotePort
+ + ", packetlength=" + getRPCPacketLength()
+ + ", rpcMethod=" + getRPC();
}
}
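The effect of the toString() change above is that a RUNNING RPC task now carries its queueing, timing, and client details into task dumps and warning logs. An illustrative rendering, following the field order built above (all values made up):

    ..., queuetimems=3, starttimems=1502327488000, clientaddress=10.0.0.5, remoteport=43512, packetlength=1024, rpcMethod=Multi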
http://git-wip-us.apache.org/repos/asf/hbase/blob/d0941127/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
index ff3667b..48fba1b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
@@ -39,6 +39,7 @@ public interface MonitoredTask extends Cloneable {
State getState();
long getStateTime();
long getCompletionTimestamp();
+ long getWarnTime();
void markComplete(String msg);
void pause(String msg);
@@ -48,6 +49,7 @@ public interface MonitoredTask extends Cloneable {
void setStatus(String status);
void setDescription(String description);
+ void setWarnTime(final long t);
/**
* Explicitly mark this status as able to be cleaned up,
http://git-wip-us.apache.org/repos/asf/hbase/blob/d0941127/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
index dda77ac..754e3d6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
@@ -30,7 +30,8 @@ class MonitoredTaskImpl implements MonitoredTask {
private long startTime;
private long statusTime;
private long stateTime;
-
+ private long warnTime;
+
private volatile String status;
private volatile String description;
@@ -42,6 +43,7 @@ class MonitoredTaskImpl implements MonitoredTask {
startTime = System.currentTimeMillis();
statusTime = startTime;
stateTime = startTime;
+ warnTime = startTime;
}
@Override
@@ -82,7 +84,12 @@ class MonitoredTaskImpl implements MonitoredTask {
public long getStateTime() {
return stateTime;
}
-
+
+ @Override
+ public long getWarnTime() {
+ return warnTime;
+ }
+
@Override
public long getCompletionTimestamp() {
if (state == State.COMPLETE || state == State.ABORTED) {
@@ -132,6 +139,11 @@ class MonitoredTaskImpl implements MonitoredTask {
}
@Override
+ public void setWarnTime(long t) {
+ this.warnTime = t;
+ }
+
+ @Override
public void cleanup() {
if (state == State.RUNNING) {
setState(State.ABORTED);
http://git-wip-us.apache.org/repos/asf/hbase/blob/d0941127/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
index dc96179..780916f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
@@ -30,9 +30,12 @@ import java.util.List;
import org.apache.commons.collections.buffer.CircularFifoBuffer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -44,16 +47,35 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
public class TaskMonitor {
private static final Log LOG = LogFactory.getLog(TaskMonitor.class);
- // Don't keep around any tasks that have completed more than
- // 60 seconds ago
- private static final long EXPIRATION_TIME = 60*1000;
+ public static final String MAX_TASKS_KEY = "hbase.taskmonitor.max.tasks";
+ public static final int DEFAULT_MAX_TASKS = 1000;
+ public static final String RPC_WARN_TIME_KEY = "hbase.taskmonitor.rpc.warn.time";
+ public static final long DEFAULT_RPC_WARN_TIME = 0;
+ public static final String EXPIRATION_TIME_KEY = "hbase.taskmonitor.expiration.time";
+ public static final long DEFAULT_EXPIRATION_TIME = 60*1000;
+ public static final String MONITOR_INTERVAL_KEY = "hbase.taskmonitor.monitor.interval";
+ public static final long DEFAULT_MONITOR_INTERVAL = 10*1000;
- @VisibleForTesting
- static final int MAX_TASKS = 1000;
-
private static TaskMonitor instance;
- private CircularFifoBuffer tasks = new CircularFifoBuffer(MAX_TASKS);
- private List<TaskAndWeakRefPair> rpcTasks = Lists.newArrayList();
+
+ private final int maxTasks;
+ private final long rpcWarnTime;
+ private final long expirationTime;
+ private final CircularFifoBuffer tasks;
+ private final List<TaskAndWeakRefPair> rpcTasks;
+ private final long monitorInterval;
+ private Thread monitorThread;
+
+ TaskMonitor(Configuration conf) {
+ maxTasks = conf.getInt(MAX_TASKS_KEY, DEFAULT_MAX_TASKS);
+ expirationTime = conf.getLong(EXPIRATION_TIME_KEY, DEFAULT_EXPIRATION_TIME);
+ rpcWarnTime = conf.getLong(RPC_WARN_TIME_KEY, DEFAULT_RPC_WARN_TIME);
+ tasks = new CircularFifoBuffer(maxTasks);
+ rpcTasks = Lists.newArrayList();
+ monitorInterval = conf.getLong(MONITOR_INTERVAL_KEY, DEFAULT_MONITOR_INTERVAL);
+ monitorThread = new Thread(new MonitorRunnable());
+ Threads.setDaemonThreadRunning(monitorThread, "Monitor thread for TaskMonitor");
+ }
/**
* Get singleton instance.
@@ -61,7 +83,7 @@ public class TaskMonitor {
*/
public static synchronized TaskMonitor get() {
if (instance == null) {
- instance = new TaskMonitor();
+ instance = new TaskMonitor(HBaseConfiguration.create());
}
return instance;
}
@@ -93,6 +115,22 @@ public class TaskMonitor {
return proxy;
}
+ private synchronized void warnStuckTasks() {
+ if (rpcWarnTime > 0) {
+ final long now = EnvironmentEdgeManager.currentTime();
+ for (Iterator<TaskAndWeakRefPair> it = rpcTasks.iterator();
+ it.hasNext();) {
+ TaskAndWeakRefPair pair = it.next();
+ MonitoredTask stat = pair.get();
+ if ((stat.getState() == MonitoredTaskImpl.State.RUNNING) &&
+ (now >= stat.getWarnTime() + rpcWarnTime)) {
+ LOG.warn("Task may be stuck: " + stat);
+ stat.setWarnTime(now);
+ }
+ }
+ }
+ }
+
private synchronized void purgeExpiredTasks() {
for (Iterator<TaskAndWeakRefPair> it = tasks.iterator();
it.hasNext();) {
@@ -139,12 +177,11 @@ public class TaskMonitor {
private boolean canPurge(MonitoredTask stat) {
long cts = stat.getCompletionTimestamp();
- return (cts > 0 && System.currentTimeMillis() - cts > EXPIRATION_TIME);
+ return (cts > 0 && EnvironmentEdgeManager.currentTime() - cts > expirationTime);
}
-
public void dumpAsText(PrintWriter out) {
- long now = System.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
List<MonitoredTask> tasks = getTasks();
for (MonitoredTask task : tasks) {
@@ -164,6 +201,12 @@ public class TaskMonitor {
}
}
+ public synchronized void shutdown() {
+ if (this.monitorThread != null) {
+ monitorThread.interrupt();
+ }
+ }
+
/**
* This class encapsulates an object as well as a weak reference to a proxy
* that passes through calls to that object. In art form:
@@ -218,4 +261,23 @@ public class TaskMonitor {
return method.invoke(delegatee, args);
}
}
+
+ private class MonitorRunnable implements Runnable {
+ private boolean running = true;
+
+ @Override
+ public void run() {
+ while (running) {
+ try {
+ Thread.sleep(monitorInterval);
+ if (tasks.isFull()) {
+ purgeExpiredTasks();
+ }
+ warnStuckTasks();
+ } catch (InterruptedException e) {
+ running = false;
+ }
+ }
+ }
+ }
}
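For quick reference, the configuration surface introduced in this file, with defaults taken from the constants above:

    hbase.taskmonitor.max.tasks          1000    (replaces the hardcoded MAX_TASKS)
    hbase.taskmonitor.rpc.warn.time      0       (0 disables stuck-task warnings)
    hbase.taskmonitor.expiration.time    60000   (replaces the hardcoded EXPIRATION_TIME, ms)
    hbase.taskmonitor.monitor.interval   10000   (monitor thread wake interval, ms)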
http://git-wip-us.apache.org/repos/asf/hbase/blob/d0941127/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
index 5464d9f..718339a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
@@ -22,8 +22,10 @@ import static org.junit.Assert.*;
import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -32,7 +34,7 @@ public class TestTaskMonitor {
@Test
public void testTaskMonitorBasics() {
- TaskMonitor tm = new TaskMonitor();
+ TaskMonitor tm = new TaskMonitor(new Configuration());
assertTrue("Task monitor should start empty",
tm.getTasks().isEmpty());
@@ -55,11 +57,13 @@ public class TestTaskMonitor {
// If we mark its completion time back a few minutes, it should get gced
task.expireNow();
assertEquals(0, tm.getTasks().size());
+
+ tm.shutdown();
}
@Test
public void testTasksGetAbortedOnLeak() throws InterruptedException {
- final TaskMonitor tm = new TaskMonitor();
+ final TaskMonitor tm = new TaskMonitor(new Configuration());
assertTrue("Task monitor should start empty",
tm.getTasks().isEmpty());
@@ -86,42 +90,58 @@ public class TestTaskMonitor {
// Now it should be aborted
MonitoredTask taskFromTm = tm.getTasks().get(0);
assertEquals(MonitoredTask.State.ABORTED, taskFromTm.getState());
+
+ tm.shutdown();
}
@Test
public void testTaskLimit() throws Exception {
- TaskMonitor tm = new TaskMonitor();
- for (int i = 0; i < TaskMonitor.MAX_TASKS + 10; i++) {
+ TaskMonitor tm = new TaskMonitor(new Configuration());
+ for (int i = 0; i < TaskMonitor.DEFAULT_MAX_TASKS + 10; i++) {
tm.createStatus("task " + i);
}
// Make sure it was limited correctly
- assertEquals(TaskMonitor.MAX_TASKS, tm.getTasks().size());
+ assertEquals(TaskMonitor.DEFAULT_MAX_TASKS, tm.getTasks().size());
// Make sure we culled the earlier tasks, not later
// (i.e. tasks 0 through 9 should have been deleted)
assertEquals("task 10", tm.getTasks().get(0).getDescription());
+ tm.shutdown();
}
@Test
public void testDoNotPurgeRPCTask() throws Exception {
int RPCTaskNums = 10;
+ TaskMonitor tm = TaskMonitor.get();
for(int i = 0; i < RPCTaskNums; i++) {
- TaskMonitor.get().createRPCStatus("PRCTask" + i);
+ tm.createRPCStatus("PRCTask" + i);
}
- for(int i = 0; i < TaskMonitor.MAX_TASKS; i++) {
- TaskMonitor.get().createStatus("otherTask" + i);
+ for(int i = 0; i < TaskMonitor.DEFAULT_MAX_TASKS; i++) {
+ tm.createStatus("otherTask" + i);
}
int remainRPCTask = 0;
- for(MonitoredTask task :TaskMonitor.get().getTasks()) {
+ for(MonitoredTask task: tm.getTasks()) {
if(task instanceof MonitoredRPCHandler) {
remainRPCTask++;
}
}
assertEquals("RPC Tasks have been purged!", RPCTaskNums, remainRPCTask);
-
+ tm.shutdown();
}
-
-
+ @Test
+ public void testWarnStuckTasks() throws Exception {
+ final int INTERVAL = 1000;
+ Configuration conf = new Configuration();
+ conf.setLong(TaskMonitor.RPC_WARN_TIME_KEY, INTERVAL);
+ conf.setLong(TaskMonitor.MONITOR_INTERVAL_KEY, INTERVAL);
+ final TaskMonitor tm = new TaskMonitor(conf);
+ MonitoredRPCHandler t = tm.createRPCStatus("test task");
+ long then = EnvironmentEdgeManager.currentTime();
+ t.setRPC("testMethod", new Object[0], then);
+ Thread.sleep(INTERVAL * 2);
+ assertTrue("We did not warn", t.getWarnTime() > then);
+ tm.shutdown();
+ }
}
[06/11] hbase git commit: HBASE-18431 Mitigate compatibility concerns
between branch-1.3 and branch-1.4
Posted by ap...@apache.org.
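The compatibility theme of this commit, visible in the import churn across the files in this thread, is that the TableName protobuf message moves out of HBaseProtos into TableProtos, while SnapshotDescription moves back from SnapshotProtos into HBaseProtos, restoring the branch-1.3 class locations. A minimal sketch of what downstream code compiles against after the change (modeled on the TestHRegionInfo hunk earlier in this series; the class name and table values are hypothetical):

package org.apache.hadoop.hbase.example; // hypothetical package

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.TableProtos;
import org.apache.hadoop.hbase.util.Bytes;

public class ProtoLocationSketch {
  public static void main(String[] args) {
    // TableName is now referenced via TableProtos, not HBaseProtos:
    TableProtos.TableName name = TableProtos.TableName.newBuilder()
        .setNamespace(ByteString.copyFrom(Bytes.toBytes("default")))
        .setQualifier(ByteString.copyFrom(Bytes.toBytes("testtable")))
        .build();
    // SnapshotDescription is imported from HBaseProtos again:
    SnapshotDescription snap = SnapshotDescription.newBuilder()
        .setName("snap1")
        .build();
    System.out.println(name.getQualifier().toStringUtf8() + " / " + snap.getName());
  }
}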
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
index 3c4fb61..b4c6c04 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
@@ -266,53 +266,123 @@ public final class HBaseProtos {
// @@protoc_insertion_point(enum_scope:hbase.pb.TimeUnit)
}
- public interface TableNameOrBuilder
+ public interface TableSchemaOrBuilder
extends com.google.protobuf.MessageOrBuilder {
- // required bytes namespace = 1;
+ // optional .hbase.pb.TableName table_name = 1;
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
+ */
+ boolean hasTableName();
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
+
+ // repeated .hbase.pb.BytesBytesPair attributes = 2;
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
+ */
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>
+ getAttributesList();
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index);
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
+ */
+ int getAttributesCount();
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
+ */
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
+ getAttributesOrBuilderList();
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder(
+ int index);
+
+ // repeated .hbase.pb.ColumnFamilySchema column_families = 3;
+ /**
+ * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
+ */
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema>
+ getColumnFamiliesList();
+ /**
+ * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index);
/**
- * <code>required bytes namespace = 1;</code>
+ * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
+ */
+ int getColumnFamiliesCount();
+ /**
+ * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
*/
- boolean hasNamespace();
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
+ getColumnFamiliesOrBuilderList();
/**
- * <code>required bytes namespace = 1;</code>
+ * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
*/
- com.google.protobuf.ByteString getNamespace();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(
+ int index);
- // required bytes qualifier = 2;
+ // repeated .hbase.pb.NameStringPair configuration = 4;
+ /**
+ * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
+ */
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>
+ getConfigurationList();
/**
- * <code>required bytes qualifier = 2;</code>
+ * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index);
+ /**
+ * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
+ */
+ int getConfigurationCount();
+ /**
+ * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
*/
- boolean hasQualifier();
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
+ getConfigurationOrBuilderList();
/**
- * <code>required bytes qualifier = 2;</code>
+ * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
*/
- com.google.protobuf.ByteString getQualifier();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
+ int index);
}
/**
- * Protobuf type {@code hbase.pb.TableName}
+ * Protobuf type {@code hbase.pb.TableSchema}
*
* <pre>
**
- * Table Name
+ * Table Schema
+ * Inspired by the rest TableSchema
* </pre>
*/
- public static final class TableName extends
+ public static final class TableSchema extends
com.google.protobuf.GeneratedMessage
- implements TableNameOrBuilder {
- // Use TableName.newBuilder() to construct.
- private TableName(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ implements TableSchemaOrBuilder {
+ // Use TableSchema.newBuilder() to construct.
+ private TableSchema(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private TableName(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private TableSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
- private static final TableName defaultInstance;
- public static TableName getDefaultInstance() {
+ private static final TableSchema defaultInstance;
+ public static TableSchema getDefaultInstance() {
return defaultInstance;
}
- public TableName getDefaultInstanceForType() {
+ public TableSchema getDefaultInstanceForType() {
return defaultInstance;
}
@@ -322,7 +392,7 @@ public final class HBaseProtos {
getUnknownFields() {
return this.unknownFields;
}
- private TableName(
+ private TableSchema(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -346,13 +416,40 @@ public final class HBaseProtos {
break;
}
case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = tableName_.toBuilder();
+ }
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(tableName_);
+ tableName_ = subBuilder.buildPartial();
+ }
bitField0_ |= 0x00000001;
- namespace_ = input.readBytes();
break;
}
case 18: {
- bitField0_ |= 0x00000002;
- qualifier_ = input.readBytes();
+ if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>();
+ mutable_bitField0_ |= 0x00000002;
+ }
+ attributes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
+ break;
+ }
+ case 26: {
+ if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ columnFamilies_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema>();
+ mutable_bitField0_ |= 0x00000004;
+ }
+ columnFamilies_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry));
+ break;
+ }
+ case 34: {
+ if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+ configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>();
+ mutable_bitField0_ |= 0x00000008;
+ }
+ configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry));
break;
}
}
@@ -363,86 +460,211 @@ public final class HBaseProtos {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
+ if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ attributes_ = java.util.Collections.unmodifiableList(attributes_);
+ }
+ if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_);
+ }
+ if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+ configuration_ = java.util.Collections.unmodifiableList(configuration_);
+ }
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor;
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_fieldAccessorTable
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable
.ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder.class);
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class);
}
- public static com.google.protobuf.Parser<TableName> PARSER =
- new com.google.protobuf.AbstractParser<TableName>() {
- public TableName parsePartialFrom(
+ public static com.google.protobuf.Parser<TableSchema> PARSER =
+ new com.google.protobuf.AbstractParser<TableSchema>() {
+ public TableSchema parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return new TableName(input, extensionRegistry);
+ return new TableSchema(input, extensionRegistry);
}
};
@java.lang.Override
- public com.google.protobuf.Parser<TableName> getParserForType() {
+ public com.google.protobuf.Parser<TableSchema> getParserForType() {
return PARSER;
}
private int bitField0_;
- // required bytes namespace = 1;
- public static final int NAMESPACE_FIELD_NUMBER = 1;
- private com.google.protobuf.ByteString namespace_;
+ // optional .hbase.pb.TableName table_name = 1;
+ public static final int TABLE_NAME_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
- * <code>required bytes namespace = 1;</code>
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public boolean hasNamespace() {
+ public boolean hasTableName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
- * <code>required bytes namespace = 1;</code>
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
+ return tableName_;
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public com.google.protobuf.ByteString getNamespace() {
- return namespace_;
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ return tableName_;
}
- // required bytes qualifier = 2;
- public static final int QUALIFIER_FIELD_NUMBER = 2;
- private com.google.protobuf.ByteString qualifier_;
+ // repeated .hbase.pb.BytesBytesPair attributes = 2;
+ public static final int ATTRIBUTES_FIELD_NUMBER = 2;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_;
/**
- * <code>required bytes qualifier = 2;</code>
+ * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
*/
- public boolean hasQualifier() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() {
+ return attributes_;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
+ getAttributesOrBuilderList() {
+ return attributes_;
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
+ */
+ public int getAttributesCount() {
+ return attributes_.size();
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) {
+ return attributes_.get(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder(
+ int index) {
+ return attributes_.get(index);
+ }
+
+ // repeated .hbase.pb.ColumnFamilySchema column_families = 3;
+ public static final int COLUMN_FAMILIES_FIELD_NUMBER = 3;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_;
+ /**
+ * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList() {
+ return columnFamilies_;
+ }
+ /**
+ * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
+ getColumnFamiliesOrBuilderList() {
+ return columnFamilies_;
+ }
+ /**
+ * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
+ */
+ public int getColumnFamiliesCount() {
+ return columnFamilies_.size();
+ }
+ /**
+ * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) {
+ return columnFamilies_.get(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(
+ int index) {
+ return columnFamilies_.get(index);
+ }
+
+ // repeated .hbase.pb.NameStringPair configuration = 4;
+ public static final int CONFIGURATION_FIELD_NUMBER = 4;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_;
+ /**
+ * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
+ return configuration_;
+ }
+ /**
+ * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
+ getConfigurationOrBuilderList() {
+ return configuration_;
+ }
+ /**
+ * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
+ */
+ public int getConfigurationCount() {
+ return configuration_.size();
+ }
+ /**
+ * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
+ return configuration_.get(index);
}
/**
- * <code>required bytes qualifier = 2;</code>
+ * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
*/
- public com.google.protobuf.ByteString getQualifier() {
- return qualifier_;
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
+ int index) {
+ return configuration_.get(index);
}
private void initFields() {
- namespace_ = com.google.protobuf.ByteString.EMPTY;
- qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
+ attributes_ = java.util.Collections.emptyList();
+ columnFamilies_ = java.util.Collections.emptyList();
+ configuration_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
- if (!hasNamespace()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (!hasQualifier()) {
- memoizedIsInitialized = 0;
- return false;
+ if (hasTableName()) {
+ if (!getTableName().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ for (int i = 0; i < getAttributesCount(); i++) {
+ if (!getAttributes(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ for (int i = 0; i < getColumnFamiliesCount(); i++) {
+ if (!getColumnFamilies(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ for (int i = 0; i < getConfigurationCount(); i++) {
+ if (!getConfiguration(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
}
memoizedIsInitialized = 1;
return true;
@@ -452,10 +674,16 @@ public final class HBaseProtos {
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, namespace_);
+ output.writeMessage(1, tableName_);
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeBytes(2, qualifier_);
+ for (int i = 0; i < attributes_.size(); i++) {
+ output.writeMessage(2, attributes_.get(i));
+ }
+ for (int i = 0; i < columnFamilies_.size(); i++) {
+ output.writeMessage(3, columnFamilies_.get(i));
+ }
+ for (int i = 0; i < configuration_.size(); i++) {
+ output.writeMessage(4, configuration_.get(i));
}
getUnknownFields().writeTo(output);
}
@@ -468,11 +696,19 @@ public final class HBaseProtos {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, namespace_);
+ .computeMessageSize(1, tableName_);
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ for (int i = 0; i < attributes_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, attributes_.get(i));
+ }
+ for (int i = 0; i < columnFamilies_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, columnFamilies_.get(i));
+ }
+ for (int i = 0; i < configuration_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, qualifier_);
+ .computeMessageSize(4, configuration_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
@@ -491,22 +727,23 @@ public final class HBaseProtos {
if (obj == this) {
return true;
}
- if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName)) {
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)) {
return super.equals(obj);
}
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName) obj;
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) obj;
boolean result = true;
- result = result && (hasNamespace() == other.hasNamespace());
- if (hasNamespace()) {
- result = result && getNamespace()
- .equals(other.getNamespace());
- }
- result = result && (hasQualifier() == other.hasQualifier());
- if (hasQualifier()) {
- result = result && getQualifier()
- .equals(other.getQualifier());
+ result = result && (hasTableName() == other.hasTableName());
+ if (hasTableName()) {
+ result = result && getTableName()
+ .equals(other.getTableName());
}
+ result = result && getAttributesList()
+ .equals(other.getAttributesList());
+ result = result && getColumnFamiliesList()
+ .equals(other.getColumnFamiliesList());
+ result = result && getConfigurationList()
+ .equals(other.getConfigurationList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
@@ -520,66 +757,74 @@ public final class HBaseProtos {
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasNamespace()) {
- hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
- hash = (53 * hash) + getNamespace().hashCode();
+ if (hasTableName()) {
+ hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getTableName().hashCode();
+ }
+ if (getAttributesCount() > 0) {
+ hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
+ hash = (53 * hash) + getAttributesList().hashCode();
+ }
+ if (getColumnFamiliesCount() > 0) {
+ hash = (37 * hash) + COLUMN_FAMILIES_FIELD_NUMBER;
+ hash = (53 * hash) + getColumnFamiliesList().hashCode();
}
- if (hasQualifier()) {
- hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
- hash = (53 * hash) + getQualifier().hashCode();
+ if (getConfigurationCount() > 0) {
+ hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER;
+ hash = (53 * hash) + getConfigurationList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(byte[] data)
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(java.io.InputStream input)
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom(java.io.InputStream input)
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
@@ -588,7 +833,7 @@ public final class HBaseProtos {
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName prototype) {
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@@ -600,29 +845,30 @@ public final class HBaseProtos {
return builder;
}
/**
- * Protobuf type {@code hbase.pb.TableName}
+ * Protobuf type {@code hbase.pb.TableSchema}
*
* <pre>
**
- * Table Name
+ * Table Schema
+ * Inspired by the rest TableSchema
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder {
+ implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor;
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_fieldAccessorTable
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable
.ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder.class);
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class);
}
- // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder()
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
@@ -634,6 +880,10 @@ public final class HBaseProtos {
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getTableNameFieldBuilder();
+ getAttributesFieldBuilder();
+ getColumnFamiliesFieldBuilder();
+ getConfigurationFieldBuilder();
}
}
private static Builder create() {
@@ -642,10 +892,30 @@ public final class HBaseProtos {
public Builder clear() {
super.clear();
- namespace_ = com.google.protobuf.ByteString.EMPTY;
+ if (tableNameBuilder_ == null) {
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
+ } else {
+ tableNameBuilder_.clear();
+ }
bitField0_ = (bitField0_ & ~0x00000001);
- qualifier_ = com.google.protobuf.ByteString.EMPTY;
- bitField0_ = (bitField0_ & ~0x00000002);
+ if (attributesBuilder_ == null) {
+ attributes_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ attributesBuilder_.clear();
+ }
+ if (columnFamiliesBuilder_ == null) {
+ columnFamilies_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000004);
+ } else {
+ columnFamiliesBuilder_.clear();
+ }
+ if (configurationBuilder_ == null) {
+ configuration_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000008);
+ } else {
+ configurationBuilder_.clear();
+ }
return this;
}
@@ -655,67 +925,185 @@ public final class HBaseProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor;
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor;
}
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getDefaultInstanceForType() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
}
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName build() {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName result = buildPartial();
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema build() {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName buildPartial() {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName(this);
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
- result.namespace_ = namespace_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000002;
+ if (tableNameBuilder_ == null) {
+ result.tableName_ = tableName_;
+ } else {
+ result.tableName_ = tableNameBuilder_.build();
+ }
+ if (attributesBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ attributes_ = java.util.Collections.unmodifiableList(attributes_);
+ bitField0_ = (bitField0_ & ~0x00000002);
+ }
+ result.attributes_ = attributes_;
+ } else {
+ result.attributes_ = attributesBuilder_.build();
+ }
+ if (columnFamiliesBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_);
+ bitField0_ = (bitField0_ & ~0x00000004);
+ }
+ result.columnFamilies_ = columnFamilies_;
+ } else {
+ result.columnFamilies_ = columnFamiliesBuilder_.build();
+ }
+ if (configurationBuilder_ == null) {
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ configuration_ = java.util.Collections.unmodifiableList(configuration_);
+ bitField0_ = (bitField0_ & ~0x00000008);
+ }
+ result.configuration_ = configuration_;
+ } else {
+ result.configuration_ = configurationBuilder_.build();
}
- result.qualifier_ = qualifier_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName) {
- return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName)other);
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)other);
} else {
super.mergeFrom(other);
return this;
}
}
- public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName other) {
- if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) return this;
- if (other.hasNamespace()) {
- setNamespace(other.getNamespace());
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) return this;
+ if (other.hasTableName()) {
+ mergeTableName(other.getTableName());
+ }
+ if (attributesBuilder_ == null) {
+ if (!other.attributes_.isEmpty()) {
+ if (attributes_.isEmpty()) {
+ attributes_ = other.attributes_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ ensureAttributesIsMutable();
+ attributes_.addAll(other.attributes_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.attributes_.isEmpty()) {
+ if (attributesBuilder_.isEmpty()) {
+ attributesBuilder_.dispose();
+ attributesBuilder_ = null;
+ attributes_ = other.attributes_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ attributesBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getAttributesFieldBuilder() : null;
+ } else {
+ attributesBuilder_.addAllMessages(other.attributes_);
+ }
+ }
+ }
+ if (columnFamiliesBuilder_ == null) {
+ if (!other.columnFamilies_.isEmpty()) {
+ if (columnFamilies_.isEmpty()) {
+ columnFamilies_ = other.columnFamilies_;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ } else {
+ ensureColumnFamiliesIsMutable();
+ columnFamilies_.addAll(other.columnFamilies_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.columnFamilies_.isEmpty()) {
+ if (columnFamiliesBuilder_.isEmpty()) {
+ columnFamiliesBuilder_.dispose();
+ columnFamiliesBuilder_ = null;
+ columnFamilies_ = other.columnFamilies_;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ columnFamiliesBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getColumnFamiliesFieldBuilder() : null;
+ } else {
+ columnFamiliesBuilder_.addAllMessages(other.columnFamilies_);
+ }
+ }
}
- if (other.hasQualifier()) {
- setQualifier(other.getQualifier());
+ if (configurationBuilder_ == null) {
+ if (!other.configuration_.isEmpty()) {
+ if (configuration_.isEmpty()) {
+ configuration_ = other.configuration_;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ } else {
+ ensureConfigurationIsMutable();
+ configuration_.addAll(other.configuration_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.configuration_.isEmpty()) {
+ if (configurationBuilder_.isEmpty()) {
+ configurationBuilder_.dispose();
+ configurationBuilder_ = null;
+ configuration_ = other.configuration_;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ configurationBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getConfigurationFieldBuilder() : null;
+ } else {
+ configurationBuilder_.addAllMessages(other.configuration_);
+ }
+ }
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
- if (!hasNamespace()) {
-
- return false;
+ if (hasTableName()) {
+ if (!getTableName().isInitialized()) {
+
+ return false;
+ }
}
- if (!hasQualifier()) {
-
- return false;
+ for (int i = 0; i < getAttributesCount(); i++) {
+ if (!getAttributes(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ for (int i = 0; i < getColumnFamiliesCount(); i++) {
+ if (!getColumnFamilies(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ for (int i = 0; i < getConfigurationCount(); i++) {
+ if (!getConfiguration(i).isInitialized()) {
+
+ return false;
+ }
}
return true;
}
@@ -724,11 +1112,11 @@ public final class HBaseProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parsedMessage = null;
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName) e.getUnfinishedMessage();
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
@@ -739,1079 +1127,135 @@ public final class HBaseProtos {
}
private int bitField0_;
- // required bytes namespace = 1;
- private com.google.protobuf.ByteString namespace_ = com.google.protobuf.ByteString.EMPTY;
+ // optional .hbase.pb.TableName table_name = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
- * <code>required bytes namespace = 1;</code>
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public boolean hasNamespace() {
+ public boolean hasTableName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
- * <code>required bytes namespace = 1;</code>
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public com.google.protobuf.ByteString getNamespace() {
- return namespace_;
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
+ if (tableNameBuilder_ == null) {
+ return tableName_;
+ } else {
+ return tableNameBuilder_.getMessage();
+ }
}
/**
- * <code>required bytes namespace = 1;</code>
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setNamespace(com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- namespace_ = value;
- onChanged();
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ tableName_ = value;
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
return this;
}
/**
- * <code>required bytes namespace = 1;</code>
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder clearNamespace() {
- bitField0_ = (bitField0_ & ~0x00000001);
- namespace_ = getDefaultInstance().getNamespace();
- onChanged();
+ public Builder setTableName(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ tableName_ = builderForValue.build();
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
return this;
}
-
- // required bytes qualifier = 2;
- private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
/**
- * <code>required bytes qualifier = 2;</code>
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public boolean hasQualifier() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
+ tableName_ =
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ } else {
+ tableName_ = value;
+ }
+ onChanged();
+ } else {
+ tableNameBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
}
/**
- * <code>required bytes qualifier = 2;</code>
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public com.google.protobuf.ByteString getQualifier() {
- return qualifier_;
+ public Builder clearTableName() {
+ if (tableNameBuilder_ == null) {
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
+ onChanged();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
}
/**
- * <code>required bytes qualifier = 2;</code>
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setQualifier(com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000002;
- qualifier_ = value;
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
+ bitField0_ |= 0x00000001;
onChanged();
- return this;
+ return getTableNameFieldBuilder().getBuilder();
}
/**
- * <code>required bytes qualifier = 2;</code>
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder clearQualifier() {
- bitField0_ = (bitField0_ & ~0x00000002);
- qualifier_ = getDefaultInstance().getQualifier();
- onChanged();
- return this;
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ if (tableNameBuilder_ != null) {
+ return tableNameBuilder_.getMessageOrBuilder();
+ } else {
+ return tableName_;
+ }
+ }
+ /**
+ * <code>optional .hbase.pb.TableName table_name = 1;</code>
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
+ getTableNameFieldBuilder() {
+ if (tableNameBuilder_ == null) {
+ tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
+ tableName_,
+ getParentForChildren(),
+ isClean());
+ tableName_ = null;
+ }
+ return tableNameBuilder_;
}
- // @@protoc_insertion_point(builder_scope:hbase.pb.TableName)
- }
-
- static {
- defaultInstance = new TableName(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:hbase.pb.TableName)
- }
-
- public interface TableSchemaOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // optional .hbase.pb.TableName table_name = 1;
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- boolean hasTableName();
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
-
- // repeated .hbase.pb.BytesBytesPair attributes = 2;
- /**
- * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
- */
- java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>
- getAttributesList();
- /**
- * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index);
- /**
- * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
- */
- int getAttributesCount();
- /**
- * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
- */
- java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
- getAttributesOrBuilderList();
- /**
- * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder(
- int index);
-
- // repeated .hbase.pb.ColumnFamilySchema column_families = 3;
- /**
- * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
- */
- java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema>
- getColumnFamiliesList();
- /**
- * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index);
- /**
- * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
- */
- int getColumnFamiliesCount();
- /**
- * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
- */
- java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
- getColumnFamiliesOrBuilderList();
- /**
- * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(
- int index);
-
- // repeated .hbase.pb.NameStringPair configuration = 4;
- /**
- * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
- */
- java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>
- getConfigurationList();
- /**
- * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index);
- /**
- * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
- */
- int getConfigurationCount();
- /**
- * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
- */
- java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
- getConfigurationOrBuilderList();
- /**
- * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
- int index);
- }
- /**
- * Protobuf type {@code hbase.pb.TableSchema}
- *
- * <pre>
- **
- * Table Schema
- * Inspired by the rest TableSchema
- * </pre>
- */
- public static final class TableSchema extends
- com.google.protobuf.GeneratedMessage
- implements TableSchemaOrBuilder {
- // Use TableSchema.newBuilder() to construct.
- private TableSchema(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
- super(builder);
- this.unknownFields = builder.getUnknownFields();
- }
- private TableSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final TableSchema defaultInstance;
- public static TableSchema getDefaultInstance() {
- return defaultInstance;
- }
+ // repeated .hbase.pb.BytesBytesPair attributes = 2;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_ =
+ java.util.Collections.emptyList();
+ private void ensureAttributesIsMutable() {
+ if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+ attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(attributes_);
+ bitField0_ |= 0x00000002;
+ }
+ }
- public TableSchema getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private TableSchema(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- subBuilder = tableName_.toBuilder();
- }
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
- if (subBuilder != null) {
- subBuilder.mergeFrom(tableName_);
- tableName_ = subBuilder.buildPartial();
- }
- bitField0_ |= 0x00000001;
- break;
- }
- case 18: {
- if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
- attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>();
- mutable_bitField0_ |= 0x00000002;
- }
- attributes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
- break;
- }
- case 26: {
- if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
- columnFamilies_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema>();
- mutable_bitField0_ |= 0x00000004;
- }
- columnFamilies_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry));
- break;
- }
- case 34: {
- if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
- configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>();
- mutable_bitField0_ |= 0x00000008;
- }
- configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry));
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
- attributes_ = java.util.Collections.unmodifiableList(attributes_);
- }
- if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
- columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_);
- }
- if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
- configuration_ = java.util.Collections.unmodifiableList(configuration_);
- }
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class);
- }
-
- public static com.google.protobuf.Parser<TableSchema> PARSER =
- new com.google.protobuf.AbstractParser<TableSchema>() {
- public TableSchema parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new TableSchema(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<TableSchema> getParserForType() {
- return PARSER;
- }
-
- private int bitField0_;
- // optional .hbase.pb.TableName table_name = 1;
- public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- public boolean hasTableName() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
- return tableName_;
- }
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
- return tableName_;
- }
-
- // repeated .hbase.pb.BytesBytesPair attributes = 2;
- public static final int ATTRIBUTES_FIELD_NUMBER = 2;
- private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_;
- /**
- * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
- */
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() {
- return attributes_;
- }
- /**
- * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
- */
- public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
- getAttributesOrBuilderList() {
- return attributes_;
- }
- /**
- * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
- */
- public int getAttributesCount() {
- return attributes_.size();
- }
- /**
- * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) {
- return attributes_.get(index);
- }
- /**
- * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder(
- int index) {
- return attributes_.get(index);
- }
-
- // repeated .hbase.pb.ColumnFamilySchema column_families = 3;
- public static final int COLUMN_FAMILIES_FIELD_NUMBER = 3;
- private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_;
- /**
- * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
- */
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList() {
- return columnFamilies_;
- }
- /**
- * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
- */
- public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
- getColumnFamiliesOrBuilderList() {
- return columnFamilies_;
- }
- /**
- * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
- */
- public int getColumnFamiliesCount() {
- return columnFamilies_.size();
- }
- /**
- * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) {
- return columnFamilies_.get(index);
- }
- /**
- * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(
- int index) {
- return columnFamilies_.get(index);
- }
-
- // repeated .hbase.pb.NameStringPair configuration = 4;
- public static final int CONFIGURATION_FIELD_NUMBER = 4;
- private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_;
- /**
- * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
- */
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
- return configuration_;
- }
- /**
- * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
- */
- public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
- getConfigurationOrBuilderList() {
- return configuration_;
- }
- /**
- * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
- */
- public int getConfigurationCount() {
- return configuration_.size();
- }
- /**
- * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
- return configuration_.get(index);
- }
- /**
- * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
- int index) {
- return configuration_.get(index);
- }
-
- private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
- attributes_ = java.util.Collections.emptyList();
- columnFamilies_ = java.util.Collections.emptyList();
- configuration_ = java.util.Collections.emptyList();
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (hasTableName()) {
- if (!getTableName().isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- }
- for (int i = 0; i < getAttributesCount(); i++) {
- if (!getAttributes(i).isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- }
- for (int i = 0; i < getColumnFamiliesCount(); i++) {
- if (!getColumnFamilies(i).isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- }
- for (int i = 0; i < getConfigurationCount(); i++) {
- if (!getConfiguration(i).isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeMessage(1, tableName_);
- }
- for (int i = 0; i < attributes_.size(); i++) {
- output.writeMessage(2, attributes_.get(i));
- }
- for (int i = 0; i < columnFamilies_.size(); i++) {
- output.writeMessage(3, columnFamilies_.get(i));
- }
- for (int i = 0; i < configuration_.size(); i++) {
- output.writeMessage(4, configuration_.get(i));
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, tableName_);
- }
- for (int i = 0; i < attributes_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(2, attributes_.get(i));
- }
- for (int i = 0; i < columnFamilies_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(3, columnFamilies_.get(i));
- }
- for (int i = 0; i < configuration_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(4, configuration_.get(i));
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)) {
- return super.equals(obj);
- }
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) obj;
-
- boolean result = true;
- result = result && (hasTableName() == other.hasTableName());
- if (hasTableName()) {
- result = result && getTableName()
- .equals(other.getTableName());
- }
- result = result && getAttributesList()
- .equals(other.getAttributesList());
- result = result && getColumnFamiliesList()
- .equals(other.getColumnFamiliesList());
- result = result && getConfigurationList()
- .equals(other.getConfigurationList());
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- private int memoizedHashCode = 0;
- @java.lang.Override
- public int hashCode() {
- if (memoizedHashCode != 0) {
- return memoizedHashCode;
- }
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasTableName()) {
- hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
- hash = (53 * hash) + getTableName().hashCode();
- }
- if (getAttributesCount() > 0) {
- hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
- hash = (53 * hash) + getAttributesList().hashCode();
- }
- if (getColumnFamiliesCount() > 0) {
- hash = (37 * hash) + COLUMN_FAMILIES_FIELD_NUMBER;
- hash = (53 * hash) + getColumnFamiliesList().hashCode();
- }
- if (getConfigurationCount() > 0) {
- hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER;
- hash = (53 * hash) + getConfigurationList().hashCode();
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- memoizedHashCode = hash;
- return hash;
- }
-
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- /**
- * Protobuf type {@code hbase.pb.TableSchema}
- *
- * <pre>
- **
- * Table Schema
- * Inspired by the rest TableSchema
- * </pre>
- */
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class);
- }
-
- // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- getTableNameFieldBuilder();
- getAttributesFieldBuilder();
- getColumnFamiliesFieldBuilder();
- getConfigurationFieldBuilder();
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
- } else {
- tableNameBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000001);
- if (attributesBuilder_ == null) {
- attributes_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000002);
- } else {
- attributesBuilder_.clear();
- }
- if (columnFamiliesBuilder_ == null) {
- columnFamilies_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000004);
- } else {
- columnFamiliesBuilder_.clear();
- }
- if (configurationBuilder_ == null) {
- configuration_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000008);
- } else {
- configurationBuilder_.clear();
- }
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor;
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getDefaultInstanceForType() {
- return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema build() {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildPartial() {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- if (tableNameBuilder_ == null) {
- result.tableName_ = tableName_;
- } else {
- result.tableName_ = tableNameBuilder_.build();
- }
- if (attributesBuilder_ == null) {
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- attributes_ = java.util.Collections.unmodifiableList(attributes_);
- bitField0_ = (bitField0_ & ~0x00000002);
- }
- result.attributes_ = attributes_;
- } else {
- result.attributes_ = attributesBuilder_.build();
- }
- if (columnFamiliesBuilder_ == null) {
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_);
- bitField0_ = (bitField0_ & ~0x00000004);
- }
- result.columnFamilies_ = columnFamilies_;
- } else {
- result.columnFamilies_ = columnFamiliesBuilder_.build();
- }
- if (configurationBuilder_ == null) {
- if (((bitField0_ & 0x00000008) == 0x00000008)) {
- configuration_ = java.util.Collections.unmodifiableList(configuration_);
- bitField0_ = (bitField0_ & ~0x00000008);
- }
- result.configuration_ = configuration_;
- } else {
- result.configuration_ = configurationBuilder_.build();
- }
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) {
- return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other) {
- if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) return this;
- if (other.hasTableName()) {
- mergeTableName(other.getTableName());
- }
- if (attributesBuilder_ == null) {
- if (!other.attributes_.isEmpty()) {
- if (attributes_.isEmpty()) {
- attributes_ = other.attributes_;
- bitField0_ = (bitField0_ & ~0x00000002);
- } else {
- ensureAttributesIsMutable();
- attributes_.addAll(other.attributes_);
- }
- onChanged();
- }
- } else {
- if (!other.attributes_.isEmpty()) {
- if (attributesBuilder_.isEmpty()) {
- attributesBuilder_.dispose();
- attributesBuilder_ = null;
- attributes_ = other.attributes_;
- bitField0_ = (bitField0_ & ~0x00000002);
- attributesBuilder_ =
- com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
- getAttributesFieldBuilder() : null;
- } else {
- attributesBuilder_.addAllMessages(other.attributes_);
- }
- }
- }
- if (columnFamiliesBuilder_ == null) {
- if (!other.columnFamilies_.isEmpty()) {
- if (columnFamilies_.isEmpty()) {
- columnFamilies_ = other.columnFamilies_;
- bitField0_ = (bitField0_ & ~0x00000004);
- } else {
- ensureColumnFamiliesIsMutable();
- columnFamilies_.addAll(other.columnFamilies_);
- }
- onChanged();
- }
- } else {
- if (!other.columnFamilies_.isEmpty()) {
- if (columnFamiliesBuilder_.isEmpty()) {
- columnFamiliesBuilder_.dispose();
- columnFamiliesBuilder_ = null;
- columnFamilies_ = other.columnFamilies_;
- bitField0_ = (bitField0_ & ~0x00000004);
- columnFamiliesBuilder_ =
- com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
- getColumnFamiliesFieldBuilder() : null;
- } else {
- columnFamiliesBuilder_.addAllMessages(other.columnFamilies_);
- }
- }
- }
- if (configurationBuilder_ == null) {
- if (!other.configuration_.isEmpty()) {
- if (configuration_.isEmpty()) {
- configuration_ = other.configuration_;
- bitField0_ = (bitField0_ & ~0x00000008);
- } else {
- ensureConfigurationIsMutable();
- configuration_.addAll(other.configuration_);
- }
- onChanged();
- }
- } else {
- if (!other.configuration_.isEmpty()) {
- if (configurationBuilder_.isEmpty()) {
- configurationBuilder_.dispose();
- configurationBuilder_ = null;
- configuration_ = other.configuration_;
- bitField0_ = (bitField0_ & ~0x00000008);
- configurationBuilder_ =
- com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
- getConfigurationFieldBuilder() : null;
- } else {
- configurationBuilder_.addAllMessages(other.configuration_);
- }
- }
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (hasTableName()) {
- if (!getTableName().isInitialized()) {
-
- return false;
- }
- }
- for (int i = 0; i < getAttributesCount(); i++) {
- if (!getAttributes(i).isInitialized()) {
-
- return false;
- }
- }
- for (int i = 0; i < getColumnFamiliesCount(); i++) {
- if (!getColumnFamilies(i).isInitialized()) {
-
- return false;
- }
- }
- for (int i = 0; i < getConfigurationCount(); i++) {
- if (!getConfiguration(i).isInitialized()) {
-
- return false;
- }
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
- }
- }
- return this;
- }
- private int bitField0_;
-
- // optional .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- public boolean hasTableName() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
- if (tableNameBuilder_ == null) {
- return tableName_;
- } else {
- return tableNameBuilder_.getMessage();
- }
- }
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
- if (tableNameBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- tableName_ = value;
- onChanged();
- } else {
- tableNameBuilder_.setMessage(value);
- }
- bitField0_ |= 0x00000001;
- return this;
- }
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
- if (tableNameBuilder_ == null) {
- tableName_ = builderForValue.build();
- onChanged();
- } else {
- tableNameBuilder_.setMessage(builderForValue.build());
- }
- bitField0_ |= 0x00000001;
- return this;
- }
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
- if (tableNameBuilder_ == null) {
- if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
- tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
- } else {
- tableName_ = value;
- }
- onChanged();
- } else {
- tableNameBuilder_.mergeFrom(value);
- }
- bitField0_ |= 0x00000001;
- return this;
- }
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- public Builder clearTableName() {
- if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
- onChanged();
- } else {
- tableNameBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000001);
- return this;
- }
- /**
- * <code>optional .hbase.pb.TableName table_name = 1;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
- bitField0_ |= 0x00000001;
- onChanged();
- return getTableNameFieldBuilder().getBuilder();
- }
- /**
- * <code>optional .hbase.pb
<TRUNCATED>
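The regenerated code above relocates the hbase.pb.TableName message from HBaseProtos into the standalone TableProtos class, and HBaseProtos.TableSchema now consumes it through the usual single-field builder plumbing. A minimal client-side sketch of what that looks like after the move, assuming the relocated TableProtos.TableName kept the required namespace and qualifier byte fields shown in the removed HBaseProtos.TableName lines:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
import org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName;

public class TableSchemaSketch {
  public static void main(String[] args) {
    // Assumed field layout: TableProtos.TableName carries the same
    // required namespace/qualifier bytes as the old HBaseProtos.TableName.
    TableName name = TableName.newBuilder()
        .setNamespace(ByteString.copyFromUtf8("default"))
        .setQualifier(ByteString.copyFromUtf8("mytable"))
        .build();
    // TableSchema.Builder#setTableName now accepts TableProtos.TableName,
    // matching the generated setter in the diff above.
    TableSchema schema = TableSchema.newBuilder()
        .setTableName(name)
        .build();
    System.out.println(schema);
  }
}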
[09/11] hbase git commit: HBASE-18248 Warn if monitored RPC task has been tied up beyond a configurable threshold
Posted by ap...@apache.org.
HBASE-18248 Warn if monitored RPC task has been tied up beyond a configurable threshold
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a9021755
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a9021755
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a9021755
Branch: refs/heads/branch-2
Commit: a90217555346bb0768bfb410bb0d1fa02d6d562d
Parents: f855b51
Author: Andrew Purtell <ap...@apache.org>
Authored: Wed Aug 9 18:11:28 2017 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Wed Aug 9 18:16:57 2017 -0700
----------------------------------------------------------------------
.../monitoring/MonitoredRPCHandlerImpl.java | 8 +-
.../hadoop/hbase/monitoring/MonitoredTask.java | 2 +
.../hbase/monitoring/MonitoredTaskImpl.java | 16 +++-
.../hadoop/hbase/monitoring/TaskMonitor.java | 88 +++++++++++++++++---
.../hbase/monitoring/TestTaskMonitor.java | 44 +++++++---
5 files changed, 130 insertions(+), 28 deletions(-)
----------------------------------------------------------------------
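Before the per-file diffs: this change replaces TaskMonitor's hardcoded limits with four configuration keys. The key names and defaults below come straight from the constants added to TaskMonitor.java; a sketch of tuning them programmatically, with illustrative values:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class TaskMonitorConfigSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Warn about RPC tasks tied up longer than 60s; the default of 0
    // leaves warnings disabled (warnStuckTasks checks rpcWarnTime > 0).
    conf.setLong("hbase.taskmonitor.rpc.warn.time", 60 * 1000L);
    // Wake the background monitor thread every 10s (the default).
    conf.setLong("hbase.taskmonitor.monitor.interval", 10 * 1000L);
    // Purge completed tasks after 60s (the default).
    conf.setLong("hbase.taskmonitor.expiration.time", 60 * 1000L);
    // Keep at most 1000 tasks in the circular buffer (the default).
    conf.setInt("hbase.taskmonitor.max.tasks", 1000);
  }
}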
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9021755/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
index b49df28..3ebe3b7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
@@ -251,6 +251,12 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
if (getState() != State.RUNNING) {
return super.toString();
}
- return super.toString() + ", rpcMethod=" + getRPC();
+ return super.toString()
+ + ", queuetimems=" + getRPCQueueTime()
+ + ", starttimems=" + getRPCStartTime()
+ + ", clientaddress=" + clientAddress
+ + ", remoteport=" + remotePort
+ + ", packetlength=" + getRPCPacketLength()
+ + ", rpcMethod=" + getRPC();
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9021755/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
index ff3667b..48fba1b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
@@ -39,6 +39,7 @@ public interface MonitoredTask extends Cloneable {
State getState();
long getStateTime();
long getCompletionTimestamp();
+ long getWarnTime();
void markComplete(String msg);
void pause(String msg);
@@ -48,6 +49,7 @@ public interface MonitoredTask extends Cloneable {
void setStatus(String status);
void setDescription(String description);
+ void setWarnTime(final long t);
/**
* Explicitly mark this status as able to be cleaned up,
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9021755/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
index dda77ac..754e3d6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
@@ -30,7 +30,8 @@ class MonitoredTaskImpl implements MonitoredTask {
private long startTime;
private long statusTime;
private long stateTime;
-
+ private long warnTime;
+
private volatile String status;
private volatile String description;
@@ -42,6 +43,7 @@ class MonitoredTaskImpl implements MonitoredTask {
startTime = System.currentTimeMillis();
statusTime = startTime;
stateTime = startTime;
+ warnTime = startTime;
}
@Override
@@ -82,7 +84,12 @@ class MonitoredTaskImpl implements MonitoredTask {
public long getStateTime() {
return stateTime;
}
-
+
+ @Override
+ public long getWarnTime() {
+ return warnTime;
+ }
+
@Override
public long getCompletionTimestamp() {
if (state == State.COMPLETE || state == State.ABORTED) {
@@ -132,6 +139,11 @@ class MonitoredTaskImpl implements MonitoredTask {
}
@Override
+ public void setWarnTime(long t) {
+ this.warnTime = t;
+ }
+
+ @Override
public void cleanup() {
if (state == State.RUNNING) {
setState(State.ABORTED);
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9021755/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
index dc96179..780916f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
@@ -30,9 +30,12 @@ import java.util.List;
import org.apache.commons.collections.buffer.CircularFifoBuffer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -44,16 +47,35 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
public class TaskMonitor {
private static final Log LOG = LogFactory.getLog(TaskMonitor.class);
- // Don't keep around any tasks that have completed more than
- // 60 seconds ago
- private static final long EXPIRATION_TIME = 60*1000;
+ public static final String MAX_TASKS_KEY = "hbase.taskmonitor.max.tasks";
+ public static final int DEFAULT_MAX_TASKS = 1000;
+ public static final String RPC_WARN_TIME_KEY = "hbase.taskmonitor.rpc.warn.time";
+ public static final long DEFAULT_RPC_WARN_TIME = 0;
+ public static final String EXPIRATION_TIME_KEY = "hbase.taskmonitor.expiration.time";
+ public static final long DEFAULT_EXPIRATION_TIME = 60*1000;
+ public static final String MONITOR_INTERVAL_KEY = "hbase.taskmonitor.monitor.interval";
+ public static final long DEFAULT_MONITOR_INTERVAL = 10*1000;
- @VisibleForTesting
- static final int MAX_TASKS = 1000;
-
private static TaskMonitor instance;
- private CircularFifoBuffer tasks = new CircularFifoBuffer(MAX_TASKS);
- private List<TaskAndWeakRefPair> rpcTasks = Lists.newArrayList();
+
+ private final int maxTasks;
+ private final long rpcWarnTime;
+ private final long expirationTime;
+ private final CircularFifoBuffer tasks;
+ private final List<TaskAndWeakRefPair> rpcTasks;
+ private final long monitorInterval;
+ private Thread monitorThread;
+
+ TaskMonitor(Configuration conf) {
+ maxTasks = conf.getInt(MAX_TASKS_KEY, DEFAULT_MAX_TASKS);
+ expirationTime = conf.getLong(EXPIRATION_TIME_KEY, DEFAULT_EXPIRATION_TIME);
+ rpcWarnTime = conf.getLong(RPC_WARN_TIME_KEY, DEFAULT_RPC_WARN_TIME);
+ tasks = new CircularFifoBuffer(maxTasks);
+ rpcTasks = Lists.newArrayList();
+ monitorInterval = conf.getLong(MONITOR_INTERVAL_KEY, DEFAULT_MONITOR_INTERVAL);
+ monitorThread = new Thread(new MonitorRunnable());
+ Threads.setDaemonThreadRunning(monitorThread, "Monitor thread for TaskMonitor");
+ }
/**
* Get singleton instance.
@@ -61,7 +83,7 @@ public class TaskMonitor {
*/
public static synchronized TaskMonitor get() {
if (instance == null) {
- instance = new TaskMonitor();
+ instance = new TaskMonitor(HBaseConfiguration.create());
}
return instance;
}
@@ -93,6 +115,22 @@ public class TaskMonitor {
return proxy;
}
+ private synchronized void warnStuckTasks() {
+ if (rpcWarnTime > 0) {
+ final long now = EnvironmentEdgeManager.currentTime();
+ for (Iterator<TaskAndWeakRefPair> it = rpcTasks.iterator();
+ it.hasNext();) {
+ TaskAndWeakRefPair pair = it.next();
+ MonitoredTask stat = pair.get();
+ if ((stat.getState() == MonitoredTaskImpl.State.RUNNING) &&
+ (now >= stat.getWarnTime() + rpcWarnTime)) {
+ LOG.warn("Task may be stuck: " + stat);
+ stat.setWarnTime(now);
+ }
+ }
+ }
+ }
+
private synchronized void purgeExpiredTasks() {
for (Iterator<TaskAndWeakRefPair> it = tasks.iterator();
it.hasNext();) {
@@ -139,12 +177,11 @@ public class TaskMonitor {
private boolean canPurge(MonitoredTask stat) {
long cts = stat.getCompletionTimestamp();
- return (cts > 0 && System.currentTimeMillis() - cts > EXPIRATION_TIME);
+ return (cts > 0 && EnvironmentEdgeManager.currentTime() - cts > expirationTime);
}
-
public void dumpAsText(PrintWriter out) {
- long now = System.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
List<MonitoredTask> tasks = getTasks();
for (MonitoredTask task : tasks) {
@@ -164,6 +201,12 @@ public class TaskMonitor {
}
}
+ public synchronized void shutdown() {
+ if (this.monitorThread != null) {
+ monitorThread.interrupt();
+ }
+ }
+
/**
* This class encapsulates an object as well as a weak reference to a proxy
* that passes through calls to that object. In art form:
@@ -218,4 +261,23 @@ public class TaskMonitor {
return method.invoke(delegatee, args);
}
}
+
+ private class MonitorRunnable implements Runnable {
+ private boolean running = true;
+
+ @Override
+ public void run() {
+ while (running) {
+ try {
+ Thread.sleep(monitorInterval);
+ if (tasks.isFull()) {
+ purgeExpiredTasks();
+ }
+ warnStuckTasks();
+ } catch (InterruptedException e) {
+ running = false;
+ }
+ }
+ }
+ }
}
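For readers wiring up the new tunables, the following is a minimal sketch of the intended flow, assuming only the keys and methods introduced in the patch above; the class name TaskMonitorWarnSketch, the threshold values, and the task/method labels are illustrative, and the package declaration is needed because the TaskMonitor(Configuration) constructor is package-private (production code would instead set these properties in hbase-site.xml and rely on TaskMonitor.get()):

  package org.apache.hadoop.hbase.monitoring; // for the package-private constructor

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

  public class TaskMonitorWarnSketch {
    public static void main(String[] args) throws InterruptedException {
      Configuration conf = new Configuration();
      // Warn when a monitored RPC task has been RUNNING longer than 60s.
      // The default of 0 leaves stuck-task warnings disabled.
      conf.setLong(TaskMonitor.RPC_WARN_TIME_KEY, 60 * 1000L);
      // The monitor thread wakes at this interval to purge expired tasks
      // and scan for stuck RPC tasks (10s is also the default).
      conf.setLong(TaskMonitor.MONITOR_INTERVAL_KEY, 10 * 1000L);

      TaskMonitor tm = new TaskMonitor(conf);
      MonitoredRPCHandler status = tm.createRPCStatus("exampleHandler");
      status.setRPC("exampleMethod", new Object[0], EnvironmentEdgeManager.currentTime());

      // While the task stays RUNNING, every sweep that finds its warn time at
      // least rpcWarnTime in the past logs "Task may be stuck: ..." and resets
      // the warn time, so the warning repeats until the task completes.
      Thread.sleep(2 * 60 * 1000L);
      status.markComplete("done");
      tm.shutdown(); // interrupt the monitor thread
    }
  }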
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9021755/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
index 5464d9f..718339a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
@@ -22,8 +22,10 @@ import static org.junit.Assert.*;
import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -32,7 +34,7 @@ public class TestTaskMonitor {
@Test
public void testTaskMonitorBasics() {
- TaskMonitor tm = new TaskMonitor();
+ TaskMonitor tm = new TaskMonitor(new Configuration());
assertTrue("Task monitor should start empty",
tm.getTasks().isEmpty());
@@ -55,11 +57,13 @@ public class TestTaskMonitor {
// If we mark its completion time back a few minutes, it should get gced
task.expireNow();
assertEquals(0, tm.getTasks().size());
+
+ tm.shutdown();
}
@Test
public void testTasksGetAbortedOnLeak() throws InterruptedException {
- final TaskMonitor tm = new TaskMonitor();
+ final TaskMonitor tm = new TaskMonitor(new Configuration());
assertTrue("Task monitor should start empty",
tm.getTasks().isEmpty());
@@ -86,42 +90,58 @@ public class TestTaskMonitor {
// Now it should be aborted
MonitoredTask taskFromTm = tm.getTasks().get(0);
assertEquals(MonitoredTask.State.ABORTED, taskFromTm.getState());
+
+ tm.shutdown();
}
@Test
public void testTaskLimit() throws Exception {
- TaskMonitor tm = new TaskMonitor();
- for (int i = 0; i < TaskMonitor.MAX_TASKS + 10; i++) {
+ TaskMonitor tm = new TaskMonitor(new Configuration());
+ for (int i = 0; i < TaskMonitor.DEFAULT_MAX_TASKS + 10; i++) {
tm.createStatus("task " + i);
}
// Make sure it was limited correctly
- assertEquals(TaskMonitor.MAX_TASKS, tm.getTasks().size());
+ assertEquals(TaskMonitor.DEFAULT_MAX_TASKS, tm.getTasks().size());
// Make sure we culled the earlier tasks, not later
// (i.e. tasks 0 through 9 should have been deleted)
assertEquals("task 10", tm.getTasks().get(0).getDescription());
+ tm.shutdown();
}
@Test
public void testDoNotPurgeRPCTask() throws Exception {
int RPCTaskNums = 10;
+ TaskMonitor tm = TaskMonitor.get();
for(int i = 0; i < RPCTaskNums; i++) {
- TaskMonitor.get().createRPCStatus("PRCTask" + i);
+ tm.createRPCStatus("PRCTask" + i);
}
- for(int i = 0; i < TaskMonitor.MAX_TASKS; i++) {
- TaskMonitor.get().createStatus("otherTask" + i);
+ for(int i = 0; i < TaskMonitor.DEFAULT_MAX_TASKS; i++) {
+ tm.createStatus("otherTask" + i);
}
int remainRPCTask = 0;
- for(MonitoredTask task :TaskMonitor.get().getTasks()) {
+ for(MonitoredTask task: tm.getTasks()) {
if(task instanceof MonitoredRPCHandler) {
remainRPCTask++;
}
}
assertEquals("RPC Tasks have been purged!", RPCTaskNums, remainRPCTask);
-
+ tm.shutdown();
}
-
-
+ @Test
+ public void testWarnStuckTasks() throws Exception {
+ final int INTERVAL = 1000;
+ Configuration conf = new Configuration();
+ conf.setLong(TaskMonitor.RPC_WARN_TIME_KEY, INTERVAL);
+ conf.setLong(TaskMonitor.MONITOR_INTERVAL_KEY, INTERVAL);
+ final TaskMonitor tm = new TaskMonitor(conf);
+ MonitoredRPCHandler t = tm.createRPCStatus("test task");
+ long then = EnvironmentEdgeManager.currentTime();
+ t.setRPC("testMethod", new Object[0], then);
+ Thread.sleep(INTERVAL * 2);
+ assertTrue("We did not warn", t.getWarnTime() > then);
+ tm.shutdown();
+ }
}
[11/11] hbase git commit: HBASE-18248 Warn if monitored RPC task has been tied up beyond a configurable threshold
Posted by ap...@apache.org.
HBASE-18248 Warn if monitored RPC task has been tied up beyond a configurable threshold
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b6ff1d5e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b6ff1d5e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b6ff1d5e
Branch: refs/heads/branch-1.4
Commit: b6ff1d5e8ad05a3d40534e3393669c64b621a32c
Parents: e9e16b5
Author: Andrew Purtell <ap...@apache.org>
Authored: Wed Aug 9 18:11:28 2017 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Wed Aug 9 18:17:29 2017 -0700
----------------------------------------------------------------------
.../monitoring/MonitoredRPCHandlerImpl.java | 8 +-
.../hadoop/hbase/monitoring/MonitoredTask.java | 2 +
.../hbase/monitoring/MonitoredTaskImpl.java | 16 +++-
.../hadoop/hbase/monitoring/TaskMonitor.java | 88 +++++++++++++++++---
.../hbase/monitoring/TestTaskMonitor.java | 44 +++++++---
5 files changed, 130 insertions(+), 28 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/b6ff1d5e/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
index a29595b..08c8c9f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
@@ -251,6 +251,12 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
if (getState() != State.RUNNING) {
return super.toString();
}
- return super.toString() + ", rpcMethod=" + getRPC();
+ return super.toString()
+ + ", queuetimems=" + getRPCQueueTime()
+ + ", starttimems=" + getRPCStartTime()
+ + ", clientaddress=" + clientAddress
+ + ", remoteport=" + remotePort
+ + ", packetlength=" + getRPCPacketLength()
+ + ", rpcMethod=" + getRPC();
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b6ff1d5e/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
index ff3667b..48fba1b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
@@ -39,6 +39,7 @@ public interface MonitoredTask extends Cloneable {
State getState();
long getStateTime();
long getCompletionTimestamp();
+ long getWarnTime();
void markComplete(String msg);
void pause(String msg);
@@ -48,6 +49,7 @@ public interface MonitoredTask extends Cloneable {
void setStatus(String status);
void setDescription(String description);
+ void setWarnTime(final long t);
/**
* Explicitly mark this status as able to be cleaned up,
http://git-wip-us.apache.org/repos/asf/hbase/blob/b6ff1d5e/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
index 27aaceb..0cee4c8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
@@ -30,7 +30,8 @@ class MonitoredTaskImpl implements MonitoredTask {
private long startTime;
private long statusTime;
private long stateTime;
-
+ private long warnTime;
+
private volatile String status;
private volatile String description;
@@ -42,6 +43,7 @@ class MonitoredTaskImpl implements MonitoredTask {
startTime = System.currentTimeMillis();
statusTime = startTime;
stateTime = startTime;
+ warnTime = startTime;
}
@Override
@@ -82,7 +84,12 @@ class MonitoredTaskImpl implements MonitoredTask {
public long getStateTime() {
return stateTime;
}
-
+
+ @Override
+ public long getWarnTime() {
+ return warnTime;
+ }
+
@Override
public long getCompletionTimestamp() {
if (state == State.COMPLETE || state == State.ABORTED) {
@@ -132,6 +139,11 @@ class MonitoredTaskImpl implements MonitoredTask {
}
@Override
+ public void setWarnTime(long t) {
+ this.warnTime = t;
+ }
+
+ @Override
public void cleanup() {
if (state == State.RUNNING) {
setState(State.ABORTED);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b6ff1d5e/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
index 53db6a9..0f91234 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
@@ -30,9 +30,12 @@ import java.util.List;
import org.apache.commons.collections.buffer.CircularFifoBuffer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.Threads;
-import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
/**
@@ -44,16 +47,35 @@ import com.google.common.collect.Lists;
public class TaskMonitor {
private static final Log LOG = LogFactory.getLog(TaskMonitor.class);
- // Don't keep around any tasks that have completed more than
- // 60 seconds ago
- private static final long EXPIRATION_TIME = 60*1000;
+ public static final String MAX_TASKS_KEY = "hbase.taskmonitor.max.tasks";
+ public static final int DEFAULT_MAX_TASKS = 1000;
+ public static final String RPC_WARN_TIME_KEY = "hbase.taskmonitor.rpc.warn.time";
+ public static final long DEFAULT_RPC_WARN_TIME = 0;
+ public static final String EXPIRATION_TIME_KEY = "hbase.taskmonitor.expiration.time";
+ public static final long DEFAULT_EXPIRATION_TIME = 60*1000;
+ public static final String MONITOR_INTERVAL_KEY = "hbase.taskmonitor.monitor.interval";
+ public static final long DEFAULT_MONITOR_INTERVAL = 10*1000;
- @VisibleForTesting
- static final int MAX_TASKS = 1000;
-
private static TaskMonitor instance;
- private CircularFifoBuffer tasks = new CircularFifoBuffer(MAX_TASKS);
- private List<TaskAndWeakRefPair> rpcTasks = Lists.newArrayList();
+
+ private final int maxTasks;
+ private final long rpcWarnTime;
+ private final long expirationTime;
+ private final CircularFifoBuffer tasks;
+ private final List<TaskAndWeakRefPair> rpcTasks;
+ private final long monitorInterval;
+ private Thread monitorThread;
+
+ TaskMonitor(Configuration conf) {
+ maxTasks = conf.getInt(MAX_TASKS_KEY, DEFAULT_MAX_TASKS);
+ expirationTime = conf.getLong(EXPIRATION_TIME_KEY, DEFAULT_EXPIRATION_TIME);
+ rpcWarnTime = conf.getLong(RPC_WARN_TIME_KEY, DEFAULT_RPC_WARN_TIME);
+ tasks = new CircularFifoBuffer(maxTasks);
+ rpcTasks = Lists.newArrayList();
+ monitorInterval = conf.getLong(MONITOR_INTERVAL_KEY, DEFAULT_MONITOR_INTERVAL);
+ monitorThread = new Thread(new MonitorRunnable());
+ Threads.setDaemonThreadRunning(monitorThread, "Monitor thread for TaskMonitor");
+ }
/**
* Get singleton instance.
@@ -61,7 +83,7 @@ public class TaskMonitor {
*/
public static synchronized TaskMonitor get() {
if (instance == null) {
- instance = new TaskMonitor();
+ instance = new TaskMonitor(HBaseConfiguration.create());
}
return instance;
}
@@ -93,6 +115,22 @@ public class TaskMonitor {
return proxy;
}
+ private synchronized void warnStuckTasks() {
+ if (rpcWarnTime > 0) {
+ final long now = EnvironmentEdgeManager.currentTime();
+ for (Iterator<TaskAndWeakRefPair> it = rpcTasks.iterator();
+ it.hasNext();) {
+ TaskAndWeakRefPair pair = it.next();
+ MonitoredTask stat = pair.get();
+ if ((stat.getState() == MonitoredTaskImpl.State.RUNNING) &&
+ (now >= stat.getWarnTime() + rpcWarnTime)) {
+ LOG.warn("Task may be stuck: " + stat);
+ stat.setWarnTime(now);
+ }
+ }
+ }
+ }
+
private synchronized void purgeExpiredTasks() {
for (Iterator<TaskAndWeakRefPair> it = tasks.iterator();
it.hasNext();) {
@@ -140,12 +178,11 @@ public class TaskMonitor {
private boolean canPurge(MonitoredTask stat) {
long cts = stat.getCompletionTimestamp();
- return (cts > 0 && System.currentTimeMillis() - cts > EXPIRATION_TIME);
+ return (cts > 0 && EnvironmentEdgeManager.currentTime() - cts > expirationTime);
}
-
public void dumpAsText(PrintWriter out) {
- long now = System.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
List<MonitoredTask> tasks = getTasks();
for (MonitoredTask task : tasks) {
@@ -165,6 +202,12 @@ public class TaskMonitor {
}
}
+ public synchronized void shutdown() {
+ if (this.monitorThread != null) {
+ monitorThread.interrupt();
+ }
+ }
+
/**
* This class encapsulates an object as well as a weak reference to a proxy
* that passes through calls to that object. In art form:
@@ -219,4 +262,23 @@ public class TaskMonitor {
return method.invoke(delegatee, args);
}
}
+
+ private class MonitorRunnable implements Runnable {
+ private boolean running = true;
+
+ @Override
+ public void run() {
+ while (running) {
+ try {
+ Thread.sleep(monitorInterval);
+ if (tasks.isFull()) {
+ purgeExpiredTasks();
+ }
+ warnStuckTasks();
+ } catch (InterruptedException e) {
+ running = false;
+ }
+ }
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b6ff1d5e/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
index d09b1d1..0914b85 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
@@ -22,7 +22,9 @@ import static org.junit.Assert.*;
import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -31,7 +33,7 @@ public class TestTaskMonitor {
@Test
public void testTaskMonitorBasics() {
- TaskMonitor tm = new TaskMonitor();
+ TaskMonitor tm = new TaskMonitor(new Configuration());
assertTrue("Task monitor should start empty",
tm.getTasks().isEmpty());
@@ -54,11 +56,13 @@ public class TestTaskMonitor {
// If we mark its completion time back a few minutes, it should get gced
task.expireNow();
assertEquals(0, tm.getTasks().size());
+
+ tm.shutdown();
}
@Test
public void testTasksGetAbortedOnLeak() throws InterruptedException {
- final TaskMonitor tm = new TaskMonitor();
+ final TaskMonitor tm = new TaskMonitor(new Configuration());
assertTrue("Task monitor should start empty",
tm.getTasks().isEmpty());
@@ -85,42 +89,58 @@ public class TestTaskMonitor {
// Now it should be aborted
MonitoredTask taskFromTm = tm.getTasks().get(0);
assertEquals(MonitoredTask.State.ABORTED, taskFromTm.getState());
+
+ tm.shutdown();
}
@Test
public void testTaskLimit() throws Exception {
- TaskMonitor tm = new TaskMonitor();
- for (int i = 0; i < TaskMonitor.MAX_TASKS + 10; i++) {
+ TaskMonitor tm = new TaskMonitor(new Configuration());
+ for (int i = 0; i < TaskMonitor.DEFAULT_MAX_TASKS + 10; i++) {
tm.createStatus("task " + i);
}
// Make sure it was limited correctly
- assertEquals(TaskMonitor.MAX_TASKS, tm.getTasks().size());
+ assertEquals(TaskMonitor.DEFAULT_MAX_TASKS, tm.getTasks().size());
// Make sure we culled the earlier tasks, not later
// (i.e. tasks 0 through 9 should have been deleted)
assertEquals("task 10", tm.getTasks().get(0).getDescription());
+ tm.shutdown();
}
@Test
public void testDoNotPurgeRPCTask() throws Exception {
int RPCTaskNums = 10;
+ TaskMonitor tm = TaskMonitor.get();
for(int i = 0; i < RPCTaskNums; i++) {
- TaskMonitor.get().createRPCStatus("PRCTask" + i);
+ tm.createRPCStatus("PRCTask" + i);
}
- for(int i = 0; i < TaskMonitor.MAX_TASKS; i++) {
- TaskMonitor.get().createStatus("otherTask" + i);
+ for(int i = 0; i < TaskMonitor.DEFAULT_MAX_TASKS; i++) {
+ tm.createStatus("otherTask" + i);
}
int remainRPCTask = 0;
- for(MonitoredTask task :TaskMonitor.get().getTasks()) {
+ for(MonitoredTask task: tm.getTasks()) {
if(task instanceof MonitoredRPCHandler) {
remainRPCTask++;
}
}
assertEquals("RPC Tasks have been purged!", RPCTaskNums, remainRPCTask);
-
+ tm.shutdown();
}
-
-
+ @Test
+ public void testWarnStuckTasks() throws Exception {
+ final int INTERVAL = 1000;
+ Configuration conf = new Configuration();
+ conf.setLong(TaskMonitor.RPC_WARN_TIME_KEY, INTERVAL);
+ conf.setLong(TaskMonitor.MONITOR_INTERVAL_KEY, INTERVAL);
+ final TaskMonitor tm = new TaskMonitor(conf);
+ MonitoredRPCHandler t = tm.createRPCStatus("test task");
+ long then = EnvironmentEdgeManager.currentTime();
+ t.setRPC("testMethod", new Object[0], then);
+ Thread.sleep(INTERVAL * 2);
+ assertTrue("We did not warn", t.getWarnTime() > then);
+ tm.shutdown();
+ }
}
[10/11] hbase git commit: HBASE-18248 Warn if monitored RPC task has been tied up beyond a configurable threshold
Posted by ap...@apache.org.
HBASE-18248 Warn if monitored RPC task has been tied up beyond a configurable threshold
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/685ab190
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/685ab190
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/685ab190
Branch: refs/heads/branch-1
Commit: 685ab19064e3a83fcb9a9af2f6ebbf7e2f85d6ab
Parents: 036fce3
Author: Andrew Purtell <ap...@apache.org>
Authored: Wed Aug 9 18:11:28 2017 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Wed Aug 9 18:17:21 2017 -0700
----------------------------------------------------------------------
.../monitoring/MonitoredRPCHandlerImpl.java | 8 +-
.../hadoop/hbase/monitoring/MonitoredTask.java | 2 +
.../hbase/monitoring/MonitoredTaskImpl.java | 16 +++-
.../hadoop/hbase/monitoring/TaskMonitor.java | 88 +++++++++++++++++---
.../hbase/monitoring/TestTaskMonitor.java | 44 +++++++---
5 files changed, 130 insertions(+), 28 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/685ab190/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
index a29595b..08c8c9f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
@@ -251,6 +251,12 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
if (getState() != State.RUNNING) {
return super.toString();
}
- return super.toString() + ", rpcMethod=" + getRPC();
+ return super.toString()
+ + ", queuetimems=" + getRPCQueueTime()
+ + ", starttimems=" + getRPCStartTime()
+ + ", clientaddress=" + clientAddress
+ + ", remoteport=" + remotePort
+ + ", packetlength=" + getRPCPacketLength()
+ + ", rpcMethod=" + getRPC();
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/685ab190/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
index ff3667b..48fba1b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
@@ -39,6 +39,7 @@ public interface MonitoredTask extends Cloneable {
State getState();
long getStateTime();
long getCompletionTimestamp();
+ long getWarnTime();
void markComplete(String msg);
void pause(String msg);
@@ -48,6 +49,7 @@ public interface MonitoredTask extends Cloneable {
void setStatus(String status);
void setDescription(String description);
+ void setWarnTime(final long t);
/**
* Explicitly mark this status as able to be cleaned up,
http://git-wip-us.apache.org/repos/asf/hbase/blob/685ab190/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
index 27aaceb..0cee4c8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
@@ -30,7 +30,8 @@ class MonitoredTaskImpl implements MonitoredTask {
private long startTime;
private long statusTime;
private long stateTime;
-
+ private long warnTime;
+
private volatile String status;
private volatile String description;
@@ -42,6 +43,7 @@ class MonitoredTaskImpl implements MonitoredTask {
startTime = System.currentTimeMillis();
statusTime = startTime;
stateTime = startTime;
+ warnTime = startTime;
}
@Override
@@ -82,7 +84,12 @@ class MonitoredTaskImpl implements MonitoredTask {
public long getStateTime() {
return stateTime;
}
-
+
+ @Override
+ public long getWarnTime() {
+ return warnTime;
+ }
+
@Override
public long getCompletionTimestamp() {
if (state == State.COMPLETE || state == State.ABORTED) {
@@ -132,6 +139,11 @@ class MonitoredTaskImpl implements MonitoredTask {
}
@Override
+ public void setWarnTime(long t) {
+ this.warnTime = t;
+ }
+
+ @Override
public void cleanup() {
if (state == State.RUNNING) {
setState(State.ABORTED);
http://git-wip-us.apache.org/repos/asf/hbase/blob/685ab190/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
index 53db6a9..0f91234 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
@@ -30,9 +30,12 @@ import java.util.List;
import org.apache.commons.collections.buffer.CircularFifoBuffer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.Threads;
-import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
/**
@@ -44,16 +47,35 @@ import com.google.common.collect.Lists;
public class TaskMonitor {
private static final Log LOG = LogFactory.getLog(TaskMonitor.class);
- // Don't keep around any tasks that have completed more than
- // 60 seconds ago
- private static final long EXPIRATION_TIME = 60*1000;
+ public static final String MAX_TASKS_KEY = "hbase.taskmonitor.max.tasks";
+ public static final int DEFAULT_MAX_TASKS = 1000;
+ public static final String RPC_WARN_TIME_KEY = "hbase.taskmonitor.rpc.warn.time";
+ public static final long DEFAULT_RPC_WARN_TIME = 0;
+ public static final String EXPIRATION_TIME_KEY = "hbase.taskmonitor.expiration.time";
+ public static final long DEFAULT_EXPIRATION_TIME = 60*1000;
+ public static final String MONITOR_INTERVAL_KEY = "hbase.taskmonitor.monitor.interval";
+ public static final long DEFAULT_MONITOR_INTERVAL = 10*1000;
- @VisibleForTesting
- static final int MAX_TASKS = 1000;
-
private static TaskMonitor instance;
- private CircularFifoBuffer tasks = new CircularFifoBuffer(MAX_TASKS);
- private List<TaskAndWeakRefPair> rpcTasks = Lists.newArrayList();
+
+ private final int maxTasks;
+ private final long rpcWarnTime;
+ private final long expirationTime;
+ private final CircularFifoBuffer tasks;
+ private final List<TaskAndWeakRefPair> rpcTasks;
+ private final long monitorInterval;
+ private Thread monitorThread;
+
+ TaskMonitor(Configuration conf) {
+ maxTasks = conf.getInt(MAX_TASKS_KEY, DEFAULT_MAX_TASKS);
+ expirationTime = conf.getLong(EXPIRATION_TIME_KEY, DEFAULT_EXPIRATION_TIME);
+ rpcWarnTime = conf.getLong(RPC_WARN_TIME_KEY, DEFAULT_RPC_WARN_TIME);
+ tasks = new CircularFifoBuffer(maxTasks);
+ rpcTasks = Lists.newArrayList();
+ monitorInterval = conf.getLong(MONITOR_INTERVAL_KEY, DEFAULT_MONITOR_INTERVAL);
+ monitorThread = new Thread(new MonitorRunnable());
+ Threads.setDaemonThreadRunning(monitorThread, "Monitor thread for TaskMonitor");
+ }
/**
* Get singleton instance.
@@ -61,7 +83,7 @@ public class TaskMonitor {
*/
public static synchronized TaskMonitor get() {
if (instance == null) {
- instance = new TaskMonitor();
+ instance = new TaskMonitor(HBaseConfiguration.create());
}
return instance;
}
@@ -93,6 +115,22 @@ public class TaskMonitor {
return proxy;
}
+ private synchronized void warnStuckTasks() {
+ if (rpcWarnTime > 0) {
+ final long now = EnvironmentEdgeManager.currentTime();
+ for (Iterator<TaskAndWeakRefPair> it = rpcTasks.iterator();
+ it.hasNext();) {
+ TaskAndWeakRefPair pair = it.next();
+ MonitoredTask stat = pair.get();
+ if ((stat.getState() == MonitoredTaskImpl.State.RUNNING) &&
+ (now >= stat.getWarnTime() + rpcWarnTime)) {
+ LOG.warn("Task may be stuck: " + stat);
+ stat.setWarnTime(now);
+ }
+ }
+ }
+ }
+
private synchronized void purgeExpiredTasks() {
for (Iterator<TaskAndWeakRefPair> it = tasks.iterator();
it.hasNext();) {
@@ -140,12 +178,11 @@ public class TaskMonitor {
private boolean canPurge(MonitoredTask stat) {
long cts = stat.getCompletionTimestamp();
- return (cts > 0 && System.currentTimeMillis() - cts > EXPIRATION_TIME);
+ return (cts > 0 && EnvironmentEdgeManager.currentTime() - cts > expirationTime);
}
-
public void dumpAsText(PrintWriter out) {
- long now = System.currentTimeMillis();
+ long now = EnvironmentEdgeManager.currentTime();
List<MonitoredTask> tasks = getTasks();
for (MonitoredTask task : tasks) {
@@ -165,6 +202,12 @@ public class TaskMonitor {
}
}
+ public synchronized void shutdown() {
+ if (this.monitorThread != null) {
+ monitorThread.interrupt();
+ }
+ }
+
/**
* This class encapsulates an object as well as a weak reference to a proxy
* that passes through calls to that object. In art form:
@@ -219,4 +262,23 @@ public class TaskMonitor {
return method.invoke(delegatee, args);
}
}
+
+ private class MonitorRunnable implements Runnable {
+ private boolean running = true;
+
+ @Override
+ public void run() {
+ while (running) {
+ try {
+ Thread.sleep(monitorInterval);
+ if (tasks.isFull()) {
+ purgeExpiredTasks();
+ }
+ warnStuckTasks();
+ } catch (InterruptedException e) {
+ running = false;
+ }
+ }
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/685ab190/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
index d09b1d1..0914b85 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
@@ -22,7 +22,9 @@ import static org.junit.Assert.*;
import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -31,7 +33,7 @@ public class TestTaskMonitor {
@Test
public void testTaskMonitorBasics() {
- TaskMonitor tm = new TaskMonitor();
+ TaskMonitor tm = new TaskMonitor(new Configuration());
assertTrue("Task monitor should start empty",
tm.getTasks().isEmpty());
@@ -54,11 +56,13 @@ public class TestTaskMonitor {
// If we mark its completion time back a few minutes, it should get gced
task.expireNow();
assertEquals(0, tm.getTasks().size());
+
+ tm.shutdown();
}
@Test
public void testTasksGetAbortedOnLeak() throws InterruptedException {
- final TaskMonitor tm = new TaskMonitor();
+ final TaskMonitor tm = new TaskMonitor(new Configuration());
assertTrue("Task monitor should start empty",
tm.getTasks().isEmpty());
@@ -85,42 +89,58 @@ public class TestTaskMonitor {
// Now it should be aborted
MonitoredTask taskFromTm = tm.getTasks().get(0);
assertEquals(MonitoredTask.State.ABORTED, taskFromTm.getState());
+
+ tm.shutdown();
}
@Test
public void testTaskLimit() throws Exception {
- TaskMonitor tm = new TaskMonitor();
- for (int i = 0; i < TaskMonitor.MAX_TASKS + 10; i++) {
+ TaskMonitor tm = new TaskMonitor(new Configuration());
+ for (int i = 0; i < TaskMonitor.DEFAULT_MAX_TASKS + 10; i++) {
tm.createStatus("task " + i);
}
// Make sure it was limited correctly
- assertEquals(TaskMonitor.MAX_TASKS, tm.getTasks().size());
+ assertEquals(TaskMonitor.DEFAULT_MAX_TASKS, tm.getTasks().size());
// Make sure we culled the earlier tasks, not later
// (i.e. tasks 0 through 9 should have been deleted)
assertEquals("task 10", tm.getTasks().get(0).getDescription());
+ tm.shutdown();
}
@Test
public void testDoNotPurgeRPCTask() throws Exception {
int RPCTaskNums = 10;
+ TaskMonitor tm = TaskMonitor.get();
for(int i = 0; i < RPCTaskNums; i++) {
- TaskMonitor.get().createRPCStatus("PRCTask" + i);
+ tm.createRPCStatus("PRCTask" + i);
}
- for(int i = 0; i < TaskMonitor.MAX_TASKS; i++) {
- TaskMonitor.get().createStatus("otherTask" + i);
+ for(int i = 0; i < TaskMonitor.DEFAULT_MAX_TASKS; i++) {
+ tm.createStatus("otherTask" + i);
}
int remainRPCTask = 0;
- for(MonitoredTask task :TaskMonitor.get().getTasks()) {
+ for(MonitoredTask task: tm.getTasks()) {
if(task instanceof MonitoredRPCHandler) {
remainRPCTask++;
}
}
assertEquals("RPC Tasks have been purged!", RPCTaskNums, remainRPCTask);
-
+ tm.shutdown();
}
-
-
+ @Test
+ public void testWarnStuckTasks() throws Exception {
+ final int INTERVAL = 1000;
+ Configuration conf = new Configuration();
+ conf.setLong(TaskMonitor.RPC_WARN_TIME_KEY, INTERVAL);
+ conf.setLong(TaskMonitor.MONITOR_INTERVAL_KEY, INTERVAL);
+ final TaskMonitor tm = new TaskMonitor(conf);
+ MonitoredRPCHandler t = tm.createRPCStatus("test task");
+ long then = EnvironmentEdgeManager.currentTime();
+ t.setRPC("testMethod", new Object[0], then);
+ Thread.sleep(INTERVAL * 2);
+ assertTrue("We did not warn", t.getWarnTime() > then);
+ tm.shutdown();
+ }
}
[07/11] hbase git commit: HBASE-18431 Mitigate compatibility concerns between branch-1.3 and branch-1.4
Posted by ap...@apache.org.
HBASE-18431 Mitigate compatibility concerns between branch-1.3 and branch-1.4
- Refactor TableName into its own proto module and fix up users
- Move SnapshotDescription from Snapshot.proto back to HBase.proto
- Restore FastLongHistogram and TestFastLongHistogram; deprecate FastLongHistogram
- Move DeleteQueryTracker back to o.a.h.h.regionserver
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e9e16b59
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e9e16b59
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e9e16b59
Branch: refs/heads/branch-1.4
Commit: e9e16b59420c5d9a47b9b014b3bb3cb3421b1de9
Parents: 8d826b8
Author: Andrew Purtell <ap...@apache.org>
Authored: Tue Aug 8 16:04:54 2017 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Tue Aug 8 16:04:54 2017 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/client/Admin.java | 3 +-
.../apache/hadoop/hbase/client/HBaseAdmin.java | 5 +-
.../hadoop/hbase/protobuf/ProtobufUtil.java | 9 +-
.../ClientSnapshotDescriptionUtils.java | 2 +-
.../snapshot/CorruptedSnapshotException.java | 2 +-
.../hbase/snapshot/HBaseSnapshotException.java | 2 +-
.../snapshot/RestoreSnapshotException.java | 2 +-
.../snapshot/SnapshotCreationException.java | 2 +-
.../snapshot/SnapshotDoesNotExistException.java | 2 +-
.../hbase/snapshot/SnapshotExistsException.java | 2 +-
.../hbase/client/TestSnapshotFromAdmin.java | 2 +-
.../hadoop/hbase/util/FastLongHistogram.java | 319 +
.../hbase/util/TestFastLongHistogram.java | 132 +
hbase-protocol/pom.xml | 1 +
.../protobuf/generated/AccessControlProtos.java | 92 +-
.../hbase/protobuf/generated/ClientProtos.java | 2 +-
.../hbase/protobuf/generated/HBaseProtos.java | 5603 ++++++++++--------
.../generated/MasterProcedureProtos.java | 586 +-
.../hbase/protobuf/generated/MasterProtos.java | 1689 +++---
.../generated/SecureBulkLoadProtos.java | 94 +-
.../protobuf/generated/SnapshotProtos.java | 1551 +----
.../hbase/protobuf/generated/TableProtos.java | 607 ++
.../hbase/protobuf/generated/WALProtos.java | 145 +-
.../protobuf/generated/ZooKeeperProtos.java | 171 +-
.../src/main/protobuf/AccessControl.proto | 2 +-
hbase-protocol/src/main/protobuf/HBase.proto | 28 +-
hbase-protocol/src/main/protobuf/Master.proto | 1 +
.../src/main/protobuf/MasterProcedure.proto | 1 +
.../src/main/protobuf/SecureBulkLoad.proto | 1 +
hbase-protocol/src/main/protobuf/Snapshot.proto | 19 -
hbase-protocol/src/main/protobuf/Table.proto | 33 +
hbase-protocol/src/main/protobuf/WAL.proto | 1 +
.../src/main/protobuf/ZooKeeper.proto | 1 +
.../hbase/tmpl/master/MasterStatusTmpl.jamon | 2 +-
.../BaseMasterAndRegionObserver.java | 2 +-
.../hbase/coprocessor/BaseMasterObserver.java | 2 +-
.../hbase/coprocessor/BaseRegionObserver.java | 2 +-
.../hbase/coprocessor/MasterObserver.java | 2 +-
.../hbase/coprocessor/RegionObserver.java | 2 +-
.../mapreduce/TableSnapshotInputFormatImpl.java | 2 +-
.../hbase/master/MasterCoprocessorHost.java | 2 +-
.../hadoop/hbase/master/MasterRpcServices.java | 4 +-
.../hadoop/hbase/master/SnapshotSentinel.java | 2 +-
.../master/snapshot/CloneSnapshotHandler.java | 2 +-
.../snapshot/DisabledTableSnapshotHandler.java | 2 +-
.../snapshot/EnabledTableSnapshotHandler.java | 2 +-
.../master/snapshot/MasterSnapshotVerifier.java | 2 +-
.../master/snapshot/RestoreSnapshotHandler.java | 2 +-
.../hbase/master/snapshot/SnapshotManager.java | 4 +-
.../master/snapshot/TakeSnapshotHandler.java | 2 +-
.../hbase/regionserver/DeleteTracker.java | 101 +
.../hadoop/hbase/regionserver/HRegion.java | 2 +-
.../regionserver/RegionCoprocessorHost.java | 2 +-
.../CompactionScanQueryMatcher.java | 1 +
.../querymatcher/DeleteTracker.java | 101 -
.../DropDeletesCompactionScanQueryMatcher.java | 1 +
.../querymatcher/LegacyScanQueryMatcher.java | 3 +-
.../MajorCompactionScanQueryMatcher.java | 1 +
.../MinorCompactionScanQueryMatcher.java | 1 +
.../NormalUserScanQueryMatcher.java | 1 +
.../querymatcher/ScanDeleteTracker.java | 1 +
.../querymatcher/ScanQueryMatcher.java | 3 +-
.../StripeCompactionScanQueryMatcher.java | 1 +
.../snapshot/FlushSnapshotSubprocedure.java | 2 +-
.../snapshot/RegionServerSnapshotManager.java | 2 +-
.../hbase/security/access/AccessController.java | 2 +-
.../visibility/VisibilityController.java | 2 +-
.../hadoop/hbase/snapshot/CreateSnapshot.java | 2 +-
.../hadoop/hbase/snapshot/ExportSnapshot.java | 2 +-
.../hbase/snapshot/RestoreSnapshotHelper.java | 2 +-
.../snapshot/SnapshotDescriptionUtils.java | 2 +-
.../hadoop/hbase/snapshot/SnapshotInfo.java | 2 +-
.../hadoop/hbase/snapshot/SnapshotManifest.java | 2 +-
.../hbase/snapshot/SnapshotManifestV1.java | 2 +-
.../hbase/snapshot/SnapshotManifestV2.java | 2 +-
.../hbase/snapshot/SnapshotReferenceUtil.java | 2 +-
.../resources/hbase-webapps/master/snapshot.jsp | 2 +-
.../hbase-webapps/master/snapshotsStats.jsp | 2 +-
.../hbase/client/TestSnapshotFromClient.java | 2 +-
.../hbase/coprocessor/TestMasterObserver.java | 2 +-
.../master/cleaner/TestSnapshotFromMaster.java | 2 +-
.../hbase/regionserver/TestHRegionInfo.java | 4 +-
.../querymatcher/TestScanDeleteTracker.java | 2 +-
.../security/access/TestAccessController.java | 2 +-
.../access/TestWithDisabledAuthorization.java | 2 +-
.../hbase/snapshot/SnapshotTestingUtils.java | 2 +-
.../hbase/snapshot/TestExportSnapshot.java | 2 +-
.../snapshot/TestFlushSnapshotFromClient.java | 2 +-
.../TestRestoreFlushSnapshotFromClient.java | 2 +-
.../snapshot/TestRestoreSnapshotHelper.java | 2 +-
.../snapshot/TestSnapshotClientRetries.java | 2 +-
.../snapshot/TestSnapshotDescriptionUtils.java | 2 +-
.../hbase/snapshot/TestSnapshotManifest.java | 2 +-
93 files changed, 5986 insertions(+), 5445 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 5b3744a..fa9594a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -43,9 +43,8 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.quotas.QuotaFilter;
import org.apache.hadoop.hbase.quotas.QuotaRetriever;
import org.apache.hadoop.hbase.quotas.QuotaSettings;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 2cbeb9a..84871ed 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -95,6 +95,7 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest;
@@ -156,7 +157,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.TableProtos;
import org.apache.hadoop.hbase.quotas.QuotaFilter;
import org.apache.hadoop.hbase.quotas.QuotaRetriever;
import org.apache.hadoop.hbase.quotas.QuotaSettings;
@@ -3174,7 +3175,7 @@ public class HBaseAdmin implements Admin {
public TableName[] call(int callTimeout) throws Exception {
HBaseRpcController controller = rpcControllerFactory.newController();
controller.setCallTimeout(callTimeout);
- List<HBaseProtos.TableName> tableNames =
+ List<TableProtos.TableName> tableNames =
master.listTableNamesByNamespace(controller, ListTableNamesByNamespaceRequest.
newBuilder().setNamespaceName(name).build())
.getTableNameList();
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index b704f4a..fa44cdd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -145,6 +145,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;
+import org.apache.hadoop.hbase.protobuf.generated.TableProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -2975,18 +2976,18 @@ public final class ProtobufUtil {
", type=" + proto.getMutateType().toString();
}
- public static TableName toTableName(HBaseProtos.TableName tableNamePB) {
+ public static TableName toTableName(TableProtos.TableName tableNamePB) {
return TableName.valueOf(tableNamePB.getNamespace().asReadOnlyByteBuffer(),
tableNamePB.getQualifier().asReadOnlyByteBuffer());
}
- public static HBaseProtos.TableName toProtoTableName(TableName tableName) {
- return HBaseProtos.TableName.newBuilder()
+ public static TableProtos.TableName toProtoTableName(TableName tableName) {
+ return TableProtos.TableName.newBuilder()
.setNamespace(ByteStringer.wrap(tableName.getNamespace()))
.setQualifier(ByteStringer.wrap(tableName.getQualifier())).build();
}
- public static TableName[] getTableNameArray(List<HBaseProtos.TableName> tableNamesList) {
+ public static TableName[] getTableNameArray(List<TableProtos.TableName> tableNamesList) {
if (tableNamesList == null) {
return new TableName[0];
}
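The hunk above changes ProtobufUtil to consume and produce TableProtos.TableName now that the message lives in its own Table.proto module. Below is a minimal round-trip sketch against the two converters shown in the diff; the namespace and table names are illustrative:

  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
  import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

  public class TableNameRoundTripSketch {
    public static void main(String[] args) {
      TableName original = TableName.valueOf("ns", "mytable");
      // To the wire form, now generated from the standalone Table.proto module.
      TableProtos.TableName pb = ProtobufUtil.toProtoTableName(original);
      // And back to the client-side type.
      TableName restored = ProtobufUtil.toTableName(pb);
      System.out.println(original.equals(restored)); // true
    }
  }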
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java
index 686dbb8..bba166b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.util.Bytes;
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
index a1f5911..d29c89c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
index a88d820..8889cfc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
/**
* General exception base class for when a snapshot fails
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java
index 8ed42e0..25cd583 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
/**
* Thrown when a snapshot could not be restored due to a server-side error when restoring it.
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java
index d550101..324d41f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
/**
* Thrown when a snapshot could not be created due to a server-side error when
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java
index 6006136..6ba45bd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java
index 0344aff..ff3cdcb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
/**
* Thrown when a snapshot exists but should not
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
index afa5892..6385c27 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
@@ -32,11 +32,11 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
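The import swaps above restore SnapshotDescription to HBaseProtos, so snapshot-facing client code compiles against and speaks the same wire format as branch-1.3. A minimal sketch of constructing one after the move, assuming the standard protobuf builder API and the name/table string fields of SnapshotDescription:

  import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;

  public class SnapshotDescriptionSketch {
    public static void main(String[] args) {
      SnapshotDescription desc = SnapshotDescription.newBuilder()
          .setName("example_snapshot") // illustrative snapshot name
          .setTable("example_table")   // illustrative table name
          .build();
      System.out.println(desc.getName() + " snapshots " + desc.getTable());
    }
  }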
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
new file mode 100644
index 0000000..2473bb4
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/FastLongHistogram.java
@@ -0,0 +1,319 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+
+/**
+ * FastLongHistogram is a thread-safe class that estimates the distribution of data and
+ * computes quantiles.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class FastLongHistogram {
+
+ /**
+ * Default number of bins.
+ */
+ public static final int DEFAULT_NBINS = 255;
+
+ public static final double[] DEFAULT_QUANTILES =
+ new double[]{0.25, 0.5, 0.75, 0.90, 0.95, 0.98, 0.99, 0.999};
+
+ /**
+ * Bins is a class containing a list of buckets (or bins) used to estimate the histogram of some data.
+ */
+ private static class Bins {
+ private final Counter[] counts;
+ // inclusive
+ private final long binsMin;
+ // exclusive
+ private final long binsMax;
+ private final long bins10XMax;
+ private final AtomicLong min = new AtomicLong(Long.MAX_VALUE);
+ private final AtomicLong max = new AtomicLong(0L);
+
+ private final Counter count = new Counter(0);
+ private final Counter total = new Counter(0);
+
+ // set to true once any data has been inserted into the Bins. It is set only after the counts
+ // have been updated.
+ private final AtomicBoolean hasData = new AtomicBoolean(false);
+
+ /**
+ * The constructor for creating a Bins without any prior data.
+ */
+ public Bins(int numBins) {
+ counts = createCounters(numBins + 3);
+ this.binsMin = 1L;
+
+ // These two numbers are total guesses
+ // and should be treated as highly suspect.
+ this.binsMax = 1000;
+ this.bins10XMax = binsMax * 10;
+ }
+
+ /**
+ * The constructor for creating a Bins based on the previous Bins.
+ */
+ public Bins(Bins last, int numOfBins, double minQ, double maxQ) {
+ long[] values = last.getQuantiles(new double[] { minQ, maxQ });
+ long wd = values[1] - values[0] + 1;
+ // expand the [minQ, maxQ] range at both ends, assuming a uniform distribution
+ this.binsMin = Math.max(0L, (long) (values[0] - wd * minQ));
+ long binsMax = (long) (values[1] + wd * (1 - maxQ)) + 1;
+ // make sure each bin has a width of at least 1
+ this.binsMax = Math.max(binsMax, this.binsMin + numOfBins);
+ this.bins10XMax = Math.max((long) (values[1] + (binsMax - 1) * 9), this.binsMax + 1);
+
+ this.counts = createCounters(numOfBins + 3);
+ }
+
+ private Counter[] createCounters(int num) {
+ Counter[] counters = new Counter[num];
+ for (int i = 0; i < num; i++) {
+ counters[i] = new Counter();
+ }
+ return counters;
+ }
+
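+ // Bin layout: index 0 holds values below binsMin; indices 1..numBins cover the uniform
+ // range [binsMin, binsMax); index numBins+1 covers [binsMax, bins10XMax]; the last index
+ // holds values above bins10XMax.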
+ private int getIndex(long value) {
+ if (value < this.binsMin) {
+ return 0;
+ } else if (value > this.bins10XMax) {
+ return this.counts.length - 1;
+ } else if (value >= this.binsMax) {
+ return this.counts.length - 2;
+ }
+ // compute the position
+ return 1 + (int) ((value - this.binsMin) * (this.counts.length - 3) /
+ (this.binsMax - this.binsMin));
+
+ }
+
+ /**
+ * Adds a value to the histogram.
+ */
+ public void add(long value, long count) {
+ if (value < 0) {
+ // The whole computation is completely thrown off if there are negative numbers
+ //
+ // Normally we would throw an IllegalArgumentException; however, this is the metrics
+ // system and it should be completely safe at all times.
+ // So silently discard the value.
+ return;
+ }
+ AtomicUtils.updateMin(min, value);
+ AtomicUtils.updateMax(max, value);
+
+ this.count.add(count);
+ this.total.add(value * count);
+
+ int pos = getIndex(value);
+ this.counts[pos].add(count);
+
+ // hasData must be updated last
+ this.hasData.set(true);
+ }
+
+ /**
+ * Computes the quantiles given the ratios.
+ */
+ public long[] getQuantiles(double[] quantiles) {
+ if (!this.hasData.get()) {
+ // No data yet.
+ return new long[quantiles.length];
+ }
+
+ // Make a snapshot of lowerCounter, higherCounter and bins.counts to counts.
+ // This is not synchronized, but since the counters only accumulate, the result is a good
+ // approximation of a snapshot.
+ long[] counts = new long[this.counts.length];
+ long total = 0L;
+ for (int i = 0; i < this.counts.length; i++) {
+ counts[i] = this.counts[i].get();
+ total += counts[i];
+ }
+
+ int rIndex = 0;
+ double qCount = total * quantiles[0];
+ long cum = 0L;
+
+ long[] res = new long[quantiles.length];
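+ // Walk the bins in increasing value order, accumulating counts until each requested
+ // quantile falls inside the current bin's range.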
+ countsLoop: for (int i = 0; i < counts.length; i++) {
+ // mn and mx define a value range
+ long mn, mx;
+ if (i == 0) {
+ mn = this.min.get();
+ mx = this.binsMin;
+ } else if (i == counts.length - 1) {
+ mn = this.bins10XMax;
+ mx = this.max.get();
+ } else if (i == counts.length - 2) {
+ mn = this.binsMax;
+ mx = this.bins10XMax;
+ } else {
+ mn = this.binsMin + (i - 1) * (this.binsMax - this.binsMin) / (this.counts.length - 3);
+ mx = this.binsMin + i * (this.binsMax - this.binsMin) / (this.counts.length - 3);
+ }
+
+ if (mx < this.min.get()) {
+ continue;
+ }
+ if (mn > this.max.get()) {
+ break;
+ }
+ mn = Math.max(mn, this.min.get());
+ mx = Math.min(mx, this.max.get());
+
+ // lastCum/cum are the cumulative counts corresponding to mn/mx
+ double lastCum = cum;
+ cum += counts[i];
+
+ // fill in the results while qCount is within the current range.
+ while (qCount <= cum) {
+ if (cum == lastCum) {
+ res[rIndex] = mn;
+ } else {
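+ // Interpolate linearly within [mn, mx], assuming values are spread uniformly across the bin.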
+ res[rIndex] = (long) ((qCount - lastCum) * (mx - mn) / (cum - lastCum) + mn);
+ }
+
+ // move to next quantile
+ rIndex++;
+ if (rIndex >= quantiles.length) {
+ break countsLoop;
+ }
+ qCount = total * quantiles[rIndex];
+ }
+ }
+ // In case quantiles contains values >= 100%
+ for (; rIndex < quantiles.length; rIndex++) {
+ res[rIndex] = this.max.get();
+ }
+
+ return res;
+ }
+
+
+ long getNumAtOrBelow(long val) {
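+ // Sums every bin up to and including the bin containing val, so the result is accurate
+ // only to bin-width resolution.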
+ final int targetIndex = getIndex(val);
+ long totalToCurrentIndex = 0;
+ for (int i = 0; i <= targetIndex; i++) {
+ totalToCurrentIndex += this.counts[i].get();
+ }
+ return totalToCurrentIndex;
+ }
+ }
+
+ // The bins counting values. It is replaced with a new instance when reset() is called.
+ private volatile Bins bins;
+
+ /**
+ * Constructor.
+ */
+ public FastLongHistogram() {
+ this(DEFAULT_NBINS);
+ }
+
+ /**
+ * Constructor.
+ * @param numOfBins the number of bins for the histogram. A larger value results in more precise
+ * results but with lower efficiency, and vice versa.
+ */
+ public FastLongHistogram(int numOfBins) {
+ this.bins = new Bins(numOfBins);
+ }
+
+ /**
+ * Constructor setting the bins assuming a uniform distribution within a range.
+ * @param numOfBins the number of bins for the histogram. A larger value results in more precise
+ * results but with lower efficiency, and vice versa.
+ * @param min lower bound of the region, inclusive.
+ * @param max upper bound of the region, inclusive.
+ */
+ public FastLongHistogram(int numOfBins, long min, long max) {
+ this(numOfBins);
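+ // Seed a temporary Bins with the two endpoints, then rebuild bins sized to cover [min, max].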
+ Bins bins = new Bins(numOfBins);
+ bins.add(min, 1);
+ bins.add(max, 1);
+ this.bins = new Bins(bins, numOfBins, 0.01, 0.999);
+ }
+
+ private FastLongHistogram(Bins bins) {
+ this.bins = bins;
+ }
+
+ /**
+ * Adds a value to the histogram.
+ */
+ public void add(long value, long count) {
+ this.bins.add(value, count);
+ }
+
+ /**
+ * Computes the quantiles given the ratios.
+ */
+ public long[] getQuantiles(double[] quantiles) {
+ return this.bins.getQuantiles(quantiles);
+ }
+
+ public long[] getQuantiles() {
+ return this.bins.getQuantiles(DEFAULT_QUANTILES);
+ }
+
+ public long getMin() {
+ long min = this.bins.min.get();
+ return min == Long.MAX_VALUE ? 0 : min; // in case it is not initialized
+ }
+
+ public long getMax() {
+ return this.bins.max.get();
+ }
+
+ public long getCount() {
+ return this.bins.count.get();
+ }
+
+ public long getMean() {
+ Bins bins = this.bins;
+ long count = bins.count.get();
+ long total = bins.total.get();
+ if (count == 0) {
+ return 0;
+ }
+ return total / count;
+ }
+
+ public long getNumAtOrBelow(long value) {
+ return this.bins.getNumAtOrBelow(value);
+ }
+
+ /**
+ * Resets the histogram for new counting.
+ */
+ public FastLongHistogram reset() {
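+ // Swap in fresh bins sized from the old distribution, and return a snapshot of the old data.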
+ Bins oldBins = this.bins;
+ this.bins = new Bins(this.bins, this.bins.counts.length - 3, 0.01, 0.99);
+ return new FastLongHistogram(oldBins);
+ }
+}
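
For readers skimming the new class, a minimal usage sketch (illustrative only, not
part of the patch; the bin count, value range, and sample values are made up):

    import org.apache.hadoop.hbase.util.FastLongHistogram;

    public class HistogramSketch {
      public static void main(String[] args) {
        // 255 bins, assuming values are roughly uniform over [0, 1000].
        FastLongHistogram hist = new FastLongHistogram(255, 0, 1000);
        for (long v : new long[] { 12, 35, 7, 250, 90 }) {
          hist.add(v, 1); // record one observation of value v
        }
        // Estimated p50 and p99 of everything added so far.
        long[] q = hist.getQuantiles(new double[] { 0.5, 0.99 });
        System.out.println("p50=" + q[0] + " p99=" + q[1] + " mean=" + hist.getMean());
        hist.reset(); // start a fresh counting window sized from the old data
      }
    }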
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestFastLongHistogram.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestFastLongHistogram.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestFastLongHistogram.java
new file mode 100644
index 0000000..7304e2d
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestFastLongHistogram.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.util.Arrays;
+import java.util.Random;
+
+
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Test cases for FastLongHistogram.
+ */
+@Category(SmallTests.class)
+public class TestFastLongHistogram {
+
+ private static void doTestUniform(FastLongHistogram hist) {
+ long[] VALUES = { 0, 10, 20, 30, 40, 50 };
+ double[] qs = new double[VALUES.length];
+ for (int i = 0; i < qs.length; i++) {
+ qs[i] = (double) VALUES[i] / VALUES[VALUES.length - 1];
+ }
+
+ for (int i = 0; i < 10; i++) {
+ for (long v : VALUES) {
+ hist.add(v, 1);
+ }
+ long[] vals = hist.getQuantiles(qs);
+ System.out.println(Arrays.toString(vals));
+ for (int j = 0; j < qs.length; j++) {
+ Assert.assertTrue(j + "-th element org: " + VALUES[j] + ", act: " + vals[j],
+ Math.abs(vals[j] - VALUES[j]) <= 10);
+ }
+ hist.reset();
+ }
+ }
+
+ @Test
+ public void testUniform() {
+ FastLongHistogram hist = new FastLongHistogram(100, 0, 50);
+ doTestUniform(hist);
+ }
+
+ @Test
+ public void testAdaptionOfChange() {
+ // assumes a uniform distribution
+ FastLongHistogram hist = new FastLongHistogram(100, 0, 100);
+
+ Random rand = new Random();
+
+ for (int n = 0; n < 10; n++) {
+ for (int i = 0; i < 900; i++) {
+ hist.add(rand.nextInt(100), 1);
+ }
+
+ // add 10% outliers; this breaks the uniform assumption, relying on bins10XMax to absorb them
+ for (int i = 0; i < 100; i++) {
+ hist.add(1000 + rand.nextInt(100), 1);
+ }
+
+ long[] vals = hist.getQuantiles(new double[] { 0.25, 0.75, 0.95 });
+ System.out.println(Arrays.toString(vals));
+ if (n == 0) {
+ Assert.assertTrue("Out of possible value", vals[0] >= 0 && vals[0] <= 50);
+ Assert.assertTrue("Out of possible value", vals[1] >= 50 && vals[1] <= 100);
+ Assert.assertTrue("Out of possible value", vals[2] >= 900 && vals[2] <= 1100);
+ }
+
+ hist.reset();
+ }
+ }
+
+
+ @Test
+ public void testGetNumAtOrBelow() {
+ long[] VALUES = { 1, 10, 20, 30, 40, 50 };
+
+ FastLongHistogram h = new FastLongHistogram();
+ for (long v : VALUES) {
+ for (int i = 0; i < 100; i++) {
+ h.add(v, 1);
+ }
+ }
+
+ h.add(Integer.MAX_VALUE, 1);
+
+ h.reset();
+
+ for (long v : VALUES) {
+ for (int i = 0; i < 100; i++) {
+ h.add(v, 1);
+ }
+ }
+ // Add something way out there to make sure it doesn't throw off the counts.
+ h.add(Integer.MAX_VALUE, 1);
+
+ assertEquals(100, h.getNumAtOrBelow(1));
+ assertEquals(200, h.getNumAtOrBelow(11));
+ assertEquals(601, h.getNumAtOrBelow(Long.MAX_VALUE));
+ }
+
+
+ @Test
+ public void testSameValues() {
+ FastLongHistogram hist = new FastLongHistogram(100);
+
+ hist.add(50, 100);
+
+ hist.reset();
+ doTestUniform(hist);
+ }
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index 9424691..496b3cf 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -197,6 +197,7 @@
<include>RowProcessor.proto</include>
<include>SecureBulkLoad.proto</include>
<include>Snapshot.proto</include>
+ <include>Table.proto</include>
<include>Tracing.proto</include>
<include>VisibilityLabels.proto</include>
<include>WAL.proto</include>
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java
index b72e6e5..161282c 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java
@@ -1285,11 +1285,11 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// optional bytes family = 2;
/**
@@ -1377,11 +1377,11 @@ public final class AccessControlProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -1477,7 +1477,7 @@ public final class AccessControlProtos {
private int bitField0_;
// optional .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
@@ -1487,13 +1487,13 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -1552,7 +1552,7 @@ public final class AccessControlProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
family_ = com.google.protobuf.ByteString.EMPTY;
qualifier_ = com.google.protobuf.ByteString.EMPTY;
action_ = java.util.Collections.emptyList();
@@ -1797,7 +1797,7 @@ public final class AccessControlProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -1926,9 +1926,9 @@ public final class AccessControlProtos {
private int bitField0_;
// optional .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
@@ -1938,7 +1938,7 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -1948,7 +1948,7 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -1965,7 +1965,7 @@ public final class AccessControlProtos {
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -1978,12 +1978,12 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -1999,7 +1999,7 @@ public final class AccessControlProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -2010,7 +2010,7 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -2018,7 +2018,7 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -2029,11 +2029,11 @@ public final class AccessControlProtos {
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -7462,11 +7462,11 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// optional bytes namespace_name = 3;
/**
@@ -7541,11 +7541,11 @@ public final class AccessControlProtos {
break;
}
case 18: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -7616,7 +7616,7 @@ public final class AccessControlProtos {
// optional .hbase.pb.TableName table_name = 2;
public static final int TABLE_NAME_FIELD_NUMBER = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
@@ -7626,13 +7626,13 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -7654,7 +7654,7 @@ public final class AccessControlProtos {
private void initFields() {
type_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Type.Global;
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
namespaceName_ = com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
@@ -7881,7 +7881,7 @@ public final class AccessControlProtos {
type_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Type.Global;
bitField0_ = (bitField0_ & ~0x00000001);
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -8027,9 +8027,9 @@ public final class AccessControlProtos {
}
// optional .hbase.pb.TableName table_name = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
@@ -8039,7 +8039,7 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -8049,7 +8049,7 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -8066,7 +8066,7 @@ public final class AccessControlProtos {
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -8079,12 +8079,12 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -8100,7 +8100,7 @@ public final class AccessControlProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -8111,7 +8111,7 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -8119,7 +8119,7 @@ public final class AccessControlProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -8130,11 +8130,11 @@ public final class AccessControlProtos {
* <code>optional .hbase.pb.TableName table_name = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -10499,7 +10499,7 @@ public final class AccessControlProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
- "\n\023AccessControl.proto\022\010hbase.pb\032\013HBase.p" +
+ "\n\023AccessControl.proto\022\010hbase.pb\032\013Table.p" +
"roto\"\314\002\n\nPermission\022\'\n\004type\030\001 \002(\0162\031.hbas" +
"e.pb.Permission.Type\0225\n\021global_permissio" +
"n\030\002 \001(\0132\032.hbase.pb.GlobalPermission\022;\n\024n" +
@@ -10648,7 +10648,7 @@ public final class AccessControlProtos {
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.getDescriptor(),
}, assigner);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
index 12950f7..4bd3f5f 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
@@ -22646,7 +22646,7 @@ public final class ClientProtos {
* </pre>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Cursor, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Cursor.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CursorOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Cursor, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Cursor.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CursorOrBuilder>
getCursorFieldBuilder() {
if (cursorBuilder_ == null) {
cursorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/TableProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/TableProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/TableProtos.java
new file mode 100644
index 0000000..9507d01
--- /dev/null
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/TableProtos.java
@@ -0,0 +1,607 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: Table.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class TableProtos {
+ private TableProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface TableNameOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes namespace = 1;
+ /**
+ * <code>required bytes namespace = 1;</code>
+ */
+ boolean hasNamespace();
+ /**
+ * <code>required bytes namespace = 1;</code>
+ */
+ com.google.protobuf.ByteString getNamespace();
+
+ // required bytes qualifier = 2;
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ boolean hasQualifier();
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ com.google.protobuf.ByteString getQualifier();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.TableName}
+ *
+ * <pre>
+ **
+ * Table Name
+ * </pre>
+ */
+ public static final class TableName extends
+ com.google.protobuf.GeneratedMessage
+ implements TableNameOrBuilder {
+ // Use TableName.newBuilder() to construct.
+ private TableName(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private TableName(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final TableName defaultInstance;
+ public static TableName getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public TableName getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private TableName(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ namespace_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ qualifier_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.TableProtos.internal_static_hbase_pb_TableName_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.TableProtos.internal_static_hbase_pb_TableName_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.class, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<TableName> PARSER =
+ new com.google.protobuf.AbstractParser<TableName>() {
+ public TableName parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new TableName(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<TableName> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required bytes namespace = 1;
+ public static final int NAMESPACE_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString namespace_;
+ /**
+ * <code>required bytes namespace = 1;</code>
+ */
+ public boolean hasNamespace() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required bytes namespace = 1;</code>
+ */
+ public com.google.protobuf.ByteString getNamespace() {
+ return namespace_;
+ }
+
+ // required bytes qualifier = 2;
+ public static final int QUALIFIER_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString qualifier_;
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+
+ private void initFields() {
+ namespace_ = com.google.protobuf.ByteString.EMPTY;
+ qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasNamespace()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasQualifier()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, namespace_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, qualifier_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, namespace_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, qualifier_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName other = (org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName) obj;
+
+ boolean result = true;
+ result = result && (hasNamespace() == other.hasNamespace());
+ if (hasNamespace()) {
+ result = result && getNamespace()
+ .equals(other.getNamespace());
+ }
+ result = result && (hasQualifier() == other.hasQualifier());
+ if (hasQualifier()) {
+ result = result && getQualifier()
+ .equals(other.getQualifier());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasNamespace()) {
+ hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
+ hash = (53 * hash) + getNamespace().hashCode();
+ }
+ if (hasQualifier()) {
+ hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
+ hash = (53 * hash) + getQualifier().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.TableName}
+ *
+ * <pre>
+ **
+ * Table Name
+ * </pre>
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.TableProtos.internal_static_hbase_pb_TableName_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.TableProtos.internal_static_hbase_pb_TableName_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.class, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ namespace_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.TableProtos.internal_static_hbase_pb_TableName_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName build() {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName result = new org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.namespace_ = namespace_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.qualifier_ = qualifier_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) return this;
+ if (other.hasNamespace()) {
+ setNamespace(other.getNamespace());
+ }
+ if (other.hasQualifier()) {
+ setQualifier(other.getQualifier());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasNamespace()) {
+
+ return false;
+ }
+ if (!hasQualifier()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required bytes namespace = 1;
+ private com.google.protobuf.ByteString namespace_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>required bytes namespace = 1;</code>
+ */
+ public boolean hasNamespace() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required bytes namespace = 1;</code>
+ */
+ public com.google.protobuf.ByteString getNamespace() {
+ return namespace_;
+ }
+ /**
+ * <code>required bytes namespace = 1;</code>
+ */
+ public Builder setNamespace(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ namespace_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required bytes namespace = 1;</code>
+ */
+ public Builder clearNamespace() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ namespace_ = getDefaultInstance().getNamespace();
+ onChanged();
+ return this;
+ }
+
+ // required bytes qualifier = 2;
+ private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public Builder setQualifier(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ qualifier_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public Builder clearQualifier() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ qualifier_ = getDefaultInstance().getQualifier();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.TableName)
+ }
+
+ static {
+ defaultInstance = new TableName(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.TableName)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_TableName_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_TableName_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\013Table.proto\022\010hbase.pb\"1\n\tTableName\022\021\n\t" +
+ "namespace\030\001 \002(\014\022\021\n\tqualifier\030\002 \002(\014B>\n*or" +
+ "g.apache.hadoop.hbase.protobuf.generated" +
+ "B\013TableProtosH\001\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_hbase_pb_TableName_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_hbase_pb_TableName_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_TableName_descriptor,
+ new java.lang.String[] { "Namespace", "Qualifier", });
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
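
For context on the relocation, a minimal sketch of constructing the TableName message
via the new outer class (illustrative only; the namespace and qualifier strings are
made up):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

    public class TableNameSketch {
      public static void main(String[] args) {
        TableProtos.TableName tableName = TableProtos.TableName.newBuilder()
            .setNamespace(ByteString.copyFromUtf8("default"))  // required field 1
            .setQualifier(ByteString.copyFromUtf8("mytable"))  // required field 2
            .build(); // build() throws if either required field is unset
        System.out.println(tableName.getQualifier().toStringUtf8());
      }
    }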
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
index 9bae06f..a466e6c 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
@@ -8813,11 +8813,11 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// required bytes encoded_region_name = 2;
/**
@@ -8921,11 +8921,11 @@ public final class WALProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -8996,7 +8996,7 @@ public final class WALProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -9006,13 +9006,13 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -9085,7 +9085,7 @@ public final class WALProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
stores_ = java.util.Collections.emptyList();
bulkloadSeqNum_ = 0L;
@@ -9347,7 +9347,7 @@ public final class WALProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -9516,9 +9516,9 @@ public final class WALProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -9528,7 +9528,7 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -9538,7 +9538,7 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -9555,7 +9555,7 @@ public final class WALProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -9568,12 +9568,12 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -9589,7 +9589,7 @@ public final class WALProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -9600,7 +9600,7 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -9608,7 +9608,7 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -9619,11 +9619,11 @@ public final class WALProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -11978,56 +11978,56 @@ public final class WALProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
- "\n\tWAL.proto\022\010hbase.pb\032\013HBase.proto\032\014Clie" +
- "nt.proto\"\217\001\n\tWALHeader\022\027\n\017has_compressio" +
- "n\030\001 \001(\010\022\026\n\016encryption_key\030\002 \001(\014\022\033\n\023has_t" +
- "ag_compression\030\003 \001(\010\022\027\n\017writer_cls_name\030" +
- "\004 \001(\t\022\033\n\023cell_codec_cls_name\030\005 \001(\t\"\273\002\n\006W" +
- "ALKey\022\033\n\023encoded_region_name\030\001 \002(\014\022\022\n\nta" +
- "ble_name\030\002 \002(\014\022\033\n\023log_sequence_number\030\003 " +
- "\002(\004\022\022\n\nwrite_time\030\004 \002(\004\022&\n\ncluster_id\030\005 " +
- "\001(\0132\016.hbase.pb.UUIDB\002\030\001\022%\n\006scopes\030\006 \003(\0132" +
- "\025.hbase.pb.FamilyScope\022\032\n\022following_kv_c",
- "ount\030\007 \001(\r\022#\n\013cluster_ids\030\010 \003(\0132\016.hbase." +
- "pb.UUID\022\022\n\nnonceGroup\030\t \001(\004\022\r\n\005nonce\030\n \001" +
- "(\004\022\034\n\024orig_sequence_number\030\013 \001(\004\"F\n\013Fami" +
- "lyScope\022\016\n\006family\030\001 \002(\014\022\'\n\nscope_type\030\002 " +
- "\002(\0162\023.hbase.pb.ScopeType\"\276\001\n\024CompactionD" +
- "escriptor\022\022\n\ntable_name\030\001 \002(\014\022\033\n\023encoded" +
- "_region_name\030\002 \002(\014\022\023\n\013family_name\030\003 \002(\014\022" +
- "\030\n\020compaction_input\030\004 \003(\t\022\031\n\021compaction_" +
- "output\030\005 \003(\t\022\026\n\016store_home_dir\030\006 \002(\t\022\023\n\013" +
- "region_name\030\007 \001(\014\"\244\003\n\017FlushDescriptor\0225\n",
- "\006action\030\001 \002(\0162%.hbase.pb.FlushDescriptor" +
- ".FlushAction\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023enco" +
- "ded_region_name\030\003 \002(\014\022\035\n\025flush_sequence_" +
- "number\030\004 \001(\004\022E\n\rstore_flushes\030\005 \003(\0132..hb" +
- "ase.pb.FlushDescriptor.StoreFlushDescrip" +
- "tor\022\023\n\013region_name\030\006 \001(\014\032Y\n\024StoreFlushDe" +
- "scriptor\022\023\n\013family_name\030\001 \002(\014\022\026\n\016store_h" +
- "ome_dir\030\002 \002(\t\022\024\n\014flush_output\030\003 \003(\t\"S\n\013F" +
- "lushAction\022\017\n\013START_FLUSH\020\000\022\020\n\014COMMIT_FL" +
- "USH\020\001\022\017\n\013ABORT_FLUSH\020\002\022\020\n\014CANNOT_FLUSH\020\003",
- "\"q\n\017StoreDescriptor\022\023\n\013family_name\030\001 \002(\014" +
- "\022\026\n\016store_home_dir\030\002 \002(\t\022\022\n\nstore_file\030\003" +
- " \003(\t\022\035\n\025store_file_size_bytes\030\004 \001(\004\"\237\001\n\022" +
- "BulkLoadDescriptor\022\'\n\ntable_name\030\001 \002(\0132\023" +
- ".hbase.pb.TableName\022\033\n\023encoded_region_na" +
- "me\030\002 \002(\014\022)\n\006stores\030\003 \003(\0132\031.hbase.pb.Stor" +
- "eDescriptor\022\030\n\020bulkload_seq_num\030\004 \002(\003\"\272\002" +
- "\n\025RegionEventDescriptor\022=\n\nevent_type\030\001 " +
- "\002(\0162).hbase.pb.RegionEventDescriptor.Eve" +
- "ntType\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023encoded_re",
- "gion_name\030\003 \002(\014\022\033\n\023log_sequence_number\030\004" +
- " \001(\004\022)\n\006stores\030\005 \003(\0132\031.hbase.pb.StoreDes" +
- "criptor\022$\n\006server\030\006 \001(\0132\024.hbase.pb.Serve" +
- "rName\022\023\n\013region_name\030\007 \001(\014\".\n\tEventType\022" +
- "\017\n\013REGION_OPEN\020\000\022\020\n\014REGION_CLOSE\020\001\"\014\n\nWA" +
- "LTrailer*d\n\tScopeType\022\033\n\027REPLICATION_SCO" +
- "PE_LOCAL\020\000\022\034\n\030REPLICATION_SCOPE_GLOBAL\020\001" +
- "\022\034\n\030REPLICATION_SCOPE_SERIAL\020\002B?\n*org.ap" +
- "ache.hadoop.hbase.protobuf.generatedB\tWA" +
- "LProtosH\001\210\001\000\240\001\001"
+ "\n\tWAL.proto\022\010hbase.pb\032\013Table.proto\032\013HBas" +
+ "e.proto\032\014Client.proto\"\217\001\n\tWALHeader\022\027\n\017h" +
+ "as_compression\030\001 \001(\010\022\026\n\016encryption_key\030\002" +
+ " \001(\014\022\033\n\023has_tag_compression\030\003 \001(\010\022\027\n\017wri" +
+ "ter_cls_name\030\004 \001(\t\022\033\n\023cell_codec_cls_nam" +
+ "e\030\005 \001(\t\"\273\002\n\006WALKey\022\033\n\023encoded_region_nam" +
+ "e\030\001 \002(\014\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023log_seque" +
+ "nce_number\030\003 \002(\004\022\022\n\nwrite_time\030\004 \002(\004\022&\n\n" +
+ "cluster_id\030\005 \001(\0132\016.hbase.pb.UUIDB\002\030\001\022%\n\006" +
+ "scopes\030\006 \003(\0132\025.hbase.pb.FamilyScope\022\032\n\022f",
+ "ollowing_kv_count\030\007 \001(\r\022#\n\013cluster_ids\030\010" +
+ " \003(\0132\016.hbase.pb.UUID\022\022\n\nnonceGroup\030\t \001(\004" +
+ "\022\r\n\005nonce\030\n \001(\004\022\034\n\024orig_sequence_number\030" +
+ "\013 \001(\004\"F\n\013FamilyScope\022\016\n\006family\030\001 \002(\014\022\'\n\n" +
+ "scope_type\030\002 \002(\0162\023.hbase.pb.ScopeType\"\276\001" +
+ "\n\024CompactionDescriptor\022\022\n\ntable_name\030\001 \002" +
+ "(\014\022\033\n\023encoded_region_name\030\002 \002(\014\022\023\n\013famil" +
+ "y_name\030\003 \002(\014\022\030\n\020compaction_input\030\004 \003(\t\022\031" +
+ "\n\021compaction_output\030\005 \003(\t\022\026\n\016store_home_" +
+ "dir\030\006 \002(\t\022\023\n\013region_name\030\007 \001(\014\"\244\003\n\017Flush",
+ "Descriptor\0225\n\006action\030\001 \002(\0162%.hbase.pb.Fl" +
+ "ushDescriptor.FlushAction\022\022\n\ntable_name\030" +
+ "\002 \002(\014\022\033\n\023encoded_region_name\030\003 \002(\014\022\035\n\025fl" +
+ "ush_sequence_number\030\004 \001(\004\022E\n\rstore_flush" +
+ "es\030\005 \003(\0132..hbase.pb.FlushDescriptor.Stor" +
+ "eFlushDescriptor\022\023\n\013region_name\030\006 \001(\014\032Y\n" +
+ "\024StoreFlushDescriptor\022\023\n\013family_name\030\001 \002" +
+ "(\014\022\026\n\016store_home_dir\030\002 \002(\t\022\024\n\014flush_outp" +
+ "ut\030\003 \003(\t\"S\n\013FlushAction\022\017\n\013START_FLUSH\020\000" +
+ "\022\020\n\014COMMIT_FLUSH\020\001\022\017\n\013ABORT_FLUSH\020\002\022\020\n\014C",
+ "ANNOT_FLUSH\020\003\"q\n\017StoreDescriptor\022\023\n\013fami" +
+ "ly_name\030\001 \002(\014\022\026\n\016store_home_dir\030\002 \002(\t\022\022\n" +
+ "\nstore_file\030\003 \003(\t\022\035\n\025store_file_size_byt" +
+ "es\030\004 \001(\004\"\237\001\n\022BulkLoadDescriptor\022\'\n\ntable" +
+ "_name\030\001 \002(\0132\023.hbase.pb.TableName\022\033\n\023enco" +
+ "ded_region_name\030\002 \002(\014\022)\n\006stores\030\003 \003(\0132\031." +
+ "hbase.pb.StoreDescriptor\022\030\n\020bulkload_seq" +
+ "_num\030\004 \002(\003\"\272\002\n\025RegionEventDescriptor\022=\n\n" +
+ "event_type\030\001 \002(\0162).hbase.pb.RegionEventD" +
+ "escriptor.EventType\022\022\n\ntable_name\030\002 \002(\014\022",
+ "\033\n\023encoded_region_name\030\003 \002(\014\022\033\n\023log_sequ" +
+ "ence_number\030\004 \001(\004\022)\n\006stores\030\005 \003(\0132\031.hbas" +
+ "e.pb.StoreDescriptor\022$\n\006server\030\006 \001(\0132\024.h" +
+ "base.pb.ServerName\022\023\n\013region_name\030\007 \001(\014\"" +
+ ".\n\tEventType\022\017\n\013REGION_OPEN\020\000\022\020\n\014REGION_" +
+ "CLOSE\020\001\"\014\n\nWALTrailer*d\n\tScopeType\022\033\n\027RE" +
+ "PLICATION_SCOPE_LOCAL\020\000\022\034\n\030REPLICATION_S" +
+ "COPE_GLOBAL\020\001\022\034\n\030REPLICATION_SCOPE_SERIA" +
+ "L\020\002B?\n*org.apache.hadoop.hbase.protobuf." +
+ "generatedB\tWALProtosH\001\210\001\000\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -12100,6 +12100,7 @@ public final class WALProtos {
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
}, assigner);
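
The prepended entry in this dependency array mirrors the new import "Table.proto" at the top of WAL.proto: a generated descriptor must list one FileDescriptor per import, in import order. A minimal sketch (class name hypothetical; assumes the patched generated classes are on the classpath) that prints that order:

    import com.google.protobuf.Descriptors.FileDescriptor;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos;

    public class PrintWalProtoDeps {
      public static void main(String[] args) {
        // After this patch the output should be Table.proto, HBase.proto,
        // Client.proto -- the same order as the imports in WAL.proto.
        for (FileDescriptor dep : WALProtos.getDescriptor().getDependencies()) {
          System.out.println(dep.getName());
        }
      }
    }
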
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
index fb06a78..fc181a8 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java
@@ -5043,11 +5043,11 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// repeated bytes families = 2;
/**
@@ -5115,11 +5115,11 @@ public final class ZooKeeperProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -5180,7 +5180,7 @@ public final class ZooKeeperProtos {
private int bitField0_;
// optional .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
@@ -5190,13 +5190,13 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -5224,7 +5224,7 @@ public final class ZooKeeperProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
families_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
@@ -5435,7 +5435,7 @@ public final class ZooKeeperProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -5546,9 +5546,9 @@ public final class ZooKeeperProtos {
private int bitField0_;
// optional .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
@@ -5558,7 +5558,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -5568,7 +5568,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -5585,7 +5585,7 @@ public final class ZooKeeperProtos {
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -5598,12 +5598,12 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -5619,7 +5619,7 @@ public final class ZooKeeperProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -5630,7 +5630,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -5638,7 +5638,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -5649,11 +5649,11 @@ public final class ZooKeeperProtos {
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -9260,11 +9260,11 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// optional .hbase.pb.ServerName lock_owner = 2;
/**
@@ -9382,11 +9382,11 @@ public final class ZooKeeperProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -9469,7 +9469,7 @@ public final class ZooKeeperProtos {
private int bitField0_;
// optional .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
@@ -9479,13 +9479,13 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -9603,7 +9603,7 @@ public final class ZooKeeperProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
lockOwner_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
threadId_ = 0L;
isShared_ = false;
@@ -9892,7 +9892,7 @@ public final class ZooKeeperProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -10047,9 +10047,9 @@ public final class ZooKeeperProtos {
private int bitField0_;
// optional .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
@@ -10059,7 +10059,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -10069,7 +10069,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -10086,7 +10086,7 @@ public final class ZooKeeperProtos {
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -10099,12 +10099,12 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -10120,7 +10120,7 @@ public final class ZooKeeperProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -10131,7 +10131,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -10139,7 +10139,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -10150,11 +10150,11 @@ public final class ZooKeeperProtos {
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -10982,47 +10982,47 @@ public final class ZooKeeperProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
- "\n\017ZooKeeper.proto\022\010hbase.pb\032\013HBase.proto" +
- "\032\023ClusterStatus.proto\"y\n\020MetaRegionServe" +
- "r\022$\n\006server\030\001 \002(\0132\024.hbase.pb.ServerName\022" +
- "\023\n\013rpc_version\030\002 \001(\r\022*\n\005state\030\003 \001(\0162\033.hb" +
- "ase.pb.RegionState.State\"V\n\006Master\022$\n\006ma" +
- "ster\030\001 \002(\0132\024.hbase.pb.ServerName\022\023\n\013rpc_" +
- "version\030\002 \001(\r\022\021\n\tinfo_port\030\003 \001(\r\"\037\n\tClus" +
- "terUp\022\022\n\nstart_date\030\001 \002(\t\"\221\001\n\020RegionTran" +
- "sition\022\027\n\017event_type_code\030\001 \002(\r\022\023\n\013regio" +
- "n_name\030\002 \002(\014\022\023\n\013create_time\030\003 \002(\004\022)\n\013ser",
- "ver_name\030\004 \002(\0132\024.hbase.pb.ServerName\022\017\n\007" +
- "payload\030\005 \001(\014\"\247\002\n\014SplitLogTask\022+\n\005state\030" +
- "\001 \002(\0162\034.hbase.pb.SplitLogTask.State\022)\n\013s" +
- "erver_name\030\002 \002(\0132\024.hbase.pb.ServerName\022:" +
- "\n\004mode\030\003 \001(\0162#.hbase.pb.SplitLogTask.Rec" +
- "overyMode:\007UNKNOWN\"C\n\005State\022\016\n\nUNASSIGNE" +
- "D\020\000\022\t\n\005OWNED\020\001\022\014\n\010RESIGNED\020\002\022\010\n\004DONE\020\003\022\007" +
- "\n\003ERR\020\004\">\n\014RecoveryMode\022\013\n\007UNKNOWN\020\000\022\021\n\r" +
- "LOG_SPLITTING\020\001\022\016\n\nLOG_REPLAY\020\002\"w\n\005Table" +
- "\022-\n\005state\030\001 \002(\0162\025.hbase.pb.Table.State:\007",
- "ENABLED\"?\n\005State\022\013\n\007ENABLED\020\000\022\014\n\010DISABLE" +
- "D\020\001\022\r\n\tDISABLING\020\002\022\014\n\010ENABLING\020\003\"D\n\007Tabl" +
- "eCF\022\'\n\ntable_name\030\001 \001(\0132\023.hbase.pb.Table" +
- "Name\022\020\n\010families\030\002 \003(\014\"\330\001\n\017ReplicationPe" +
- "er\022\022\n\nclusterkey\030\001 \002(\t\022\037\n\027replicationEnd" +
- "pointImpl\030\002 \001(\t\022&\n\004data\030\003 \003(\0132\030.hbase.pb" +
- ".BytesBytesPair\022/\n\rconfiguration\030\004 \003(\0132\030" +
- ".hbase.pb.NameStringPair\022$\n\ttable_cfs\030\005 " +
- "\003(\0132\021.hbase.pb.TableCF\022\021\n\tbandwidth\030\006 \001(" +
- "\003\"g\n\020ReplicationState\022/\n\005state\030\001 \002(\0162 .h",
- "base.pb.ReplicationState.State\"\"\n\005State\022" +
- "\013\n\007ENABLED\020\000\022\014\n\010DISABLED\020\001\"+\n\027Replicatio" +
- "nHLogPosition\022\020\n\010position\030\001 \002(\003\"%\n\017Repli" +
- "cationLock\022\022\n\nlock_owner\030\001 \002(\t\"\252\001\n\tTable" +
- "Lock\022\'\n\ntable_name\030\001 \001(\0132\023.hbase.pb.Tabl" +
- "eName\022(\n\nlock_owner\030\002 \001(\0132\024.hbase.pb.Ser" +
- "verName\022\021\n\tthread_id\030\003 \001(\003\022\021\n\tis_shared\030" +
- "\004 \001(\010\022\017\n\007purpose\030\005 \001(\t\022\023\n\013create_time\030\006 " +
- "\001(\003\"\036\n\013SwitchState\022\017\n\007enabled\030\001 \001(\010BE\n*o" +
- "rg.apache.hadoop.hbase.protobuf.generate",
- "dB\017ZooKeeperProtosH\001\210\001\001\240\001\001"
+ "\n\017ZooKeeper.proto\022\010hbase.pb\032\013Table.proto" +
+ "\032\013HBase.proto\032\023ClusterStatus.proto\"y\n\020Me" +
+ "taRegionServer\022$\n\006server\030\001 \002(\0132\024.hbase.p" +
+ "b.ServerName\022\023\n\013rpc_version\030\002 \001(\r\022*\n\005sta" +
+ "te\030\003 \001(\0162\033.hbase.pb.RegionState.State\"V\n" +
+ "\006Master\022$\n\006master\030\001 \002(\0132\024.hbase.pb.Serve" +
+ "rName\022\023\n\013rpc_version\030\002 \001(\r\022\021\n\tinfo_port\030" +
+ "\003 \001(\r\"\037\n\tClusterUp\022\022\n\nstart_date\030\001 \002(\t\"\221" +
+ "\001\n\020RegionTransition\022\027\n\017event_type_code\030\001" +
+ " \002(\r\022\023\n\013region_name\030\002 \002(\014\022\023\n\013create_time",
+ "\030\003 \002(\004\022)\n\013server_name\030\004 \002(\0132\024.hbase.pb.S" +
+ "erverName\022\017\n\007payload\030\005 \001(\014\"\247\002\n\014SplitLogT" +
+ "ask\022+\n\005state\030\001 \002(\0162\034.hbase.pb.SplitLogTa" +
+ "sk.State\022)\n\013server_name\030\002 \002(\0132\024.hbase.pb" +
+ ".ServerName\022:\n\004mode\030\003 \001(\0162#.hbase.pb.Spl" +
+ "itLogTask.RecoveryMode:\007UNKNOWN\"C\n\005State" +
+ "\022\016\n\nUNASSIGNED\020\000\022\t\n\005OWNED\020\001\022\014\n\010RESIGNED\020" +
+ "\002\022\010\n\004DONE\020\003\022\007\n\003ERR\020\004\">\n\014RecoveryMode\022\013\n\007" +
+ "UNKNOWN\020\000\022\021\n\rLOG_SPLITTING\020\001\022\016\n\nLOG_REPL" +
+ "AY\020\002\"w\n\005Table\022-\n\005state\030\001 \002(\0162\025.hbase.pb.",
+ "Table.State:\007ENABLED\"?\n\005State\022\013\n\007ENABLED" +
+ "\020\000\022\014\n\010DISABLED\020\001\022\r\n\tDISABLING\020\002\022\014\n\010ENABL" +
+ "ING\020\003\"D\n\007TableCF\022\'\n\ntable_name\030\001 \001(\0132\023.h" +
+ "base.pb.TableName\022\020\n\010families\030\002 \003(\014\"\330\001\n\017" +
+ "ReplicationPeer\022\022\n\nclusterkey\030\001 \002(\t\022\037\n\027r" +
+ "eplicationEndpointImpl\030\002 \001(\t\022&\n\004data\030\003 \003" +
+ "(\0132\030.hbase.pb.BytesBytesPair\022/\n\rconfigur" +
+ "ation\030\004 \003(\0132\030.hbase.pb.NameStringPair\022$\n" +
+ "\ttable_cfs\030\005 \003(\0132\021.hbase.pb.TableCF\022\021\n\tb" +
+ "andwidth\030\006 \001(\003\"g\n\020ReplicationState\022/\n\005st",
+ "ate\030\001 \002(\0162 .hbase.pb.ReplicationState.St" +
+ "ate\"\"\n\005State\022\013\n\007ENABLED\020\000\022\014\n\010DISABLED\020\001\"" +
+ "+\n\027ReplicationHLogPosition\022\020\n\010position\030\001" +
+ " \002(\003\"%\n\017ReplicationLock\022\022\n\nlock_owner\030\001 " +
+ "\002(\t\"\252\001\n\tTableLock\022\'\n\ntable_name\030\001 \001(\0132\023." +
+ "hbase.pb.TableName\022(\n\nlock_owner\030\002 \001(\0132\024" +
+ ".hbase.pb.ServerName\022\021\n\tthread_id\030\003 \001(\003\022" +
+ "\021\n\tis_shared\030\004 \001(\010\022\017\n\007purpose\030\005 \001(\t\022\023\n\013c" +
+ "reate_time\030\006 \001(\003\"\036\n\013SwitchState\022\017\n\007enabl" +
+ "ed\030\001 \001(\010BE\n*org.apache.hadoop.hbase.prot",
+ "obuf.generatedB\017ZooKeeperProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -11113,6 +11113,7 @@ public final class ZooKeeperProtos {
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.getDescriptor(),
}, assigner);
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/protobuf/AccessControl.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/AccessControl.proto b/hbase-protocol/src/main/protobuf/AccessControl.proto
index cc0d4a5..02e3a5b 100644
--- a/hbase-protocol/src/main/protobuf/AccessControl.proto
+++ b/hbase-protocol/src/main/protobuf/AccessControl.proto
@@ -23,7 +23,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;
-import "HBase.proto";
+import "Table.proto";
message Permission {
enum Action {
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/protobuf/HBase.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/HBase.proto b/hbase-protocol/src/main/protobuf/HBase.proto
index 279eb39..67645d4 100644
--- a/hbase-protocol/src/main/protobuf/HBase.proto
+++ b/hbase-protocol/src/main/protobuf/HBase.proto
@@ -25,14 +25,8 @@ option java_generate_equals_and_hash = true;
option optimize_for = SPEED;
import "Cell.proto";
-
-/**
- * Table Name
- */
-message TableName {
- required bytes namespace = 1;
- required bytes qualifier = 2;
-}
+import "Table.proto";
+import "AccessControl.proto";
/**
* Table Schema
@@ -221,3 +215,21 @@ message RegionServerInfo {
optional int32 infoPort = 1;
optional VersionInfo version_info = 2;
}
+
+/**
+ * Description of the snapshot to take
+ */
+message SnapshotDescription {
+ required string name = 1;
+ optional string table = 2; // not needed for delete, but checked for in taking snapshot
+ optional int64 creation_time = 3 [default = 0];
+ enum Type {
+ DISABLED = 0;
+ FLUSH = 1;
+ SKIPFLUSH = 2;
+ }
+ optional Type type = 4 [default = FLUSH];
+ optional int32 version = 5;
+ optional string owner = 6;
+ optional UsersAndPermissions users_and_permissions = 7;
+}
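
The relocated SnapshotDescription keeps the same field numbers and defaults, so callers only change which outer class they import. A minimal sketch of building one against the relocated class (class, snapshot, and table names hypothetical):

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;

    public class BuildSnapshotDescription {
      public static void main(String[] args) {
        // 'name' is the only required field; 'type' defaults to FLUSH.
        SnapshotDescription snap = SnapshotDescription.newBuilder()
            .setName("example-snapshot")           // hypothetical name
            .setTable("example-table")             // optional; checked when taking, not deleting
            .setCreationTime(System.currentTimeMillis())
            .setType(SnapshotDescription.Type.FLUSH)
            .build();
        System.out.println(snap);
      }
    }
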
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/protobuf/Master.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/Master.proto b/hbase-protocol/src/main/protobuf/Master.proto
index d19856b..4db8779 100644
--- a/hbase-protocol/src/main/protobuf/Master.proto
+++ b/hbase-protocol/src/main/protobuf/Master.proto
@@ -26,6 +26,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;
+import "Table.proto";
import "HBase.proto";
import "Client.proto";
import "ClusterStatus.proto";
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/protobuf/MasterProcedure.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/MasterProcedure.proto b/hbase-protocol/src/main/protobuf/MasterProcedure.proto
index 2d2aff4..c1d3789 100644
--- a/hbase-protocol/src/main/protobuf/MasterProcedure.proto
+++ b/hbase-protocol/src/main/protobuf/MasterProcedure.proto
@@ -23,6 +23,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;
+import "Table.proto";
import "HBase.proto";
import "RPC.proto";
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto b/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto
index 814735b..12e7cf7 100644
--- a/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto
+++ b/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto
@@ -23,6 +23,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;
+import 'Table.proto';
import 'HBase.proto';
import 'Client.proto';
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/protobuf/Snapshot.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/Snapshot.proto b/hbase-protocol/src/main/protobuf/Snapshot.proto
index 015787d..ae1a1e6 100644
--- a/hbase-protocol/src/main/protobuf/Snapshot.proto
+++ b/hbase-protocol/src/main/protobuf/Snapshot.proto
@@ -23,28 +23,9 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;
-import "AccessControl.proto";
import "FS.proto";
import "HBase.proto";
-/**
- * Description of the snapshot to take
- */
-message SnapshotDescription {
- required string name = 1;
- optional string table = 2; // not needed for delete, but checked for in taking snapshot
- optional int64 creation_time = 3 [default = 0];
- enum Type {
- DISABLED = 0;
- FLUSH = 1;
- SKIPFLUSH = 2;
- }
- optional Type type = 4 [default = FLUSH];
- optional int32 version = 5;
- optional string owner = 6;
- optional UsersAndPermissions users_and_permissions = 7;
-}
-
message SnapshotFileInfo {
enum Type {
HFILE = 1;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/protobuf/Table.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/Table.proto b/hbase-protocol/src/main/protobuf/Table.proto
new file mode 100644
index 0000000..4452eb2
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/Table.proto
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers that are shared throughout HBase
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "TableProtos";
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+/**
+ * Table Name
+ */
+message TableName {
+ required bytes namespace = 1;
+ required bytes qualifier = 2;
+}
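
Since only the java_outer_classname container changes (HBaseProtos to TableProtos) while the proto package, message name, and field numbers stay the same, bytes written by a pre-patch peer still parse with the relocated class. A minimal round-trip sketch (class name and table values hypothetical):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

    public class TableNameRoundTrip {
      public static void main(String[] args) throws Exception {
        // Both fields are required bytes, which map to ByteString in Java.
        TableProtos.TableName tn = TableProtos.TableName.newBuilder()
            .setNamespace(ByteString.copyFromUtf8("default"))
            .setQualifier(ByteString.copyFromUtf8("example_table"))
            .build();
        // The wire format depends only on field numbers and types, so these
        // bytes match what the old HBaseProtos.TableName would have written.
        byte[] wire = tn.toByteArray();
        TableProtos.TableName parsed = TableProtos.TableName.parseFrom(wire);
        System.out.println(parsed.getQualifier().toStringUtf8());
      }
    }
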
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/protobuf/WAL.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/WAL.proto b/hbase-protocol/src/main/protobuf/WAL.proto
index 5d91c4c..08925f8 100644
--- a/hbase-protocol/src/main/protobuf/WAL.proto
+++ b/hbase-protocol/src/main/protobuf/WAL.proto
@@ -23,6 +23,7 @@ option java_generic_services = false;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;
+import "Table.proto";
import "HBase.proto";
import "Client.proto";
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/protobuf/ZooKeeper.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/ZooKeeper.proto b/hbase-protocol/src/main/protobuf/ZooKeeper.proto
index a632552..1638bf7 100644
--- a/hbase-protocol/src/main/protobuf/ZooKeeper.proto
+++ b/hbase-protocol/src/main/protobuf/ZooKeeper.proto
@@ -26,6 +26,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;
+import "Table.proto";
import "HBase.proto";
import "ClusterStatus.proto";
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 5803297..0ecc131 100644
--- a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -49,7 +49,7 @@ org.apache.hadoop.hbase.HTableDescriptor;
org.apache.hadoop.hbase.HBaseConfiguration;
org.apache.hadoop.hbase.TableName;
org.apache.hadoop.hbase.tool.Canary;
-org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
org.apache.hadoop.hbase.master.DeadServer;
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
index b2f76d2..2d8fdba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
@@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import java.io.IOException;
import java.util.List;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
index f747599..5fe80c0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
@@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import java.io.IOException;
import java.util.List;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
index d6afec0..3fb0858 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
@@ -56,7 +57,6 @@ import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.StoreFile.Reader;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
index 7558147..24c62b2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
@@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
/**
* Defines coprocessor hooks for interacting with operations on the
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
index ad56231..ad16b97 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.Region.Operation;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
@@ -55,7 +56,6 @@ import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
index 9f52850..a8d387a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
@@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
index c7dd282..226ba06 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
@@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
/**
* Provides the coprocessor framework and environment for master oriented
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
index 2661dc1..b3ae957 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
@@ -101,11 +101,11 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// required .hbase.pb.ColumnFamilySchema column_families = 2;
/**
@@ -193,11 +193,11 @@ public final class MasterProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -270,7 +270,7 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -280,13 +280,13 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -345,7 +345,7 @@ public final class MasterProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
nonceGroup_ = 0L;
nonce_ = 0L;
@@ -599,7 +599,7 @@ public final class MasterProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -738,9 +738,9 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -750,7 +750,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -760,7 +760,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -777,7 +777,7 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -790,12 +790,12 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -811,7 +811,7 @@ public final class MasterProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -822,7 +822,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -830,7 +830,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -841,11 +841,11 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -1397,11 +1397,11 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// required bytes column_name = 2;
/**
@@ -1485,11 +1485,11 @@ public final class MasterProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -1554,7 +1554,7 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -1564,13 +1564,13 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -1623,7 +1623,7 @@ public final class MasterProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
columnName_ = com.google.protobuf.ByteString.EMPTY;
nonceGroup_ = 0L;
nonce_ = 0L;
@@ -1872,7 +1872,7 @@ public final class MasterProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -1999,9 +1999,9 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -2011,7 +2011,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -2021,7 +2021,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -2038,7 +2038,7 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -2051,12 +2051,12 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -2072,7 +2072,7 @@ public final class MasterProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -2083,7 +2083,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -2091,7 +2091,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -2102,11 +2102,11 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -2577,11 +2577,11 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// required .hbase.pb.ColumnFamilySchema column_families = 2;
/**
@@ -2669,11 +2669,11 @@ public final class MasterProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -2746,7 +2746,7 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -2756,13 +2756,13 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -2821,7 +2821,7 @@ public final class MasterProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
nonceGroup_ = 0L;
nonce_ = 0L;
@@ -3075,7 +3075,7 @@ public final class MasterProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -3214,9 +3214,9 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -3226,7 +3226,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -3236,7 +3236,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -3253,7 +3253,7 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -3266,12 +3266,12 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -3287,7 +3287,7 @@ public final class MasterProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -3298,7 +3298,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -3306,7 +3306,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -3317,11 +3317,11 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -10319,11 +10319,11 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// optional uint64 nonce_group = 2 [default = 0];
/**
@@ -10397,11 +10397,11 @@ public final class MasterProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -10461,7 +10461,7 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -10471,13 +10471,13 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -10514,7 +10514,7 @@ public final class MasterProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
nonceGroup_ = 0L;
nonce_ = 0L;
}
@@ -10742,7 +10742,7 @@ public final class MasterProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -10856,9 +10856,9 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -10868,7 +10868,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -10878,7 +10878,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -10895,7 +10895,7 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -10908,12 +10908,12 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -10929,7 +10929,7 @@ public final class MasterProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -10940,7 +10940,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -10948,7 +10948,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -10959,11 +10959,11 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -11494,11 +11494,11 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// optional bool preserveSplits = 2 [default = false];
/**
@@ -11582,11 +11582,11 @@ public final class MasterProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -11651,7 +11651,7 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName tableName = 1;
public static final int TABLENAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
@@ -11661,13 +11661,13 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -11720,7 +11720,7 @@ public final class MasterProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
preserveSplits_ = false;
nonceGroup_ = 0L;
nonce_ = 0L;
@@ -11965,7 +11965,7 @@ public final class MasterProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -12088,9 +12088,9 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName tableName = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
@@ -12100,7 +12100,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -12110,7 +12110,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -12127,7 +12127,7 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -12140,12 +12140,12 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -12161,7 +12161,7 @@ public final class MasterProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -12172,7 +12172,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -12180,7 +12180,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -12191,11 +12191,11 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName tableName = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
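The tableName and preserveSplits fields in the hunks above match MasterProtos' TruncateTableRequest; assuming that is the enclosing message, the generated SingleFieldBuilder machinery lets callers edit the nested TableName in place. Note that getTableNameBuilder() also sets the has-bit (the bitField0_ |= 0x00000001 line above), so the required table_name field counts as present once the nested builder is touched. A sketch:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest;

    public class TruncateRequestExample {
      public static void main(String[] args) {
        TruncateTableRequest.Builder b = TruncateTableRequest.newBuilder();
        // Editing through the nested builder keeps parent and child in sync
        // and flags the required table_name field as set.
        b.getTableNameBuilder()
            .setNamespace(ByteString.copyFromUtf8("default"))
            .setQualifier(ByteString.copyFromUtf8("t1"));
        TruncateTableRequest req = b.setPreserveSplits(true).build();
        System.out.println(req.getTableName().getQualifier().toStringUtf8());
      }
    }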
@@ -12663,11 +12663,11 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// optional uint64 nonce_group = 2 [default = 0];
/**
@@ -12741,11 +12741,11 @@ public final class MasterProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -12805,7 +12805,7 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -12815,13 +12815,13 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -12858,7 +12858,7 @@ public final class MasterProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
nonceGroup_ = 0L;
nonce_ = 0L;
}
@@ -13086,7 +13086,7 @@ public final class MasterProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -13200,9 +13200,9 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -13212,7 +13212,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -13222,7 +13222,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -13239,7 +13239,7 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -13252,12 +13252,12 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -13273,7 +13273,7 @@ public final class MasterProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -13284,7 +13284,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -13292,7 +13292,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -13303,11 +13303,11 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -13838,11 +13838,11 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// optional uint64 nonce_group = 2 [default = 0];
/**
@@ -13916,11 +13916,11 @@ public final class MasterProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -13980,7 +13980,7 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -13990,13 +13990,13 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -14033,7 +14033,7 @@ public final class MasterProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
nonceGroup_ = 0L;
nonce_ = 0L;
}
@@ -14261,7 +14261,7 @@ public final class MasterProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -14375,9 +14375,9 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -14387,7 +14387,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -14397,7 +14397,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -14414,7 +14414,7 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -14427,12 +14427,12 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -14448,7 +14448,7 @@ public final class MasterProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -14459,7 +14459,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -14467,7 +14467,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -14478,11 +14478,11 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -15013,11 +15013,11 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// required .hbase.pb.TableSchema table_schema = 2;
/**
@@ -15105,11 +15105,11 @@ public final class MasterProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -15182,7 +15182,7 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -15192,13 +15192,13 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -15257,7 +15257,7 @@ public final class MasterProtos {
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
nonceGroup_ = 0L;
nonce_ = 0L;
@@ -15511,7 +15511,7 @@ public final class MasterProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -15650,9 +15650,9 @@ public final class MasterProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -15662,7 +15662,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -15672,7 +15672,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -15689,7 +15689,7 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -15702,12 +15702,12 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -15723,7 +15723,7 @@ public final class MasterProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -15734,7 +15734,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -15742,7 +15742,7 @@ public final class MasterProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -15753,11 +15753,11 @@ public final class MasterProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -23390,12 +23390,12 @@ public final class MasterProtos {
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
- java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName>
getTableNameList();
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index);
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName(int index);
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
@@ -23403,12 +23403,12 @@ public final class MasterProtos {
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
- java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameOrBuilderList();
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder(
int index);
}
/**
@@ -23464,10 +23464,10 @@ public final class MasterProtos {
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
- tableName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>();
+ tableName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName>();
mutable_bitField0_ |= 0x00000001;
}
- tableName_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry));
+ tableName_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry));
break;
}
}
@@ -23514,17 +23514,17 @@ public final class MasterProtos {
// repeated .hbase.pb.TableName tableName = 1;
public static final int TABLENAME_FIELD_NUMBER = 1;
- private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableName_;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName> tableName_;
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNameList() {
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName> getTableNameList() {
return tableName_;
}
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
- public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameOrBuilderList() {
return tableName_;
}
@@ -23537,13 +23537,13 @@ public final class MasterProtos {
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index) {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName(int index) {
return tableName_.get(index);
}
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder(
int index) {
return tableName_.get(index);
}
@@ -23854,22 +23854,22 @@ public final class MasterProtos {
private int bitField0_;
// repeated .hbase.pb.TableName tableName = 1;
- private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableName_ =
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName> tableName_ =
java.util.Collections.emptyList();
private void ensureTableNameIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
- tableName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>(tableName_);
+ tableName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName>(tableName_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNameList() {
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName> getTableNameList() {
if (tableNameBuilder_ == null) {
return java.util.Collections.unmodifiableList(tableName_);
} else {
@@ -23889,7 +23889,7 @@ public final class MasterProtos {
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index) {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName(int index) {
if (tableNameBuilder_ == null) {
return tableName_.get(index);
} else {
@@ -23900,7 +23900,7 @@ public final class MasterProtos {
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
public Builder setTableName(
- int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ int index, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -23917,7 +23917,7 @@ public final class MasterProtos {
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
public Builder setTableName(
- int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ int index, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
ensureTableNameIsMutable();
tableName_.set(index, builderForValue.build());
@@ -23930,7 +23930,7 @@ public final class MasterProtos {
/**
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
- public Builder addTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder addTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -23947,7 +23947,7 @@ public final class MasterProtos {
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
public Builder addTableName(
- int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ int index, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -23964,7 +23964,7 @@ public final class MasterProtos {
* <code>repeated .hbase.pb.TableName tableName = 1;</code>
*/
public Builder addTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.
<TRUNCATED>
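Every hunk in the diff above is the same mechanical relocation: the generated TableName message moves from the HBaseProtos outer class to TableProtos, while the descriptor it is generated from, .hbase.pb.TableName, stays put (note the unchanged <code>required .hbase.pb.TableName ...</code> javadoc in each hunk). For Java callers that is a source-level rename only; the wire format does not change. Below is a minimal sketch of code against the relocated class, assuming the message keeps the namespace/qualifier bytes fields implied by the builder code above; the class name and sample values in the sketch are hypothetical.

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

    public class TableNameRelocationSketch {
      public static void main(String[] args) throws Exception {
        // Builder API is identical to the pre-move HBaseProtos.TableName;
        // only the enclosing generated outer class changed.
        TableProtos.TableName pb = TableProtos.TableName.newBuilder()
            .setNamespace(ByteString.copyFromUtf8("default"))     // hypothetical namespace
            .setQualifier(ByteString.copyFromUtf8("test_table"))  // hypothetical table
            .build();

        // Because .hbase.pb.TableName itself is untouched, bytes written by
        // the old generated class should parse cleanly here, which is what
        // lets mixed branch-1.3/branch-1.4 processes keep talking.
        byte[] wire = pb.toByteArray();
        TableProtos.TableName parsed = TableProtos.TableName.parseFrom(wire);
        System.out.println(parsed.getQualifier().toStringUtf8());
      }
    }

The round trip above uses the new class on both ends for brevity; in a genuinely mixed deployment the writer would be the old HBaseProtos-generated class, but since both classes are generated from the same .hbase.pb.TableName descriptor the byte stream is identical.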
[05/11] hbase git commit: HBASE-18431 Mitigate compatibility concerns between branch-1.3 and branch-1.4
Posted by ap...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProcedureProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProcedureProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProcedureProtos.java
index d40c1f7..6a8cd7d 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProcedureProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProcedureProtos.java
@@ -3814,11 +3814,11 @@ public final class MasterProcedureProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// optional .hbase.pb.TableSchema table_schema = 4;
/**
@@ -3929,11 +3929,11 @@ public final class MasterProcedureProtos {
break;
}
case 26: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -4045,7 +4045,7 @@ public final class MasterProcedureProtos {
// optional .hbase.pb.TableName table_name = 3;
public static final int TABLE_NAME_FIELD_NUMBER = 3;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
@@ -4055,13 +4055,13 @@ public final class MasterProcedureProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -4126,7 +4126,7 @@ public final class MasterProcedureProtos {
private void initFields() {
userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
preserveSplits_ = false;
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
regionInfo_ = java.util.Collections.emptyList();
}
@@ -4416,7 +4416,7 @@ public final class MasterProcedureProtos {
preserveSplits_ = false;
bitField0_ = (bitField0_ & ~0x00000002);
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -4760,9 +4760,9 @@ public final class MasterProcedureProtos {
}
// optional .hbase.pb.TableName table_name = 3;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
@@ -4772,7 +4772,7 @@ public final class MasterProcedureProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -4782,7 +4782,7 @@ public final class MasterProcedureProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -4799,7 +4799,7 @@ public final class MasterProcedureProtos {
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -4812,12 +4812,12 @@ public final class MasterProcedureProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -4833,7 +4833,7 @@ public final class MasterProcedureProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -4844,7 +4844,7 @@ public final class MasterProcedureProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -4852,7 +4852,7 @@ public final class MasterProcedureProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -4863,11 +4863,11 @@ public final class MasterProcedureProtos {
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -5269,11 +5269,11 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// repeated .hbase.pb.RegionInfo region_info = 3;
/**
@@ -5365,11 +5365,11 @@ public final class MasterProcedureProtos {
break;
}
case 18: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -5452,7 +5452,7 @@ public final class MasterProcedureProtos {
// required .hbase.pb.TableName table_name = 2;
public static final int TABLE_NAME_FIELD_NUMBER = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -5462,13 +5462,13 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -5510,7 +5510,7 @@ public final class MasterProcedureProtos {
private void initFields() {
userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
regionInfo_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
@@ -5756,7 +5756,7 @@ public final class MasterProcedureProtos {
}
bitField0_ = (bitField0_ & ~0x00000001);
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -6035,9 +6035,9 @@ public final class MasterProcedureProtos {
}
// required .hbase.pb.TableName table_name = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -6047,7 +6047,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -6057,7 +6057,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -6074,7 +6074,7 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -6087,12 +6087,12 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -6108,7 +6108,7 @@ public final class MasterProcedureProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -6119,7 +6119,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -6127,7 +6127,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -6138,11 +6138,11 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -8492,11 +8492,11 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3;
/**
@@ -8591,11 +8591,11 @@ public final class MasterProcedureProtos {
break;
}
case 18: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -8693,7 +8693,7 @@ public final class MasterProcedureProtos {
// required .hbase.pb.TableName table_name = 2;
public static final int TABLE_NAME_FIELD_NUMBER = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -8703,13 +8703,13 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -8759,7 +8759,7 @@ public final class MasterProcedureProtos {
private void initFields() {
userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
columnfamilySchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
}
@@ -9034,7 +9034,7 @@ public final class MasterProcedureProtos {
}
bitField0_ = (bitField0_ & ~0x00000001);
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -9314,9 +9314,9 @@ public final class MasterProcedureProtos {
}
// required .hbase.pb.TableName table_name = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -9326,7 +9326,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -9336,7 +9336,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -9353,7 +9353,7 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -9366,12 +9366,12 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -9387,7 +9387,7 @@ public final class MasterProcedureProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -9398,7 +9398,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -9406,7 +9406,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -9417,11 +9417,11 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -9700,11 +9700,11 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// required .hbase.pb.ColumnFamilySchema columnfamily_schema = 3;
/**
@@ -9799,11 +9799,11 @@ public final class MasterProcedureProtos {
break;
}
case 18: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -9901,7 +9901,7 @@ public final class MasterProcedureProtos {
// required .hbase.pb.TableName table_name = 2;
public static final int TABLE_NAME_FIELD_NUMBER = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -9911,13 +9911,13 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -9967,7 +9967,7 @@ public final class MasterProcedureProtos {
private void initFields() {
userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
columnfamilySchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
}
@@ -10242,7 +10242,7 @@ public final class MasterProcedureProtos {
}
bitField0_ = (bitField0_ & ~0x00000001);
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -10522,9 +10522,9 @@ public final class MasterProcedureProtos {
}
// required .hbase.pb.TableName table_name = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -10534,7 +10534,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -10544,7 +10544,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -10561,7 +10561,7 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -10574,12 +10574,12 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -10595,7 +10595,7 @@ public final class MasterProcedureProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -10606,7 +10606,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -10614,7 +10614,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -10625,11 +10625,11 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -10908,11 +10908,11 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// required bytes columnfamily_name = 3;
/**
@@ -11003,11 +11003,11 @@ public final class MasterProcedureProtos {
break;
}
case 18: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -11097,7 +11097,7 @@ public final class MasterProcedureProtos {
// required .hbase.pb.TableName table_name = 2;
public static final int TABLE_NAME_FIELD_NUMBER = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -11107,13 +11107,13 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -11157,7 +11157,7 @@ public final class MasterProcedureProtos {
private void initFields() {
userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
columnfamilyName_ = com.google.protobuf.ByteString.EMPTY;
unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
}
@@ -11427,7 +11427,7 @@ public final class MasterProcedureProtos {
}
bitField0_ = (bitField0_ & ~0x00000001);
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -11695,9 +11695,9 @@ public final class MasterProcedureProtos {
}
// required .hbase.pb.TableName table_name = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -11707,7 +11707,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -11717,7 +11717,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -11734,7 +11734,7 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -11747,12 +11747,12 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -11768,7 +11768,7 @@ public final class MasterProcedureProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -11779,7 +11779,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -11787,7 +11787,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -11798,11 +11798,11 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -12000,11 +12000,11 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// required bool skip_table_state_check = 3;
/**
@@ -12081,11 +12081,11 @@ public final class MasterProcedureProtos {
break;
}
case 18: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -12162,7 +12162,7 @@ public final class MasterProcedureProtos {
// required .hbase.pb.TableName table_name = 2;
public static final int TABLE_NAME_FIELD_NUMBER = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -12172,13 +12172,13 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -12200,7 +12200,7 @@ public final class MasterProcedureProtos {
private void initFields() {
userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
skipTableStateCheck_ = false;
}
private byte memoizedIsInitialized = -1;
@@ -12446,7 +12446,7 @@ public final class MasterProcedureProtos {
}
bitField0_ = (bitField0_ & ~0x00000001);
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -12691,9 +12691,9 @@ public final class MasterProcedureProtos {
}
// required .hbase.pb.TableName table_name = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -12703,7 +12703,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -12713,7 +12713,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -12730,7 +12730,7 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -12743,12 +12743,12 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -12764,7 +12764,7 @@ public final class MasterProcedureProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -12775,7 +12775,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -12783,7 +12783,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -12794,11 +12794,11 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -12876,11 +12876,11 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
// required bool skip_table_state_check = 3;
/**
@@ -12957,11 +12957,11 @@ public final class MasterProcedureProtos {
break;
}
case 18: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -13038,7 +13038,7 @@ public final class MasterProcedureProtos {
// required .hbase.pb.TableName table_name = 2;
public static final int TABLE_NAME_FIELD_NUMBER = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -13048,13 +13048,13 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
@@ -13076,7 +13076,7 @@ public final class MasterProcedureProtos {
private void initFields() {
userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
skipTableStateCheck_ = false;
}
private byte memoizedIsInitialized = -1;
@@ -13322,7 +13322,7 @@ public final class MasterProcedureProtos {
}
bitField0_ = (bitField0_ & ~0x00000001);
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -13567,9 +13567,9 @@ public final class MasterProcedureProtos {
}
// required .hbase.pb.TableName table_name = 2;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
@@ -13579,7 +13579,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -13589,7 +13589,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -13606,7 +13606,7 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -13619,12 +13619,12 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -13640,7 +13640,7 @@ public final class MasterProcedureProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -13651,7 +13651,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -13659,7 +13659,7 @@ public final class MasterProcedureProtos {
/**
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -13670,11 +13670,11 @@ public final class MasterProcedureProtos {
* <code>required .hbase.pb.TableName table_name = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
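
Every hunk above applies the same mechanical retargeting: references to the generated TableName message move from the HBaseProtos outer class to the new TableProtos outer class, while the builder, merge, clear, and parser plumbing stays byte-for-byte identical. A minimal caller-side sketch of the relocated API (the class name TableNameRename is hypothetical, and the two setters assume Table.proto's existing namespace/qualifier byte fields, which this diff does not alter):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

public class TableNameRename {
  public static void main(String[] args) throws Exception {
    // The builder API is the one the hunks above generate; only the outer
    // class qualifier moved from HBaseProtos to TableProtos.
    // NOTE: setNamespace/setQualifier assume Table.proto's existing byte
    // fields; the field layout itself is not touched by this diff.
    TableProtos.TableName name = TableProtos.TableName.newBuilder()
        .setNamespace(ByteString.copyFromUtf8("default"))
        .setQualifier(ByteString.copyFromUtf8("t1"))
        .build();
    // PARSER is the same static parser the generated readMessage() calls use.
    TableProtos.TableName parsed =
        TableProtos.TableName.PARSER.parseFrom(name.toByteString());
    System.out.println(parsed.equals(name)); // true
  }
}
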
@@ -15392,144 +15392,145 @@ public final class MasterProcedureProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
- "\n\025MasterProcedure.proto\022\010hbase.pb\032\013HBase" +
- ".proto\032\tRPC.proto\"\234\001\n\024CreateTableStateDa" +
- "ta\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb.UserInf" +
- "ormation\022+\n\014table_schema\030\002 \002(\0132\025.hbase.p" +
- "b.TableSchema\022)\n\013region_info\030\003 \003(\0132\024.hba" +
- "se.pb.RegionInfo\"\332\001\n\024ModifyTableStateDat" +
- "a\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb.UserInfo" +
- "rmation\0226\n\027unmodified_table_schema\030\002 \001(\013" +
- "2\025.hbase.pb.TableSchema\0224\n\025modified_tabl" +
- "e_schema\030\003 \002(\0132\025.hbase.pb.TableSchema\022&\n",
- "\036delete_column_family_in_modify\030\004 \002(\010\"\340\001" +
- "\n\026TruncateTableStateData\022,\n\tuser_info\030\001 " +
- "\002(\0132\031.hbase.pb.UserInformation\022\027\n\017preser" +
- "ve_splits\030\002 \002(\010\022\'\n\ntable_name\030\003 \001(\0132\023.hb" +
- "ase.pb.TableName\022+\n\014table_schema\030\004 \001(\0132\025" +
- ".hbase.pb.TableSchema\022)\n\013region_info\030\005 \003" +
- "(\0132\024.hbase.pb.RegionInfo\"\230\001\n\024DeleteTable" +
- "StateData\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb." +
- "UserInformation\022\'\n\ntable_name\030\002 \002(\0132\023.hb" +
- "ase.pb.TableName\022)\n\013region_info\030\003 \003(\0132\024.",
- "hbase.pb.RegionInfo\"W\n\030CreateNamespaceSt" +
- "ateData\022;\n\024namespace_descriptor\030\001 \002(\0132\035." +
- "hbase.pb.NamespaceDescriptor\"\237\001\n\030ModifyN" +
- "amespaceStateData\022;\n\024namespace_descripto" +
- "r\030\001 \002(\0132\035.hbase.pb.NamespaceDescriptor\022F" +
- "\n\037unmodified_namespace_descriptor\030\002 \001(\0132" +
- "\035.hbase.pb.NamespaceDescriptor\"o\n\030Delete" +
- "NamespaceStateData\022\026\n\016namespace_name\030\001 \002" +
- "(\t\022;\n\024namespace_descriptor\030\002 \001(\0132\035.hbase" +
- ".pb.NamespaceDescriptor\"\344\001\n\030AddColumnFam",
- "ilyStateData\022,\n\tuser_info\030\001 \002(\0132\031.hbase." +
- "pb.UserInformation\022\'\n\ntable_name\030\002 \002(\0132\023" +
- ".hbase.pb.TableName\0229\n\023columnfamily_sche" +
- "ma\030\003 \002(\0132\034.hbase.pb.ColumnFamilySchema\0226" +
- "\n\027unmodified_table_schema\030\004 \001(\0132\025.hbase." +
- "pb.TableSchema\"\347\001\n\033ModifyColumnFamilySta" +
- "teData\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb.Use" +
- "rInformation\022\'\n\ntable_name\030\002 \002(\0132\023.hbase" +
- ".pb.TableName\0229\n\023columnfamily_schema\030\003 \002" +
- "(\0132\034.hbase.pb.ColumnFamilySchema\0226\n\027unmo",
- "dified_table_schema\030\004 \001(\0132\025.hbase.pb.Tab" +
- "leSchema\"\307\001\n\033DeleteColumnFamilyStateData" +
- "\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb.UserInfor" +
- "mation\022\'\n\ntable_name\030\002 \002(\0132\023.hbase.pb.Ta" +
- "bleName\022\031\n\021columnfamily_name\030\003 \002(\014\0226\n\027un" +
- "modified_table_schema\030\004 \001(\0132\025.hbase.pb.T" +
- "ableSchema\"\215\001\n\024EnableTableStateData\022,\n\tu" +
- "ser_info\030\001 \002(\0132\031.hbase.pb.UserInformatio" +
- "n\022\'\n\ntable_name\030\002 \002(\0132\023.hbase.pb.TableNa" +
- "me\022\036\n\026skip_table_state_check\030\003 \002(\010\"\216\001\n\025D",
- "isableTableStateData\022,\n\tuser_info\030\001 \002(\0132" +
- "\031.hbase.pb.UserInformation\022\'\n\ntable_name" +
- "\030\002 \002(\0132\023.hbase.pb.TableName\022\036\n\026skip_tabl" +
- "e_state_check\030\003 \002(\010\"\201\002\n\024ServerCrashState" +
- "Data\022)\n\013server_name\030\001 \002(\0132\024.hbase.pb.Ser" +
- "verName\022\036\n\026distributed_log_replay\030\002 \001(\010\022" +
- "7\n\031regions_on_crashed_server\030\003 \003(\0132\024.hba" +
- "se.pb.RegionInfo\022.\n\020regions_assigned\030\004 \003" +
- "(\0132\024.hbase.pb.RegionInfo\022\025\n\rcarrying_met" +
- "a\030\005 \001(\010\022\036\n\020should_split_wal\030\006 \001(\010:\004true*",
- "\330\001\n\020CreateTableState\022\036\n\032CREATE_TABLE_PRE" +
- "_OPERATION\020\001\022 \n\034CREATE_TABLE_WRITE_FS_LA" +
- "YOUT\020\002\022\034\n\030CREATE_TABLE_ADD_TO_META\020\003\022\037\n\033" +
- "CREATE_TABLE_ASSIGN_REGIONS\020\004\022\"\n\036CREATE_" +
- "TABLE_UPDATE_DESC_CACHE\020\005\022\037\n\033CREATE_TABL" +
- "E_POST_OPERATION\020\006*\207\002\n\020ModifyTableState\022" +
- "\030\n\024MODIFY_TABLE_PREPARE\020\001\022\036\n\032MODIFY_TABL" +
- "E_PRE_OPERATION\020\002\022(\n$MODIFY_TABLE_UPDATE" +
- "_TABLE_DESCRIPTOR\020\003\022&\n\"MODIFY_TABLE_REMO" +
- "VE_REPLICA_COLUMN\020\004\022!\n\035MODIFY_TABLE_DELE",
- "TE_FS_LAYOUT\020\005\022\037\n\033MODIFY_TABLE_POST_OPER" +
- "ATION\020\006\022#\n\037MODIFY_TABLE_REOPEN_ALL_REGIO" +
- "NS\020\007*\212\002\n\022TruncateTableState\022 \n\034TRUNCATE_" +
- "TABLE_PRE_OPERATION\020\001\022#\n\037TRUNCATE_TABLE_" +
- "REMOVE_FROM_META\020\002\022\"\n\036TRUNCATE_TABLE_CLE" +
- "AR_FS_LAYOUT\020\003\022#\n\037TRUNCATE_TABLE_CREATE_" +
- "FS_LAYOUT\020\004\022\036\n\032TRUNCATE_TABLE_ADD_TO_MET" +
- "A\020\005\022!\n\035TRUNCATE_TABLE_ASSIGN_REGIONS\020\006\022!" +
- "\n\035TRUNCATE_TABLE_POST_OPERATION\020\007*\337\001\n\020De" +
- "leteTableState\022\036\n\032DELETE_TABLE_PRE_OPERA",
- "TION\020\001\022!\n\035DELETE_TABLE_REMOVE_FROM_META\020" +
- "\002\022 \n\034DELETE_TABLE_CLEAR_FS_LAYOUT\020\003\022\"\n\036D" +
- "ELETE_TABLE_UPDATE_DESC_CACHE\020\004\022!\n\035DELET" +
- "E_TABLE_UNASSIGN_REGIONS\020\005\022\037\n\033DELETE_TAB" +
- "LE_POST_OPERATION\020\006*\320\001\n\024CreateNamespaceS" +
- "tate\022\034\n\030CREATE_NAMESPACE_PREPARE\020\001\022%\n!CR" +
- "EATE_NAMESPACE_CREATE_DIRECTORY\020\002\022)\n%CRE" +
- "ATE_NAMESPACE_INSERT_INTO_NS_TABLE\020\003\022\036\n\032" +
- "CREATE_NAMESPACE_UPDATE_ZK\020\004\022(\n$CREATE_N" +
- "AMESPACE_SET_NAMESPACE_QUOTA\020\005*z\n\024Modify",
- "NamespaceState\022\034\n\030MODIFY_NAMESPACE_PREPA" +
- "RE\020\001\022$\n MODIFY_NAMESPACE_UPDATE_NS_TABLE" +
- "\020\002\022\036\n\032MODIFY_NAMESPACE_UPDATE_ZK\020\003*\332\001\n\024D" +
- "eleteNamespaceState\022\034\n\030DELETE_NAMESPACE_" +
- "PREPARE\020\001\022)\n%DELETE_NAMESPACE_DELETE_FRO" +
- "M_NS_TABLE\020\002\022#\n\037DELETE_NAMESPACE_REMOVE_" +
- "FROM_ZK\020\003\022\'\n#DELETE_NAMESPACE_DELETE_DIR" +
- "ECTORIES\020\004\022+\n\'DELETE_NAMESPACE_REMOVE_NA" +
- "MESPACE_QUOTA\020\005*\331\001\n\024AddColumnFamilyState" +
- "\022\035\n\031ADD_COLUMN_FAMILY_PREPARE\020\001\022#\n\037ADD_C",
- "OLUMN_FAMILY_PRE_OPERATION\020\002\022-\n)ADD_COLU" +
- "MN_FAMILY_UPDATE_TABLE_DESCRIPTOR\020\003\022$\n A" +
- "DD_COLUMN_FAMILY_POST_OPERATION\020\004\022(\n$ADD" +
- "_COLUMN_FAMILY_REOPEN_ALL_REGIONS\020\005*\353\001\n\027" +
- "ModifyColumnFamilyState\022 \n\034MODIFY_COLUMN" +
- "_FAMILY_PREPARE\020\001\022&\n\"MODIFY_COLUMN_FAMIL" +
- "Y_PRE_OPERATION\020\002\0220\n,MODIFY_COLUMN_FAMIL" +
- "Y_UPDATE_TABLE_DESCRIPTOR\020\003\022\'\n#MODIFY_CO" +
- "LUMN_FAMILY_POST_OPERATION\020\004\022+\n\'MODIFY_C" +
- "OLUMN_FAMILY_REOPEN_ALL_REGIONS\020\005*\226\002\n\027De",
- "leteColumnFamilyState\022 \n\034DELETE_COLUMN_F" +
- "AMILY_PREPARE\020\001\022&\n\"DELETE_COLUMN_FAMILY_" +
- "PRE_OPERATION\020\002\0220\n,DELETE_COLUMN_FAMILY_" +
- "UPDATE_TABLE_DESCRIPTOR\020\003\022)\n%DELETE_COLU" +
- "MN_FAMILY_DELETE_FS_LAYOUT\020\004\022\'\n#DELETE_C" +
- "OLUMN_FAMILY_POST_OPERATION\020\005\022+\n\'DELETE_" +
- "COLUMN_FAMILY_REOPEN_ALL_REGIONS\020\006*\350\001\n\020E" +
- "nableTableState\022\030\n\024ENABLE_TABLE_PREPARE\020" +
- "\001\022\036\n\032ENABLE_TABLE_PRE_OPERATION\020\002\022)\n%ENA" +
- "BLE_TABLE_SET_ENABLING_TABLE_STATE\020\003\022$\n ",
- "ENABLE_TABLE_MARK_REGIONS_ONLINE\020\004\022(\n$EN" +
- "ABLE_TABLE_SET_ENABLED_TABLE_STATE\020\005\022\037\n\033" +
- "ENABLE_TABLE_POST_OPERATION\020\006*\362\001\n\021Disabl" +
- "eTableState\022\031\n\025DISABLE_TABLE_PREPARE\020\001\022\037" +
- "\n\033DISABLE_TABLE_PRE_OPERATION\020\002\022+\n\'DISAB" +
- "LE_TABLE_SET_DISABLING_TABLE_STATE\020\003\022&\n\"" +
- "DISABLE_TABLE_MARK_REGIONS_OFFLINE\020\004\022*\n&" +
- "DISABLE_TABLE_SET_DISABLED_TABLE_STATE\020\005" +
- "\022 \n\034DISABLE_TABLE_POST_OPERATION\020\006*\234\002\n\020S" +
- "erverCrashState\022\026\n\022SERVER_CRASH_START\020\001\022",
- "\035\n\031SERVER_CRASH_PROCESS_META\020\002\022\034\n\030SERVER" +
- "_CRASH_GET_REGIONS\020\003\022\036\n\032SERVER_CRASH_NO_" +
- "SPLIT_LOGS\020\004\022\033\n\027SERVER_CRASH_SPLIT_LOGS\020" +
- "\005\022#\n\037SERVER_CRASH_PREPARE_LOG_REPLAY\020\006\022\027" +
- "\n\023SERVER_CRASH_ASSIGN\020\010\022\037\n\033SERVER_CRASH_" +
- "WAIT_ON_ASSIGN\020\t\022\027\n\023SERVER_CRASH_FINISH\020" +
- "dBK\n*org.apache.hadoop.hbase.protobuf.ge" +
- "neratedB\025MasterProcedureProtosH\001\210\001\001\240\001\001"
+ "\n\025MasterProcedure.proto\022\010hbase.pb\032\013Table" +
+ ".proto\032\013HBase.proto\032\tRPC.proto\"\234\001\n\024Creat" +
+ "eTableStateData\022,\n\tuser_info\030\001 \002(\0132\031.hba" +
+ "se.pb.UserInformation\022+\n\014table_schema\030\002 " +
+ "\002(\0132\025.hbase.pb.TableSchema\022)\n\013region_inf" +
+ "o\030\003 \003(\0132\024.hbase.pb.RegionInfo\"\332\001\n\024Modify" +
+ "TableStateData\022,\n\tuser_info\030\001 \002(\0132\031.hbas" +
+ "e.pb.UserInformation\0226\n\027unmodified_table" +
+ "_schema\030\002 \001(\0132\025.hbase.pb.TableSchema\0224\n\025" +
+ "modified_table_schema\030\003 \002(\0132\025.hbase.pb.T",
+ "ableSchema\022&\n\036delete_column_family_in_mo" +
+ "dify\030\004 \002(\010\"\340\001\n\026TruncateTableStateData\022,\n" +
+ "\tuser_info\030\001 \002(\0132\031.hbase.pb.UserInformat" +
+ "ion\022\027\n\017preserve_splits\030\002 \002(\010\022\'\n\ntable_na" +
+ "me\030\003 \001(\0132\023.hbase.pb.TableName\022+\n\014table_s" +
+ "chema\030\004 \001(\0132\025.hbase.pb.TableSchema\022)\n\013re" +
+ "gion_info\030\005 \003(\0132\024.hbase.pb.RegionInfo\"\230\001" +
+ "\n\024DeleteTableStateData\022,\n\tuser_info\030\001 \002(" +
+ "\0132\031.hbase.pb.UserInformation\022\'\n\ntable_na" +
+ "me\030\002 \002(\0132\023.hbase.pb.TableName\022)\n\013region_",
+ "info\030\003 \003(\0132\024.hbase.pb.RegionInfo\"W\n\030Crea" +
+ "teNamespaceStateData\022;\n\024namespace_descri" +
+ "ptor\030\001 \002(\0132\035.hbase.pb.NamespaceDescripto" +
+ "r\"\237\001\n\030ModifyNamespaceStateData\022;\n\024namesp" +
+ "ace_descriptor\030\001 \002(\0132\035.hbase.pb.Namespac" +
+ "eDescriptor\022F\n\037unmodified_namespace_desc" +
+ "riptor\030\002 \001(\0132\035.hbase.pb.NamespaceDescrip" +
+ "tor\"o\n\030DeleteNamespaceStateData\022\026\n\016names" +
+ "pace_name\030\001 \002(\t\022;\n\024namespace_descriptor\030" +
+ "\002 \001(\0132\035.hbase.pb.NamespaceDescriptor\"\344\001\n",
+ "\030AddColumnFamilyStateData\022,\n\tuser_info\030\001" +
+ " \002(\0132\031.hbase.pb.UserInformation\022\'\n\ntable" +
+ "_name\030\002 \002(\0132\023.hbase.pb.TableName\0229\n\023colu" +
+ "mnfamily_schema\030\003 \002(\0132\034.hbase.pb.ColumnF" +
+ "amilySchema\0226\n\027unmodified_table_schema\030\004" +
+ " \001(\0132\025.hbase.pb.TableSchema\"\347\001\n\033ModifyCo" +
+ "lumnFamilyStateData\022,\n\tuser_info\030\001 \002(\0132\031" +
+ ".hbase.pb.UserInformation\022\'\n\ntable_name\030" +
+ "\002 \002(\0132\023.hbase.pb.TableName\0229\n\023columnfami" +
+ "ly_schema\030\003 \002(\0132\034.hbase.pb.ColumnFamilyS",
+ "chema\0226\n\027unmodified_table_schema\030\004 \001(\0132\025" +
+ ".hbase.pb.TableSchema\"\307\001\n\033DeleteColumnFa" +
+ "milyStateData\022,\n\tuser_info\030\001 \002(\0132\031.hbase" +
+ ".pb.UserInformation\022\'\n\ntable_name\030\002 \002(\0132" +
+ "\023.hbase.pb.TableName\022\031\n\021columnfamily_nam" +
+ "e\030\003 \002(\014\0226\n\027unmodified_table_schema\030\004 \001(\013" +
+ "2\025.hbase.pb.TableSchema\"\215\001\n\024EnableTableS" +
+ "tateData\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb.U" +
+ "serInformation\022\'\n\ntable_name\030\002 \002(\0132\023.hba" +
+ "se.pb.TableName\022\036\n\026skip_table_state_chec",
+ "k\030\003 \002(\010\"\216\001\n\025DisableTableStateData\022,\n\tuse" +
+ "r_info\030\001 \002(\0132\031.hbase.pb.UserInformation\022" +
+ "\'\n\ntable_name\030\002 \002(\0132\023.hbase.pb.TableName" +
+ "\022\036\n\026skip_table_state_check\030\003 \002(\010\"\201\002\n\024Ser" +
+ "verCrashStateData\022)\n\013server_name\030\001 \002(\0132\024" +
+ ".hbase.pb.ServerName\022\036\n\026distributed_log_" +
+ "replay\030\002 \001(\010\0227\n\031regions_on_crashed_serve" +
+ "r\030\003 \003(\0132\024.hbase.pb.RegionInfo\022.\n\020regions" +
+ "_assigned\030\004 \003(\0132\024.hbase.pb.RegionInfo\022\025\n" +
+ "\rcarrying_meta\030\005 \001(\010\022\036\n\020should_split_wal",
+ "\030\006 \001(\010:\004true*\330\001\n\020CreateTableState\022\036\n\032CRE" +
+ "ATE_TABLE_PRE_OPERATION\020\001\022 \n\034CREATE_TABL" +
+ "E_WRITE_FS_LAYOUT\020\002\022\034\n\030CREATE_TABLE_ADD_" +
+ "TO_META\020\003\022\037\n\033CREATE_TABLE_ASSIGN_REGIONS" +
+ "\020\004\022\"\n\036CREATE_TABLE_UPDATE_DESC_CACHE\020\005\022\037" +
+ "\n\033CREATE_TABLE_POST_OPERATION\020\006*\207\002\n\020Modi" +
+ "fyTableState\022\030\n\024MODIFY_TABLE_PREPARE\020\001\022\036" +
+ "\n\032MODIFY_TABLE_PRE_OPERATION\020\002\022(\n$MODIFY" +
+ "_TABLE_UPDATE_TABLE_DESCRIPTOR\020\003\022&\n\"MODI" +
+ "FY_TABLE_REMOVE_REPLICA_COLUMN\020\004\022!\n\035MODI",
+ "FY_TABLE_DELETE_FS_LAYOUT\020\005\022\037\n\033MODIFY_TA" +
+ "BLE_POST_OPERATION\020\006\022#\n\037MODIFY_TABLE_REO" +
+ "PEN_ALL_REGIONS\020\007*\212\002\n\022TruncateTableState" +
+ "\022 \n\034TRUNCATE_TABLE_PRE_OPERATION\020\001\022#\n\037TR" +
+ "UNCATE_TABLE_REMOVE_FROM_META\020\002\022\"\n\036TRUNC" +
+ "ATE_TABLE_CLEAR_FS_LAYOUT\020\003\022#\n\037TRUNCATE_" +
+ "TABLE_CREATE_FS_LAYOUT\020\004\022\036\n\032TRUNCATE_TAB" +
+ "LE_ADD_TO_META\020\005\022!\n\035TRUNCATE_TABLE_ASSIG" +
+ "N_REGIONS\020\006\022!\n\035TRUNCATE_TABLE_POST_OPERA" +
+ "TION\020\007*\337\001\n\020DeleteTableState\022\036\n\032DELETE_TA",
+ "BLE_PRE_OPERATION\020\001\022!\n\035DELETE_TABLE_REMO" +
+ "VE_FROM_META\020\002\022 \n\034DELETE_TABLE_CLEAR_FS_" +
+ "LAYOUT\020\003\022\"\n\036DELETE_TABLE_UPDATE_DESC_CAC" +
+ "HE\020\004\022!\n\035DELETE_TABLE_UNASSIGN_REGIONS\020\005\022" +
+ "\037\n\033DELETE_TABLE_POST_OPERATION\020\006*\320\001\n\024Cre" +
+ "ateNamespaceState\022\034\n\030CREATE_NAMESPACE_PR" +
+ "EPARE\020\001\022%\n!CREATE_NAMESPACE_CREATE_DIREC" +
+ "TORY\020\002\022)\n%CREATE_NAMESPACE_INSERT_INTO_N" +
+ "S_TABLE\020\003\022\036\n\032CREATE_NAMESPACE_UPDATE_ZK\020" +
+ "\004\022(\n$CREATE_NAMESPACE_SET_NAMESPACE_QUOT",
+ "A\020\005*z\n\024ModifyNamespaceState\022\034\n\030MODIFY_NA" +
+ "MESPACE_PREPARE\020\001\022$\n MODIFY_NAMESPACE_UP" +
+ "DATE_NS_TABLE\020\002\022\036\n\032MODIFY_NAMESPACE_UPDA" +
+ "TE_ZK\020\003*\332\001\n\024DeleteNamespaceState\022\034\n\030DELE" +
+ "TE_NAMESPACE_PREPARE\020\001\022)\n%DELETE_NAMESPA" +
+ "CE_DELETE_FROM_NS_TABLE\020\002\022#\n\037DELETE_NAME" +
+ "SPACE_REMOVE_FROM_ZK\020\003\022\'\n#DELETE_NAMESPA" +
+ "CE_DELETE_DIRECTORIES\020\004\022+\n\'DELETE_NAMESP" +
+ "ACE_REMOVE_NAMESPACE_QUOTA\020\005*\331\001\n\024AddColu" +
+ "mnFamilyState\022\035\n\031ADD_COLUMN_FAMILY_PREPA",
+ "RE\020\001\022#\n\037ADD_COLUMN_FAMILY_PRE_OPERATION\020" +
+ "\002\022-\n)ADD_COLUMN_FAMILY_UPDATE_TABLE_DESC" +
+ "RIPTOR\020\003\022$\n ADD_COLUMN_FAMILY_POST_OPERA" +
+ "TION\020\004\022(\n$ADD_COLUMN_FAMILY_REOPEN_ALL_R" +
+ "EGIONS\020\005*\353\001\n\027ModifyColumnFamilyState\022 \n\034" +
+ "MODIFY_COLUMN_FAMILY
<TRUNCATED>
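
The regenerated descriptorData above changes only the import list baked into the serialized file descriptor: MasterProcedure.proto now declares Table.proto ahead of HBase.proto and RPC.proto, which is where the relocated TableName message is defined. A small hypothetical harness (DependencyCheck) that confirms the new dependency order at runtime:

import com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos;

public class DependencyCheck {
  public static void main(String[] args) {
    Descriptors.FileDescriptor file = MasterProcedureProtos.getDescriptor();
    // Prints Table.proto, HBase.proto, RPC.proto -- the declaration order
    // encoded in the regenerated descriptorData above.
    for (Descriptors.FileDescriptor dep : file.getDependencies()) {
      System.out.println(dep.getName());
    }
  }
}
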
[03/11] hbase git commit: HBASE-18431 Mitigate compatibility concerns
between branch-1.3 and branch-1.4
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
index 538e031..8521ba8 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java
@@ -2517,11 +2517,11 @@ public final class SecureBulkLoadProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.PrepareBulkLoadRequest}
@@ -2575,11 +2575,11 @@ public final class SecureBulkLoadProtos {
break;
}
case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
- tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
@@ -2629,7 +2629,7 @@ public final class SecureBulkLoadProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -2639,18 +2639,18 @@ public final class SecureBulkLoadProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
private void initFields() {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -2844,7 +2844,7 @@ public final class SecureBulkLoadProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
@@ -2940,9 +2940,9 @@ public final class SecureBulkLoadProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
@@ -2952,7 +2952,7 @@ public final class SecureBulkLoadProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
@@ -2962,7 +2962,7 @@ public final class SecureBulkLoadProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -2979,7 +2979,7 @@ public final class SecureBulkLoadProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
@@ -2992,12 +2992,12 @@ public final class SecureBulkLoadProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
- tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
@@ -3013,7 +3013,7 @@ public final class SecureBulkLoadProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
- tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
@@ -3024,7 +3024,7 @@ public final class SecureBulkLoadProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
@@ -3032,7 +3032,7 @@ public final class SecureBulkLoadProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
@@ -3043,11 +3043,11 @@ public final class SecureBulkLoadProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
@@ -4857,30 +4857,31 @@ public final class SecureBulkLoadProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
- "\n\024SecureBulkLoad.proto\022\010hbase.pb\032\013HBase." +
- "proto\032\014Client.proto\"\266\001\n\033SecureBulkLoadHF" +
- "ilesRequest\022>\n\013family_path\030\001 \003(\0132).hbase" +
- ".pb.BulkLoadHFileRequest.FamilyPath\022\026\n\016a" +
- "ssign_seq_num\030\002 \001(\010\022+\n\010fs_token\030\003 \002(\0132\031." +
- "hbase.pb.DelegationToken\022\022\n\nbulk_token\030\004" +
- " \002(\t\".\n\034SecureBulkLoadHFilesResponse\022\016\n\006" +
- "loaded\030\001 \002(\010\"V\n\017DelegationToken\022\022\n\nident" +
- "ifier\030\001 \001(\014\022\020\n\010password\030\002 \001(\014\022\014\n\004kind\030\003 " +
- "\001(\t\022\017\n\007service\030\004 \001(\t\"A\n\026PrepareBulkLoadR",
- "equest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Ta" +
- "bleName\"-\n\027PrepareBulkLoadResponse\022\022\n\nbu" +
- "lk_token\030\001 \002(\t\",\n\026CleanupBulkLoadRequest" +
- "\022\022\n\nbulk_token\030\001 \002(\t\"\031\n\027CleanupBulkLoadR" +
- "esponse2\256\002\n\025SecureBulkLoadService\022V\n\017Pre" +
- "pareBulkLoad\022 .hbase.pb.PrepareBulkLoadR" +
- "equest\032!.hbase.pb.PrepareBulkLoadRespons" +
- "e\022e\n\024SecureBulkLoadHFiles\022%.hbase.pb.Sec" +
- "ureBulkLoadHFilesRequest\032&.hbase.pb.Secu" +
- "reBulkLoadHFilesResponse\022V\n\017CleanupBulkL",
- "oad\022 .hbase.pb.CleanupBulkLoadRequest\032!." +
- "hbase.pb.CleanupBulkLoadResponseBJ\n*org." +
- "apache.hadoop.hbase.protobuf.generatedB\024" +
- "SecureBulkLoadProtosH\001\210\001\001\240\001\001"
+ "\n\024SecureBulkLoad.proto\022\010hbase.pb\032\013Table." +
+ "proto\032\013HBase.proto\032\014Client.proto\"\266\001\n\033Sec" +
+ "ureBulkLoadHFilesRequest\022>\n\013family_path\030" +
+ "\001 \003(\0132).hbase.pb.BulkLoadHFileRequest.Fa" +
+ "milyPath\022\026\n\016assign_seq_num\030\002 \001(\010\022+\n\010fs_t" +
+ "oken\030\003 \002(\0132\031.hbase.pb.DelegationToken\022\022\n" +
+ "\nbulk_token\030\004 \002(\t\".\n\034SecureBulkLoadHFile" +
+ "sResponse\022\016\n\006loaded\030\001 \002(\010\"V\n\017DelegationT" +
+ "oken\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010password\030\002 \001" +
+ "(\014\022\014\n\004kind\030\003 \001(\t\022\017\n\007service\030\004 \001(\t\"A\n\026Pre",
+ "pareBulkLoadRequest\022\'\n\ntable_name\030\001 \002(\0132" +
+ "\023.hbase.pb.TableName\"-\n\027PrepareBulkLoadR" +
+ "esponse\022\022\n\nbulk_token\030\001 \002(\t\",\n\026CleanupBu" +
+ "lkLoadRequest\022\022\n\nbulk_token\030\001 \002(\t\"\031\n\027Cle" +
+ "anupBulkLoadResponse2\256\002\n\025SecureBulkLoadS" +
+ "ervice\022V\n\017PrepareBulkLoad\022 .hbase.pb.Pre" +
+ "pareBulkLoadRequest\032!.hbase.pb.PrepareBu" +
+ "lkLoadResponse\022e\n\024SecureBulkLoadHFiles\022%" +
+ ".hbase.pb.SecureBulkLoadHFilesRequest\032&." +
+ "hbase.pb.SecureBulkLoadHFilesResponse\022V\n",
+ "\017CleanupBulkLoad\022 .hbase.pb.CleanupBulkL" +
+ "oadRequest\032!.hbase.pb.CleanupBulkLoadRes" +
+ "ponseBJ\n*org.apache.hadoop.hbase.protobu" +
+ "f.generatedB\024SecureBulkLoadProtosH\001\210\001\001\240\001" +
+ "\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -4935,6 +4936,7 @@ public final class SecureBulkLoadProtos {
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.protobuf.generated.TableProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
}, assigner);
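
SecureBulkLoad.proto gets the same two-part treatment: Table.proto joins the import list inside descriptorData, and TableProtos.getDescriptor() is prepended to the dependency array so the array mirrors the new import order. A hypothetical spot check (DescriptorCheck) that the table_name field of PrepareBulkLoadRequest now resolves to the relocated message:

import com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos;

public class DescriptorCheck {
  public static void main(String[] args) {
    Descriptors.Descriptor req =
        SecureBulkLoadProtos.PrepareBulkLoadRequest.getDescriptor();
    Descriptors.FieldDescriptor tableName = req.findFieldByName("table_name");
    // After this change the message type behind table_name is defined in
    // Table.proto rather than HBase.proto.
    System.out.println(tableName.getMessageType().getFile().getName());
  }
}
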
http://git-wip-us.apache.org/repos/asf/hbase/blob/e9e16b59/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
index 2e11b4a..9805d50 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
@@ -8,1494 +8,6 @@ public final class SnapshotProtos {
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
- public interface SnapshotDescriptionOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // required string name = 1;
- /**
- * <code>required string name = 1;</code>
- */
- boolean hasName();
- /**
- * <code>required string name = 1;</code>
- */
- java.lang.String getName();
- /**
- * <code>required string name = 1;</code>
- */
- com.google.protobuf.ByteString
- getNameBytes();
-
- // optional string table = 2;
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- boolean hasTable();
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- java.lang.String getTable();
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- com.google.protobuf.ByteString
- getTableBytes();
-
- // optional int64 creation_time = 3 [default = 0];
- /**
- * <code>optional int64 creation_time = 3 [default = 0];</code>
- */
- boolean hasCreationTime();
- /**
- * <code>optional int64 creation_time = 3 [default = 0];</code>
- */
- long getCreationTime();
-
- // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];
- /**
- * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
- */
- boolean hasType();
- /**
- * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType();
-
- // optional int32 version = 5;
- /**
- * <code>optional int32 version = 5;</code>
- */
- boolean hasVersion();
- /**
- * <code>optional int32 version = 5;</code>
- */
- int getVersion();
-
- // optional string owner = 6;
- /**
- * <code>optional string owner = 6;</code>
- */
- boolean hasOwner();
- /**
- * <code>optional string owner = 6;</code>
- */
- java.lang.String getOwner();
- /**
- * <code>optional string owner = 6;</code>
- */
- com.google.protobuf.ByteString
- getOwnerBytes();
-
- // optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- boolean hasUsersAndPermissions();
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions();
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder();
- }
- /**
- * Protobuf type {@code hbase.pb.SnapshotDescription}
- *
- * <pre>
- **
- * Description of the snapshot to take
- * </pre>
- */
- public static final class SnapshotDescription extends
- com.google.protobuf.GeneratedMessage
- implements SnapshotDescriptionOrBuilder {
- // Use SnapshotDescription.newBuilder() to construct.
- private SnapshotDescription(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
- super(builder);
- this.unknownFields = builder.getUnknownFields();
- }
- private SnapshotDescription(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final SnapshotDescription defaultInstance;
- public static SnapshotDescription getDefaultInstance() {
- return defaultInstance;
- }
-
- public SnapshotDescription getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private SnapshotDescription(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- case 10: {
- bitField0_ |= 0x00000001;
- name_ = input.readBytes();
- break;
- }
- case 18: {
- bitField0_ |= 0x00000002;
- table_ = input.readBytes();
- break;
- }
- case 24: {
- bitField0_ |= 0x00000004;
- creationTime_ = input.readInt64();
- break;
- }
- case 32: {
- int rawValue = input.readEnum();
- org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type value = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.valueOf(rawValue);
- if (value == null) {
- unknownFields.mergeVarintField(4, rawValue);
- } else {
- bitField0_ |= 0x00000008;
- type_ = value;
- }
- break;
- }
- case 40: {
- bitField0_ |= 0x00000010;
- version_ = input.readInt32();
- break;
- }
- case 50: {
- bitField0_ |= 0x00000020;
- owner_ = input.readBytes();
- break;
- }
- case 58: {
- org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder subBuilder = null;
- if (((bitField0_ & 0x00000040) == 0x00000040)) {
- subBuilder = usersAndPermissions_.toBuilder();
- }
- usersAndPermissions_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.PARSER, extensionRegistry);
- if (subBuilder != null) {
- subBuilder.mergeFrom(usersAndPermissions_);
- usersAndPermissions_ = subBuilder.buildPartial();
- }
- bitField0_ |= 0x00000040;
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder.class);
- }
-
- public static com.google.protobuf.Parser<SnapshotDescription> PARSER =
- new com.google.protobuf.AbstractParser<SnapshotDescription>() {
- public SnapshotDescription parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new SnapshotDescription(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<SnapshotDescription> getParserForType() {
- return PARSER;
- }
-
- /**
- * Protobuf enum {@code hbase.pb.SnapshotDescription.Type}
- */
- public enum Type
- implements com.google.protobuf.ProtocolMessageEnum {
- /**
- * <code>DISABLED = 0;</code>
- */
- DISABLED(0, 0),
- /**
- * <code>FLUSH = 1;</code>
- */
- FLUSH(1, 1),
- /**
- * <code>SKIPFLUSH = 2;</code>
- */
- SKIPFLUSH(2, 2),
- ;
-
- /**
- * <code>DISABLED = 0;</code>
- */
- public static final int DISABLED_VALUE = 0;
- /**
- * <code>FLUSH = 1;</code>
- */
- public static final int FLUSH_VALUE = 1;
- /**
- * <code>SKIPFLUSH = 2;</code>
- */
- public static final int SKIPFLUSH_VALUE = 2;
-
-
- public final int getNumber() { return value; }
-
- public static Type valueOf(int value) {
- switch (value) {
- case 0: return DISABLED;
- case 1: return FLUSH;
- case 2: return SKIPFLUSH;
- default: return null;
- }
- }
-
- public static com.google.protobuf.Internal.EnumLiteMap<Type>
- internalGetValueMap() {
- return internalValueMap;
- }
- private static com.google.protobuf.Internal.EnumLiteMap<Type>
- internalValueMap =
- new com.google.protobuf.Internal.EnumLiteMap<Type>() {
- public Type findValueByNumber(int number) {
- return Type.valueOf(number);
- }
- };
-
- public final com.google.protobuf.Descriptors.EnumValueDescriptor
- getValueDescriptor() {
- return getDescriptor().getValues().get(index);
- }
- public final com.google.protobuf.Descriptors.EnumDescriptor
- getDescriptorForType() {
- return getDescriptor();
- }
- public static final com.google.protobuf.Descriptors.EnumDescriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDescriptor().getEnumTypes().get(0);
- }
-
- private static final Type[] VALUES = values();
-
- public static Type valueOf(
- com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
- if (desc.getType() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "EnumValueDescriptor is not for this type.");
- }
- return VALUES[desc.getIndex()];
- }
-
- private final int index;
- private final int value;
-
- private Type(int index, int value) {
- this.index = index;
- this.value = value;
- }
-
- // @@protoc_insertion_point(enum_scope:hbase.pb.SnapshotDescription.Type)
- }
-
- private int bitField0_;
- // required string name = 1;
- public static final int NAME_FIELD_NUMBER = 1;
- private java.lang.Object name_;
- /**
- * <code>required string name = 1;</code>
- */
- public boolean hasName() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>required string name = 1;</code>
- */
- public java.lang.String getName() {
- java.lang.Object ref = name_;
- if (ref instanceof java.lang.String) {
- return (java.lang.String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- name_ = s;
- }
- return s;
- }
- }
- /**
- * <code>required string name = 1;</code>
- */
- public com.google.protobuf.ByteString
- getNameBytes() {
- java.lang.Object ref = name_;
- if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- name_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- // optional string table = 2;
- public static final int TABLE_FIELD_NUMBER = 2;
- private java.lang.Object table_;
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- public boolean hasTable() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- public java.lang.String getTable() {
- java.lang.Object ref = table_;
- if (ref instanceof java.lang.String) {
- return (java.lang.String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- table_ = s;
- }
- return s;
- }
- }
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- public com.google.protobuf.ByteString
- getTableBytes() {
- java.lang.Object ref = table_;
- if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- table_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- // optional int64 creation_time = 3 [default = 0];
- public static final int CREATION_TIME_FIELD_NUMBER = 3;
- private long creationTime_;
- /**
- * <code>optional int64 creation_time = 3 [default = 0];</code>
- */
- public boolean hasCreationTime() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- /**
- * <code>optional int64 creation_time = 3 [default = 0];</code>
- */
- public long getCreationTime() {
- return creationTime_;
- }
-
- // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];
- public static final int TYPE_FIELD_NUMBER = 4;
- private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type type_;
- /**
- * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
- */
- public boolean hasType() {
- return ((bitField0_ & 0x00000008) == 0x00000008);
- }
- /**
- * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType() {
- return type_;
- }
-
- // optional int32 version = 5;
- public static final int VERSION_FIELD_NUMBER = 5;
- private int version_;
- /**
- * <code>optional int32 version = 5;</code>
- */
- public boolean hasVersion() {
- return ((bitField0_ & 0x00000010) == 0x00000010);
- }
- /**
- * <code>optional int32 version = 5;</code>
- */
- public int getVersion() {
- return version_;
- }
-
- // optional string owner = 6;
- public static final int OWNER_FIELD_NUMBER = 6;
- private java.lang.Object owner_;
- /**
- * <code>optional string owner = 6;</code>
- */
- public boolean hasOwner() {
- return ((bitField0_ & 0x00000020) == 0x00000020);
- }
- /**
- * <code>optional string owner = 6;</code>
- */
- public java.lang.String getOwner() {
- java.lang.Object ref = owner_;
- if (ref instanceof java.lang.String) {
- return (java.lang.String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- owner_ = s;
- }
- return s;
- }
- }
- /**
- * <code>optional string owner = 6;</code>
- */
- public com.google.protobuf.ByteString
- getOwnerBytes() {
- java.lang.Object ref = owner_;
- if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- owner_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- // optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;
- public static final int USERS_AND_PERMISSIONS_FIELD_NUMBER = 7;
- private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions usersAndPermissions_;
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- public boolean hasUsersAndPermissions() {
- return ((bitField0_ & 0x00000040) == 0x00000040);
- }
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions() {
- return usersAndPermissions_;
- }
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder() {
- return usersAndPermissions_;
- }
-
- private void initFields() {
- name_ = "";
- table_ = "";
- creationTime_ = 0L;
- type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH;
- version_ = 0;
- owner_ = "";
- usersAndPermissions_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance();
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (!hasName()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (hasUsersAndPermissions()) {
- if (!getUsersAndPermissions().isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, getNameBytes());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeBytes(2, getTableBytes());
- }
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- output.writeInt64(3, creationTime_);
- }
- if (((bitField0_ & 0x00000008) == 0x00000008)) {
- output.writeEnum(4, type_.getNumber());
- }
- if (((bitField0_ & 0x00000010) == 0x00000010)) {
- output.writeInt32(5, version_);
- }
- if (((bitField0_ & 0x00000020) == 0x00000020)) {
- output.writeBytes(6, getOwnerBytes());
- }
- if (((bitField0_ & 0x00000040) == 0x00000040)) {
- output.writeMessage(7, usersAndPermissions_);
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, getNameBytes());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, getTableBytes());
- }
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
- .computeInt64Size(3, creationTime_);
- }
- if (((bitField0_ & 0x00000008) == 0x00000008)) {
- size += com.google.protobuf.CodedOutputStream
- .computeEnumSize(4, type_.getNumber());
- }
- if (((bitField0_ & 0x00000010) == 0x00000010)) {
- size += com.google.protobuf.CodedOutputStream
- .computeInt32Size(5, version_);
- }
- if (((bitField0_ & 0x00000020) == 0x00000020)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(6, getOwnerBytes());
- }
- if (((bitField0_ & 0x00000040) == 0x00000040)) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(7, usersAndPermissions_);
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription)) {
- return super.equals(obj);
- }
- org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription other = (org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription) obj;
-
- boolean result = true;
- result = result && (hasName() == other.hasName());
- if (hasName()) {
- result = result && getName()
- .equals(other.getName());
- }
- result = result && (hasTable() == other.hasTable());
- if (hasTable()) {
- result = result && getTable()
- .equals(other.getTable());
- }
- result = result && (hasCreationTime() == other.hasCreationTime());
- if (hasCreationTime()) {
- result = result && (getCreationTime()
- == other.getCreationTime());
- }
- result = result && (hasType() == other.hasType());
- if (hasType()) {
- result = result &&
- (getType() == other.getType());
- }
- result = result && (hasVersion() == other.hasVersion());
- if (hasVersion()) {
- result = result && (getVersion()
- == other.getVersion());
- }
- result = result && (hasOwner() == other.hasOwner());
- if (hasOwner()) {
- result = result && getOwner()
- .equals(other.getOwner());
- }
- result = result && (hasUsersAndPermissions() == other.hasUsersAndPermissions());
- if (hasUsersAndPermissions()) {
- result = result && getUsersAndPermissions()
- .equals(other.getUsersAndPermissions());
- }
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- private int memoizedHashCode = 0;
- @java.lang.Override
- public int hashCode() {
- if (memoizedHashCode != 0) {
- return memoizedHashCode;
- }
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasName()) {
- hash = (37 * hash) + NAME_FIELD_NUMBER;
- hash = (53 * hash) + getName().hashCode();
- }
- if (hasTable()) {
- hash = (37 * hash) + TABLE_FIELD_NUMBER;
- hash = (53 * hash) + getTable().hashCode();
- }
- if (hasCreationTime()) {
- hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER;
- hash = (53 * hash) + hashLong(getCreationTime());
- }
- if (hasType()) {
- hash = (37 * hash) + TYPE_FIELD_NUMBER;
- hash = (53 * hash) + hashEnum(getType());
- }
- if (hasVersion()) {
- hash = (37 * hash) + VERSION_FIELD_NUMBER;
- hash = (53 * hash) + getVersion();
- }
- if (hasOwner()) {
- hash = (37 * hash) + OWNER_FIELD_NUMBER;
- hash = (53 * hash) + getOwner().hashCode();
- }
- if (hasUsersAndPermissions()) {
- hash = (37 * hash) + USERS_AND_PERMISSIONS_FIELD_NUMBER;
- hash = (53 * hash) + getUsersAndPermissions().hashCode();
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- memoizedHashCode = hash;
- return hash;
- }
-
- public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- /**
- * Protobuf type {@code hbase.pb.SnapshotDescription}
- *
- * <pre>
- **
- * Description of the snapshot to take
- * </pre>
- */
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder.class);
- }
-
- // Construct using org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- getUsersAndPermissionsFieldBuilder();
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- name_ = "";
- bitField0_ = (bitField0_ & ~0x00000001);
- table_ = "";
- bitField0_ = (bitField0_ & ~0x00000002);
- creationTime_ = 0L;
- bitField0_ = (bitField0_ & ~0x00000004);
- type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH;
- bitField0_ = (bitField0_ & ~0x00000008);
- version_ = 0;
- bitField0_ = (bitField0_ & ~0x00000010);
- owner_ = "";
- bitField0_ = (bitField0_ & ~0x00000020);
- if (usersAndPermissionsBuilder_ == null) {
- usersAndPermissions_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance();
- } else {
- usersAndPermissionsBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000040);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getDefaultInstanceForType() {
- return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance();
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription build() {
- org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription buildPartial() {
- org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription result = new org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- result.name_ = name_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000002;
- }
- result.table_ = table_;
- if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
- to_bitField0_ |= 0x00000004;
- }
- result.creationTime_ = creationTime_;
- if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
- to_bitField0_ |= 0x00000008;
- }
- result.type_ = type_;
- if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
- to_bitField0_ |= 0x00000010;
- }
- result.version_ = version_;
- if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
- to_bitField0_ |= 0x00000020;
- }
- result.owner_ = owner_;
- if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
- to_bitField0_ |= 0x00000040;
- }
- if (usersAndPermissionsBuilder_ == null) {
- result.usersAndPermissions_ = usersAndPermissions_;
- } else {
- result.usersAndPermissions_ = usersAndPermissionsBuilder_.build();
- }
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription) {
- return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription other) {
- if (other == org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) return this;
- if (other.hasName()) {
- bitField0_ |= 0x00000001;
- name_ = other.name_;
- onChanged();
- }
- if (other.hasTable()) {
- bitField0_ |= 0x00000002;
- table_ = other.table_;
- onChanged();
- }
- if (other.hasCreationTime()) {
- setCreationTime(other.getCreationTime());
- }
- if (other.hasType()) {
- setType(other.getType());
- }
- if (other.hasVersion()) {
- setVersion(other.getVersion());
- }
- if (other.hasOwner()) {
- bitField0_ |= 0x00000020;
- owner_ = other.owner_;
- onChanged();
- }
- if (other.hasUsersAndPermissions()) {
- mergeUsersAndPermissions(other.getUsersAndPermissions());
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (!hasName()) {
-
- return false;
- }
- if (hasUsersAndPermissions()) {
- if (!getUsersAndPermissions().isInitialized()) {
-
- return false;
- }
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
- }
- }
- return this;
- }
- private int bitField0_;
-
- // required string name = 1;
- private java.lang.Object name_ = "";
- /**
- * <code>required string name = 1;</code>
- */
- public boolean hasName() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>required string name = 1;</code>
- */
- public java.lang.String getName() {
- java.lang.Object ref = name_;
- if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- name_ = s;
- return s;
- } else {
- return (java.lang.String) ref;
- }
- }
- /**
- * <code>required string name = 1;</code>
- */
- public com.google.protobuf.ByteString
- getNameBytes() {
- java.lang.Object ref = name_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- name_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
- /**
- * <code>required string name = 1;</code>
- */
- public Builder setName(
- java.lang.String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- name_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>required string name = 1;</code>
- */
- public Builder clearName() {
- bitField0_ = (bitField0_ & ~0x00000001);
- name_ = getDefaultInstance().getName();
- onChanged();
- return this;
- }
- /**
- * <code>required string name = 1;</code>
- */
- public Builder setNameBytes(
- com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- name_ = value;
- onChanged();
- return this;
- }
-
- // optional string table = 2;
- private java.lang.Object table_ = "";
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- public boolean hasTable() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- public java.lang.String getTable() {
- java.lang.Object ref = table_;
- if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- table_ = s;
- return s;
- } else {
- return (java.lang.String) ref;
- }
- }
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- public com.google.protobuf.ByteString
- getTableBytes() {
- java.lang.Object ref = table_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- table_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- public Builder setTable(
- java.lang.String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000002;
- table_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- public Builder clearTable() {
- bitField0_ = (bitField0_ & ~0x00000002);
- table_ = getDefaultInstance().getTable();
- onChanged();
- return this;
- }
- /**
- * <code>optional string table = 2;</code>
- *
- * <pre>
- * not needed for delete, but checked for in taking snapshot
- * </pre>
- */
- public Builder setTableBytes(
- com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000002;
- table_ = value;
- onChanged();
- return this;
- }
-
- // optional int64 creation_time = 3 [default = 0];
- private long creationTime_ ;
- /**
- * <code>optional int64 creation_time = 3 [default = 0];</code>
- */
- public boolean hasCreationTime() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- /**
- * <code>optional int64 creation_time = 3 [default = 0];</code>
- */
- public long getCreationTime() {
- return creationTime_;
- }
- /**
- * <code>optional int64 creation_time = 3 [default = 0];</code>
- */
- public Builder setCreationTime(long value) {
- bitField0_ |= 0x00000004;
- creationTime_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>optional int64 creation_time = 3 [default = 0];</code>
- */
- public Builder clearCreationTime() {
- bitField0_ = (bitField0_ & ~0x00000004);
- creationTime_ = 0L;
- onChanged();
- return this;
- }
-
- // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];
- private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH;
- /**
- * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
- */
- public boolean hasType() {
- return ((bitField0_ & 0x00000008) == 0x00000008);
- }
- /**
- * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType() {
- return type_;
- }
- /**
- * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
- */
- public Builder setType(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000008;
- type_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
- */
- public Builder clearType() {
- bitField0_ = (bitField0_ & ~0x00000008);
- type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH;
- onChanged();
- return this;
- }
-
- // optional int32 version = 5;
- private int version_ ;
- /**
- * <code>optional int32 version = 5;</code>
- */
- public boolean hasVersion() {
- return ((bitField0_ & 0x00000010) == 0x00000010);
- }
- /**
- * <code>optional int32 version = 5;</code>
- */
- public int getVersion() {
- return version_;
- }
- /**
- * <code>optional int32 version = 5;</code>
- */
- public Builder setVersion(int value) {
- bitField0_ |= 0x00000010;
- version_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>optional int32 version = 5;</code>
- */
- public Builder clearVersion() {
- bitField0_ = (bitField0_ & ~0x00000010);
- version_ = 0;
- onChanged();
- return this;
- }
-
- // optional string owner = 6;
- private java.lang.Object owner_ = "";
- /**
- * <code>optional string owner = 6;</code>
- */
- public boolean hasOwner() {
- return ((bitField0_ & 0x00000020) == 0x00000020);
- }
- /**
- * <code>optional string owner = 6;</code>
- */
- public java.lang.String getOwner() {
- java.lang.Object ref = owner_;
- if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- owner_ = s;
- return s;
- } else {
- return (java.lang.String) ref;
- }
- }
- /**
- * <code>optional string owner = 6;</code>
- */
- public com.google.protobuf.ByteString
- getOwnerBytes() {
- java.lang.Object ref = owner_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- owner_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
- /**
- * <code>optional string owner = 6;</code>
- */
- public Builder setOwner(
- java.lang.String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000020;
- owner_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>optional string owner = 6;</code>
- */
- public Builder clearOwner() {
- bitField0_ = (bitField0_ & ~0x00000020);
- owner_ = getDefaultInstance().getOwner();
- onChanged();
- return this;
- }
- /**
- * <code>optional string owner = 6;</code>
- */
- public Builder setOwnerBytes(
- com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000020;
- owner_ = value;
- onChanged();
- return this;
- }
-
- // optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;
- private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions usersAndPermissions_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder> usersAndPermissionsBuilder_;
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- public boolean hasUsersAndPermissions() {
- return ((bitField0_ & 0x00000040) == 0x00000040);
- }
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions() {
- if (usersAndPermissionsBuilder_ == null) {
- return usersAndPermissions_;
- } else {
- return usersAndPermissionsBuilder_.getMessage();
- }
- }
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- public Builder setUsersAndPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions value) {
- if (usersAndPermissionsBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- usersAndPermissions_ = value;
- onChanged();
- } else {
- usersAndPermissionsBuilder_.setMessage(value);
- }
- bitField0_ |= 0x00000040;
- return this;
- }
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- public Builder setUsersAndPermissions(
- org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder builderForValue) {
- if (usersAndPermissionsBuilder_ == null) {
- usersAndPermissions_ = builderForValue.build();
- onChanged();
- } else {
- usersAndPermissionsBuilder_.setMessage(builderForValue.build());
- }
- bitField0_ |= 0x00000040;
- return this;
- }
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- public Builder mergeUsersAndPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions value) {
- if (usersAndPermissionsBuilder_ == null) {
- if (((bitField0_ & 0x00000040) == 0x00000040) &&
- usersAndPermissions_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance()) {
- usersAndPermissions_ =
- org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.newBuilder(usersAndPermissions_).mergeFrom(value).buildPartial();
- } else {
- usersAndPermissions_ = value;
- }
- onChanged();
- } else {
- usersAndPermissionsBuilder_.mergeFrom(value);
- }
- bitField0_ |= 0x00000040;
- return this;
- }
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- public Builder clearUsersAndPermissions() {
- if (usersAndPermissionsBuilder_ == null) {
- usersAndPermissions_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance();
- onChanged();
- } else {
- usersAndPermissionsBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000040);
- return this;
- }
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder getUsersAndPermissionsBuilder() {
- bitField0_ |= 0x00000040;
- onChanged();
- return getUsersAndPermissionsFieldBuilder().getBuilder();
- }
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder() {
- if (usersAndPermissionsBuilder_ != null) {
- return usersAndPermissionsBuilder_.getMessageOrBuilder();
- } else {
- return usersAndPermissions_;
- }
- }
- /**
- * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
- */
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder>
- getUsersAndPermissionsFieldBuilder() {
- if (usersAndPermissionsBuilder_ == null) {
- usersAndPermissionsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder>(
- usersAndPermissions_,
- getParentForChildren(),
- isClean());
- usersAndPermissions_ = null;
- }
- return usersAndPermissionsBuilder_;
- }
-
- // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotDescription)
- }
-
- static {
- defaultInstance = new SnapshotDescription(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDescription)
- }
-
public interface SnapshotFileInfoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
@@ -6174,11 +4686,6 @@ public final class SnapshotProtos {
}
private static com.google.protobuf.Descriptors.Descriptor
- internal_static_hbase_pb_SnapshotDescription_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_SnapshotFileInfo_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
@@ -6212,52 +4719,39 @@ public final class SnapshotProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
- "\n\016Snapshot.proto\022\010hbase.pb\032\023AccessContro" +
- "l.proto\032\010FS.proto\032\013HBase.proto\"\223\002\n\023Snaps" +
- "hotDescription\022\014\n\004name\030\001 \002(\t\022\r\n\005table\030\002 " +
- "\001(\t\022\030\n\rcreation_time\030\003 \001(\003:\0010\0227\n\004type\030\004 " +
- "\001(\0162\".hbase.pb.SnapshotDescription.Type:" +
- "\005FLUSH\022\017\n\007version\030\005 \001(\005\022\r\n\005owner\030\006 \001(\t\022<" +
- "\n\025users_and_permissions\030\007 \001(\0132\035.hbase.pb" +
- ".UsersAndPermissions\".\n\004Type\022\014\n\010DISABLED" +
- "\020\000\022\t\n\005FLUSH\020\001\022\r\n\tSKIPFLUSH\020\002\"\222\001\n\020Snapsho" +
- "tFileInfo\022-\n\004type\030\001 \002(\0162\037.hbase.pb.Snaps",
- "hotFileInfo.Type\022\r\n\005hfile\030\003 \001(\t\022\022\n\nwal_s" +
- "erver\030\004 \001(\t\022\020\n\010wal_name\030\005 \001(\t\"\032\n\004Type\022\t\n" +
- "\005HFILE\020\001\022\007\n\003WAL\020\002\"\323\002\n\026SnapshotRegionMani" +
- "fest\022\017\n\007version\030\001 \001(\005\022)\n\013region_info\030\002 \002" +
- "(\0132\024.hbase.pb.RegionInfo\022B\n\014family_files" +
- "\030\003 \003(\0132,.hbase.pb.SnapshotRegionManifest" +
- ".FamilyFiles\032T\n\tStoreFile\022\014\n\004name\030\001 \002(\t\022" +
- "&\n\treference\030\002 \001(\0132\023.hbase.pb.Reference\022" +
- "\021\n\tfile_size\030\003 \001(\004\032c\n\013FamilyFiles\022\023\n\013fam" +
- "ily_name\030\001 \002(\014\022?\n\013store_files\030\002 \003(\0132*.hb",
- "ase.pb.SnapshotRegionManifest.StoreFile\"" +
- "\177\n\024SnapshotDataManifest\022+\n\014table_schema\030" +
- "\001 \002(\0132\025.hbase.pb.TableSchema\022:\n\020region_m" +
- "anifests\030\002 \003(\0132 .hbase.pb.SnapshotRegion" +
- "ManifestBD\n*org.apache.hadoop.hbase.prot" +
- "obuf.generatedB\016SnapshotProtosH\001\210\001\001\240\001\001"
+ "\n\016Snapshot.proto\022\010hbase.pb\032\010FS.proto\032\013HB" +
+ "ase.proto\"\222\001\n\020SnapshotFileInfo\022-\n\004type\030\001" +
+ " \002(\0162\037.hbase.pb.SnapshotFileInfo.Type\022\r\n" +
+ "\005hfile\030\003 \001(\t\022\022\n\nwal_server\030\004 \001(\t\022\020\n\010wal_" +
+ "name\030\005 \001(\t\"\032\n\004Type\022\t\n\005HFILE\020\001\022\007\n\003WAL\020\002\"\323" +
+ "\002\n\026SnapshotRegionManifest\022\017\n\007version\030\001 \001" +
+ "(\005\022)\n\013region_info\030\002 \002(\0132\024.hbase.pb.Regio" +
+ "nInfo\022B\n\014family_files\030\003 \003(\0132,.hbase.pb.S" +
+ "napshotRegionManifest.FamilyFiles\032T\n\tSto" +
+ "reFile\022\014\n\004name\030\001 \002(\t\022&\n\treference\030\002 \001(\0132",
+ "\023.hbase.pb.Reference\022\021\n\tfile_size\030\003 \001(\004\032" +
+ "c\n\013FamilyFiles\022\023\n\013family_name\030\001 \002(\014\022?\n\013s" +
+ "tore_files\030\002 \003(\0132*.hbase.pb.SnapshotRegi" +
+ "onManifest.StoreFile\"\177\n\024SnapshotDataMani" +
+ "fest\022+\n\014table_schema\030\001 \002(\0132\025.hbase.pb.Ta" +
+ "bleSchema\022:\n\020region_manifests\030\002 \003(\0132 .hb" +
+ "ase.pb.SnapshotRegionManifestBD\n*org.apa" +
+ "che.hadoop.hbase.protobuf.generatedB\016Sna" +
+ "pshotProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
- internal_static_hbase_pb_SnapshotDescription_descriptor =
- getDescriptor().getMessageTypes().get(0);
- internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_hbase_pb_SnapshotDescription_descriptor,
- new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", "Owner", "UsersAndPermissions", });
internal_static_hbase_pb_SnapshotFileInfo_descriptor =
- getDescriptor().getMessageTypes().get(1);
+ getDescriptor().getMessageTypes().get(0);
internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_SnapshotFileInfo_descriptor,
new java.lang.String[] { "Type", "Hfile", "WalServer", "WalName", });
internal_static_hbase_pb_SnapshotRegionManifest_descriptor =
- getDescriptor().getMessageTypes().get(2);
+ getDescriptor().getMessageTypes().get(1);
internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_SnapshotRegionManifest_descriptor,
@@ -6275,7 +4769,7 @@ public final class SnapshotProtos {
internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor,
new java.lang.String[] { "FamilyName", "StoreFiles", });
internal_static_hbase_pb_SnapshotDataManifest_descriptor =
- getDescriptor().getMessageTypes().get(3);
+ getDescriptor().getMessageTypes().get(2);
internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_SnapshotDataManifest_descriptor,
@@ -6286,7 +4780,6 @@ public final class SnapshotProtos {
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
- org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.FSProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
}, assigner);
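
The removal above takes the entire SnapshotDescription message (its Type enum, builder, and descriptor wiring) out of Snapshot.proto, which also drops the AccessControl.proto import and shifts the remaining message indices down by one: SnapshotFileInfo becomes getMessageTypes().get(0), and so on. In this change the message is relocated back into HBase.proto, so call sites switch to the HBaseProtos-generated class. A minimal sketch, assuming a build with the relocated message (the class name and field values are illustrative):

  import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;

  public class SnapshotDescriptionExample {
    public static SnapshotDescription describeSnapshot() {
      // Field numbers and semantics match the removed class: name is
      // required; table is checked only when taking a snapshot.
      return SnapshotDescription.newBuilder()
          .setName("snap_t1")
          .setTable("t1")
          .setType(SnapshotDescription.Type.FLUSH)
          .build();
    }
  }

Because only the owning .proto file changes, 1.3 clients and 1.4 servers still exchange byte-identical SnapshotDescription messages; the break is source-level, in which generated Java class call sites must import.
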