Posted to commits@hbase.apache.org by st...@apache.org on 2017/12/15 23:56:15 UTC

[3/3] hbase git commit: HBASE-19498 Fix findbugs and error-prone warnings in hbase-client (branch-2)

HBASE-19498 Fix findbugs and error-prone warnings in hbase-client (branch-2)

Signed-off-by: Michael Stack <st...@apache.org>
Signed-off-by: Apekshit Sharma <ap...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/59529a78
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/59529a78
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/59529a78

Branch: refs/heads/master
Commit: 59529a78f039965a4c08805c9087d82d64621c20
Parents: 20b42d2
Author: Peter Somogyi <ps...@cloudera.com>
Authored: Wed Dec 13 11:44:58 2017 +0100
Committer: Michael Stack <st...@apache.org>
Committed: Fri Dec 15 15:55:51 2017 -0800

----------------------------------------------------------------------
 .../org/apache/hadoop/hbase/ClusterStatus.java  |  12 +-
 .../apache/hadoop/hbase/HColumnDescriptor.java  |   1 +
 .../org/apache/hadoop/hbase/HRegionInfo.java    |  42 +++-
 .../hbase/InvalidFamilyOperationException.java  |   2 +-
 .../hadoop/hbase/MasterNotRunningException.java |   2 +-
 .../hadoop/hbase/NotServingRegionException.java |   2 +-
 .../apache/hadoop/hbase/RegionLocations.java    |   2 +-
 .../hadoop/hbase/TableExistsException.java      |   2 +-
 .../hadoop/hbase/TableNotDisabledException.java |   2 +-
 .../hbase/ZooKeeperConnectionException.java     |   2 +-
 .../org/apache/hadoop/hbase/client/Append.java  |   2 +
 .../client/AsyncAdminRequestRetryingCaller.java |   1 +
 .../AsyncMasterRequestRpcRetryingCaller.java    |   1 +
 .../hadoop/hbase/client/AsyncProcess.java       |   6 +-
 .../AsyncServerRequestRpcRetryingCaller.java    |   1 +
 .../hbase/client/BufferedMutatorParams.java     |   1 +
 .../client/ColumnFamilyDescriptorBuilder.java   |   1 +
 .../client/FastFailInterceptorContext.java      |   3 +
 .../org/apache/hadoop/hbase/client/Get.java     |   1 +
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  80 ++++----
 .../hadoop/hbase/client/HTableMultiplexer.java  |   2 +-
 .../apache/hadoop/hbase/client/Increment.java   |   4 +-
 .../client/NoServerForRegionException.java      |   2 +-
 .../client/PerClientRandomNonceGenerator.java   |   2 +
 .../hadoop/hbase/client/RegionInfoBuilder.java  |   2 +-
 .../hbase/client/RegionServerCallable.java      |   5 +
 .../org/apache/hadoop/hbase/client/Scan.java    |   1 +
 .../coprocessor/LongColumnInterpreter.java      |   3 +-
 .../client/security/SecurityCapability.java     |   2 +-
 .../hadoop/hbase/filter/BinaryComparator.java   |   2 +
 .../hbase/filter/BinaryPrefixComparator.java    |   2 +
 .../hadoop/hbase/filter/BitComparator.java      |   2 +
 .../hbase/filter/ColumnCountGetFilter.java      |   2 +
 .../hbase/filter/ColumnPaginationFilter.java    |   2 +
 .../hadoop/hbase/filter/ColumnPrefixFilter.java |   2 +
 .../hadoop/hbase/filter/ColumnRangeFilter.java  |   5 +-
 .../hadoop/hbase/filter/CompareFilter.java      |   1 +
 .../hbase/filter/DependentColumnFilter.java     |   2 +
 .../hadoop/hbase/filter/FamilyFilter.java       |   2 +
 .../apache/hadoop/hbase/filter/FilterBase.java  |   5 +
 .../apache/hadoop/hbase/filter/FilterList.java  |   2 +
 .../hadoop/hbase/filter/FirstKeyOnlyFilter.java |   3 +
 .../FirstKeyValueMatchingQualifiersFilter.java  |   2 +
 .../hadoop/hbase/filter/FuzzyRowFilter.java     |  12 ++
 .../hbase/filter/InclusiveStopFilter.java       |   4 +
 .../hadoop/hbase/filter/KeyOnlyFilter.java      |   2 +
 .../hbase/filter/MultiRowRangeFilter.java       |   2 +
 .../filter/MultipleColumnPrefixFilter.java      |   2 +
 .../hadoop/hbase/filter/NullComparator.java     |   2 +
 .../apache/hadoop/hbase/filter/PageFilter.java  |  26 ++-
 .../apache/hadoop/hbase/filter/ParseFilter.java |   7 +-
 .../hadoop/hbase/filter/PrefixFilter.java       |   6 +
 .../hadoop/hbase/filter/QualifierFilter.java    |   2 +
 .../hadoop/hbase/filter/RandomRowFilter.java    |   5 +-
 .../apache/hadoop/hbase/filter/RowFilter.java   |   2 +
 .../filter/SingleColumnValueExcludeFilter.java  |   3 +
 .../hbase/filter/SingleColumnValueFilter.java   |   6 +
 .../apache/hadoop/hbase/filter/SkipFilter.java  |   5 +
 .../hbase/filter/SubstringComparator.java       |   2 +
 .../hadoop/hbase/filter/TimestampsFilter.java   |   3 +
 .../apache/hadoop/hbase/filter/ValueFilter.java |   3 +-
 .../hadoop/hbase/filter/WhileMatchFilter.java   |   4 +
 .../hadoop/hbase/ipc/AbstractRpcClient.java     |   4 +-
 .../hadoop/hbase/ipc/BlockingRpcClient.java     |   1 +
 .../apache/hadoop/hbase/ipc/ConnectionId.java   |  10 +-
 .../hadoop/hbase/quotas/QuotaRetriever.java     |   1 +
 .../RegionServerRunningException.java           |   2 +-
 .../apache/hadoop/hbase/security/SaslUtil.java  |   7 +-
 .../hbase/security/access/Permission.java       |   2 +-
 .../security/visibility/VisibilityClient.java   | 197 ++++++++++---------
 .../shaded/protobuf/ResponseConverter.java      |   2 +-
 .../org/apache/hadoop/hbase/util/PoolMap.java   |   4 +-
 .../hadoop/hbase/TestHColumnDescriptor.java     |  21 +-
 .../hadoop/hbase/TestHTableDescriptor.java      |   2 +-
 .../hbase/TestInterfaceAudienceAnnotations.java |  10 +-
 .../hadoop/hbase/client/TestAsyncProcess.java   | 118 ++++++-----
 .../hbase/client/TestBufferedMutatorParams.java |  20 +-
 .../client/TestClientExponentialBackoff.java    |   8 +-
 .../hadoop/hbase/client/TestClientScanner.java  |  27 ++-
 .../TestColumnFamilyDescriptorBuilder.java      |  26 ++-
 .../hadoop/hbase/client/TestDelayingRunner.java |   5 +-
 .../hadoop/hbase/client/TestOperation.java      |  22 ++-
 .../hbase/client/TestRegionInfoDisplay.java     |  38 ++--
 .../client/TestSimpleRequestController.java     |  33 ++--
 .../hbase/client/TestSnapshotFromAdmin.java     |   2 +-
 .../client/TestTableDescriptorBuilder.java      |   2 +-
 .../hbase/security/TestHBaseSaslRpcClient.java  |  21 +-
 .../hadoop/hbase/security/TestSaslUtil.java     |  10 +-
 .../org/apache/hadoop/hbase/ServerName.java     |  23 ++-
 .../replication/ReplicationTrackerZKImpl.java   |   5 +
 .../hadoop/hbase/filter/FilterWrapper.java      |   2 +
 .../hadoop/hbase/zookeeper/EmptyWatcher.java    |   1 +
 .../hadoop/hbase/zookeeper/HQuorumPeer.java     |   3 +-
 .../hadoop/hbase/zookeeper/ZKLeaderManager.java |  11 +-
 .../apache/hadoop/hbase/zookeeper/ZKUtil.java   |   9 +-
 .../hadoop/hbase/zookeeper/TestZKUtil.java      |   8 +-
 96 files changed, 617 insertions(+), 361 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index 693b418..f06d9b9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -185,7 +185,7 @@ public class ClusterStatus {
     int count = 0;
     if (liveServers != null && !liveServers.isEmpty()) {
       for (Map.Entry<ServerName, ServerLoad> e: this.liveServers.entrySet()) {
-        count += e.getValue().getNumberOfRegions();
+        count = count + e.getValue().getNumberOfRegions();
       }
     }
     return count;
@@ -217,9 +217,7 @@ public class ClusterStatus {
     return hbaseVersion;
   }
 
-  /**
-   * @see java.lang.Object#equals(java.lang.Object)
-   */
+  @Override
   public boolean equals(Object o) {
     if (this == o) {
       return true;
@@ -238,16 +236,13 @@ public class ClusterStatus {
       getMasterInfoPort() == other.getMasterInfoPort();
   }
 
-  /**
-   * @see java.lang.Object#hashCode()
-   */
+  @Override
   public int hashCode() {
     return Objects.hashCode(hbaseVersion, liveServers, deadServers, master, backupMasters,
       clusterId, masterInfoPort);
   }
 
   /**
-   *
    * @return the object version number
    * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
    */
@@ -352,6 +347,7 @@ public class ClusterStatus {
     return masterInfoPort;
   }
 
+  @Override
   public String toString() {
     StringBuilder sb = new StringBuilder(1024);
     sb.append("Master: " + master);

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index be59f8c..7039208 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -428,6 +428,7 @@ public class HColumnDescriptor implements ColumnFamilyDescriptor, Comparable<HCo
    * will mask a later Put with lower ts. Set this to true to enable new semantics of versions.
    * We will also consider mvcc in versions. See HBASE-15968 for details.
    */
+  @Override
   public boolean isNewVersionBehavior() {
     return delegatee.isNewVersionBehavior();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
index f2cb821..930a0a3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
@@ -120,6 +120,7 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
   /**
    * @return Return a short, printable name for this region (usually encoded name) for us logging.
    */
+  @Override
   public String getShortNameToLog() {
     return prettyPrint(this.getEncodedName());
   }
@@ -189,7 +190,7 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
 
   private void setHashCode() {
     int result = Arrays.hashCode(this.regionName);
-    result ^= this.regionId;
+    result = (int) (result ^ this.regionId);
     result ^= Arrays.hashCode(this.startKey);
     result ^= Arrays.hashCode(this.endKey);
     result ^= Boolean.valueOf(this.offLine).hashCode();
@@ -473,6 +474,7 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
   }
 
   /** @return the regionId */
+  @Override
   public long getRegionId(){
     return regionId;
   }
@@ -481,6 +483,7 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
    * @return the regionName as an array of bytes.
    * @see #getRegionNameAsString()
    */
+  @Override
   public byte [] getRegionName(){
     return regionName;
   }
@@ -488,6 +491,7 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
   /**
    * @return Region name as a String for use in logging, etc.
    */
+  @Override
   public String getRegionNameAsString() {
     if (RegionInfo.hasEncodedName(this.regionName)) {
       // new format region names already have their encoded name.
@@ -500,7 +504,10 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
     return Bytes.toStringBinary(this.regionName) + "." + this.getEncodedName();
   }
 
-  /** @return the encoded region name */
+  /**
+   * @return the encoded region name
+   */
+  @Override
   public synchronized String getEncodedName() {
     if (this.encodedName == null) {
       this.encodedName = RegionInfo.encodeRegionName(this.regionName);
@@ -508,6 +515,7 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
     return this.encodedName;
   }
 
+  @Override
   public synchronized byte [] getEncodedNameAsBytes() {
     if (this.encodedNameAsBytes == null) {
       this.encodedNameAsBytes = Bytes.toBytes(getEncodedName());
@@ -515,12 +523,18 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
     return this.encodedNameAsBytes;
   }
 
-  /** @return the startKey */
+  /**
+   * @return the startKey
+   */
+  @Override
   public byte [] getStartKey(){
     return startKey;
   }
 
-  /** @return the endKey */
+  /**
+   * @return the endKey
+   */
+  @Override
   public byte [] getEndKey(){
     return endKey;
   }
@@ -529,6 +543,7 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
    * Get current table name of the region
    * @return TableName
    */
+  @Override
   public TableName getTable() {
     // This method name should be getTableName but there was already a method getTableName
     // that returned a byte array.  It is unfortunate given everywhere else, getTableName returns
@@ -546,6 +561,7 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
    * ["b","z"] it will return false.
    * @throws IllegalArgumentException if the range passed is invalid (ie. end &lt; start)
    */
+  @Override
   public boolean containsRange(byte[] rangeStartKey, byte[] rangeEndKey) {
     if (Bytes.compareTo(rangeStartKey, rangeEndKey) > 0) {
       throw new IllegalArgumentException(
@@ -561,8 +577,9 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
   }
 
   /**
-   * Return true if the given row falls in this region.
+   * @return true if the given row falls in this region.
    */
+  @Override
   public boolean containsRow(byte[] row) {
     return Bytes.compareTo(row, startKey) >= 0 &&
       (Bytes.compareTo(row, endKey) < 0 ||
@@ -576,7 +593,10 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
     return isMetaRegion();
   }
 
-  /** @return true if this region is a meta region */
+  /**
+   * @return true if this region is a meta region
+   */
+  @Override
   public boolean isMetaRegion() {
      return tableName.equals(HRegionInfo.FIRST_META_REGIONINFO.getTable());
   }
@@ -589,8 +609,9 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
   }
 
   /**
-   * @return True if has been split and has daughters.
+   * @return true if has been split and has daughters.
    */
+  @Override
   public boolean isSplit() {
     return this.split;
   }
@@ -603,8 +624,9 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
   }
 
   /**
-   * @return True if this region is offline.
+   * @return true if this region is offline.
    */
+  @Override
   public boolean isOffline() {
     return this.offLine;
   }
@@ -619,8 +641,9 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
   }
 
   /**
-   * @return True if this is a split parent region.
+   * @return true if this is a split parent region.
    */
+  @Override
   public boolean isSplitParent() {
     if (!isSplit()) return false;
     if (!isOffline()) {
@@ -633,6 +656,7 @@ public class HRegionInfo implements RegionInfo, Comparable<HRegionInfo> {
    * Returns the region replica id
    * @return returns region replica id
    */
+  @Override
   public int getReplicaId() {
     return replicaId;
   }
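
The setHashCode() change above ("result ^= this.regionId" becoming an explicit cast) addresses a narrowing-compound-assignment warning: regionId is a long, so the compound form hides an implicit (int) cast. A standalone sketch (invented values, not HBase code) showing that the fix only makes the cast visible, it does not change the result:

    public class NarrowingDemo {
      public static void main(String[] args) {
        long regionId = 0x1234_5678_9ABC_DEF0L;
        int result = 42;

        int compound = result;
        compound ^= regionId;                     // implicit (int) cast hidden here

        int explicit = (int) (result ^ regionId); // same value, cast spelled out

        System.out.println(compound == explicit); // true: behavior is unchanged
        // Only the low 32 bits of regionId take part in the hash; if mixing the
        // high bits mattered, Long.hashCode(regionId) would be the usual choice.
      }
    }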

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
index e9f00f3..63c26e2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
@@ -26,7 +26,7 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Public
 public class InvalidFamilyOperationException extends DoNotRetryIOException {
-  private static final long serialVersionUID = 1L << 22 - 1L;
+  private static final long serialVersionUID = (1L << 22) - 1L;
   /** default constructor */
   public InvalidFamilyOperationException() {
     super();
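
The serialVersionUID hunks in this patch (here and in MasterNotRunningException, NotServingRegionException, TableExistsException, TableNotDisabledException, ZooKeeperConnectionException and NoServerForRegionException below) all fix the same precedence warning: '-' binds tighter than '<<', so "1L << 22 - 1L" was really 1L << 21. A quick standalone check (not HBase code):

    public class ShiftPrecedenceDemo {
      public static void main(String[] args) {
        long original = 1L << 22 - 1L;   // parsed as 1L << (22 - 1) == 2_097_152
        long intended = (1L << 22) - 1L; // 4_194_303, i.e. the 22 low bits set

        System.out.println(original); // 2097152
        System.out.println(intended); // 4194303
      }
    }

As the arithmetic shows, parenthesizing also changes each constant's numeric value, not just its spelling.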

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
index ae47995..1ff17ac 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
@@ -27,7 +27,7 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Public
 public class MasterNotRunningException extends IOException {
-  private static final long serialVersionUID = 1L << 23 - 1L;
+  private static final long serialVersionUID = (1L << 23) - 1L;
   /** default constructor */
   public MasterNotRunningException() {
     super();

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
index 8b43886..6d3ae0c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  */
 @InterfaceAudience.Public
 public class NotServingRegionException extends IOException {
-  private static final long serialVersionUID = 1L << 17 - 1L;
+  private static final long serialVersionUID = (1L << 17) - 1L;
 
   /** default constructor */
   public NotServingRegionException() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
index 8b3fbb4..8889dc2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase;
 import java.util.Collection;
 
 import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Container for holding a list of {@link HRegionLocation}'s that correspond to the

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java
index 3e5bc8b..69929d8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java
@@ -25,7 +25,7 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Public
 public class TableExistsException extends DoNotRetryIOException {
-  private static final long serialVersionUID = 1L << 7 - 1L;
+  private static final long serialVersionUID = (1L << 7) - 1L;
   /** default constructor */
   public TableExistsException() {
     super();

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
index b0d396d..813c4e9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  */
 @InterfaceAudience.Public
 public class TableNotDisabledException extends DoNotRetryIOException {
-  private static final long serialVersionUID = 1L << 19 - 1L;
+  private static final long serialVersionUID = (1L << 19) - 1L;
   /** default constructor */
   public TableNotDisabledException() {
     super();

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
index 60776da..6c61468 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
@@ -27,7 +27,7 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Public
 public class ZooKeeperConnectionException extends IOException {
-  private static final long serialVersionUID = 1L << 23 - 1L;
+  private static final long serialVersionUID = (1L << 23) - 1L;
   /** default constructor */
   public ZooKeeperConnectionException() {
     super();

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index da07ea6..24e9512 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -86,6 +86,7 @@ public class Append extends Mutation {
    *          A client that is not interested in the result can save network
    *          bandwidth setting this to false.
    */
+  @Override
   public Append setReturnResults(boolean returnResults) {
     super.setReturnResults(returnResults);
     return this;
@@ -95,6 +96,7 @@ public class Append extends Mutation {
    * @return current setting for returnResults
    */
   // This method makes public the superclasses's protected method.
+  @Override
   public boolean isReturnResults() {
     return super.isReturnResults();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.java
index f168cbf..a320c66 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdminRequestRetryingCaller.java
@@ -70,6 +70,7 @@ public class AsyncAdminRequestRetryingCaller<T> extends AsyncRpcRetryingCaller<T
     });
   }
 
+  @Override
   CompletableFuture<T> call() {
     doCall();
     return future;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.java
index 94220d4..c6a2335 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMasterRequestRpcRetryingCaller.java
@@ -67,6 +67,7 @@ public class AsyncMasterRequestRpcRetryingCaller<T> extends AsyncRpcRetryingCall
     });
   }
 
+  @Override
   public CompletableFuture<T> call() {
     doCall();
     return future;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
index 5e0da59..f6e7739 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
@@ -153,7 +153,7 @@ class AsyncProcess {
   final long pauseForCQTBE;// pause for CallQueueTooBigException, if specified
   final int numTries;
   @VisibleForTesting
-  int serverTrackerTimeout;
+  long serverTrackerTimeout;
   final long primaryCallTimeoutMicroseconds;
   /** Whether to log details for batch errors */
   final boolean logBatchErrorDetails;
@@ -204,9 +204,9 @@ class AsyncProcess {
     // If we keep hitting one server, the net effect will be the incremental backoff, and
     // essentially the same number of retries as planned. If we have to do faster retries,
     // we will do more retries in aggregate, but the user will be none the wiser.
-    this.serverTrackerTimeout = 0;
+    this.serverTrackerTimeout = 0L;
     for (int i = 0; i < this.numTries; ++i) {
-      serverTrackerTimeout += ConnectionUtils.getPauseTime(this.pause, i);
+      serverTrackerTimeout = serverTrackerTimeout + ConnectionUtils.getPauseTime(this.pause, i);
     }
 
     this.rpcCallerFactory = rpcCaller;
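
The serverTrackerTimeout change above widens the field from int to long: ConnectionUtils.getPauseTime(...) returns a long, so accumulating it with "int +=" both hides a narrowing cast and can overflow for large pause/retry settings. A standalone sketch with invented pause values and a simplified stand-in for the backoff function (not the real ConnectionUtils logic):

    public class TrackerTimeoutDemo {
      // Simplified stand-in for ConnectionUtils.getPauseTime(pause, tries).
      static long pauseTime(long pause, int tries) {
        return pause * (1L << Math.min(tries, 30));
      }

      public static void main(String[] args) {
        long pause = 100_000L; // deliberately large to show the failure mode
        int numTries = 20;

        int asInt = 0;
        long asLong = 0L;
        for (int i = 0; i < numTries; i++) {
          asInt += pauseTime(pause, i);          // silently narrowed on each add
          asLong = asLong + pauseTime(pause, i); // the patched form
        }
        System.out.println(asInt);  // wrapped once the sum passes Integer.MAX_VALUE
        System.out.println(asLong); // the intended total
      }
    }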

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncServerRequestRpcRetryingCaller.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncServerRequestRpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncServerRequestRpcRetryingCaller.java
index 07c9a0b..dbbe999 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncServerRequestRpcRetryingCaller.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncServerRequestRpcRetryingCaller.java
@@ -72,6 +72,7 @@ public class AsyncServerRequestRpcRetryingCaller<T> extends AsyncRpcRetryingCall
     });
   }
 
+  @Override
   CompletableFuture<T> call() {
     doCall();
     return future;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java
index fdb1a4a..0648501 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java
@@ -151,6 +151,7 @@ public class BufferedMutatorParams implements Cloneable {
    */
   @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="CN_IDIOM_NO_SUPER_CALL",
     justification="The clone below is complete")
+  @Override
   public BufferedMutatorParams clone() {
     BufferedMutatorParams clone = new BufferedMutatorParams(this.tableName);
     clone.writeBufferSize = this.writeBufferSize;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
index f3786e7..6a30de5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
@@ -895,6 +895,7 @@ public class ColumnFamilyDescriptorBuilder {
      * will mask a later Put with lower ts. Set this to true to enable new semantics of versions.
      * We will also consider mvcc in versions. See HBASE-15968 for details.
      */
+    @Override
     public boolean isNewVersionBehavior() {
       return getStringOrDefault(NEW_VERSION_BEHAVIOR_BYTES,
           Boolean::parseBoolean, DEFAULT_NEW_VERSION_BEHAVIOR);

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
index 0ea165e..6b0e790 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
@@ -107,6 +107,7 @@ class FastFailInterceptorContext extends RetryingCallerInterceptorContext {
     this.tries = tries;
   }
 
+  @Override
   public void clear() {
     server = null;
     fInfo = null;
@@ -117,10 +118,12 @@ class FastFailInterceptorContext extends RetryingCallerInterceptorContext {
     tries = 0;
   }
 
+  @Override
   public FastFailInterceptorContext prepare(RetryingCallable<?> callable) {
     return prepare(callable, 0);
   }
 
+  @Override
   public FastFailInterceptorContext prepare(RetryingCallable<?> callable, int tries) {
     if (callable instanceof RegionServerCallable) {
       RegionServerCallable<?, ?> retryingCallable = (RegionServerCallable<?, ?>) callable;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
index adce567..059a5fd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
@@ -296,6 +296,7 @@ public class Get extends Query
     return this;
   }
 
+  @Override
   public Get setLoadColumnFamiliesOnDemand(boolean value) {
     return (Get) super.setLoadColumnFamiliesOnDemand(value);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index cd5e60e..2ea7c74 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -1,5 +1,4 @@
 /*
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -25,6 +24,7 @@ import com.google.protobuf.RpcController;
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.InterruptedIOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -313,7 +313,8 @@ public class HBaseAdmin implements Admin {
   }
 
   @Override
-  public List<TableDescriptor> listTableDescriptors(Pattern pattern, boolean includeSysTables) throws IOException {
+  public List<TableDescriptor> listTableDescriptors(Pattern pattern, boolean includeSysTables)
+      throws IOException {
     return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(),
         getRpcControllerFactory()) {
       @Override
@@ -327,7 +328,8 @@ public class HBaseAdmin implements Admin {
   }
 
   @Override
-  public TableDescriptor getDescriptor(TableName tableName) throws TableNotFoundException, IOException {
+  public TableDescriptor getDescriptor(TableName tableName)
+      throws TableNotFoundException, IOException {
     return getTableDescriptor(tableName, getConnection(), rpcCallerFactory, rpcControllerFactory,
        operationTimeout, rpcTimeout);
   }
@@ -377,7 +379,8 @@ public class HBaseAdmin implements Admin {
       protected List<TableDescriptor> rpcCall() throws Exception {
         GetTableDescriptorsRequest req =
             RequestConverter.buildGetTableDescriptorsRequest(tableNames);
-          return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(), req));
+          return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(),
+              req));
       }
     });
   }
@@ -547,20 +550,23 @@ public class HBaseAdmin implements Admin {
   static HTableDescriptor getHTableDescriptor(final TableName tableName, Connection connection,
       RpcRetryingCallerFactory rpcCallerFactory, final RpcControllerFactory rpcControllerFactory,
       int operationTimeout, int rpcTimeout) throws IOException {
-    if (tableName == null) return null;
+    if (tableName == null) {
+      return null;
+    }
     HTableDescriptor htd =
         executeCallable(new MasterCallable<HTableDescriptor>(connection, rpcControllerFactory) {
-      @Override
-      protected HTableDescriptor rpcCall() throws Exception {
-        GetTableDescriptorsRequest req =
-            RequestConverter.buildGetTableDescriptorsRequest(tableName);
-        GetTableDescriptorsResponse htds = master.getTableDescriptors(getRpcController(), req);
-        if (!htds.getTableSchemaList().isEmpty()) {
-          return new ImmutableHTableDescriptor(ProtobufUtil.toTableDescriptor(htds.getTableSchemaList().get(0)));
-        }
-        return null;
-      }
-    }, rpcCallerFactory, operationTimeout, rpcTimeout);
+          @Override
+          protected HTableDescriptor rpcCall() throws Exception {
+            GetTableDescriptorsRequest req =
+                RequestConverter.buildGetTableDescriptorsRequest(tableName);
+            GetTableDescriptorsResponse htds = master.getTableDescriptors(getRpcController(), req);
+            if (!htds.getTableSchemaList().isEmpty()) {
+              return new ImmutableHTableDescriptor(
+                  ProtobufUtil.toTableDescriptor(htds.getTableSchemaList().get(0)));
+            }
+            return null;
+          }
+        }, rpcCallerFactory, operationTimeout, rpcTimeout);
     if (htd != null) {
       return new ImmutableHTableDescriptor(htd);
     }
@@ -1146,7 +1152,6 @@ public class HBaseAdmin implements Admin {
   }
 
   /**
-   *
    * @param sn
    * @return List of {@link HRegionInfo}.
    * @throws IOException
@@ -1573,9 +1578,8 @@ public class HBaseAdmin implements Admin {
   public boolean cleanerChoreSwitch(final boolean on) throws IOException {
     return executeCallable(new MasterCallable<Boolean>(getConnection(), getRpcControllerFactory()) {
       @Override public Boolean rpcCall() throws Exception {
-        return master.setCleanerChoreRunning(getRpcController(), RequestConverter
-                                                                   .buildSetCleanerChoreRunningRequest(
-                                                                     on)).getPrevValue();
+        return master.setCleanerChoreRunning(getRpcController(),
+            RequestConverter.buildSetCleanerChoreRunningRequest(on)).getPrevValue();
       }
     });
   }
@@ -1584,10 +1588,8 @@ public class HBaseAdmin implements Admin {
   public boolean runCleanerChore() throws IOException {
     return executeCallable(new MasterCallable<Boolean>(getConnection(), getRpcControllerFactory()) {
       @Override public Boolean rpcCall() throws Exception {
-        return master
-                 .runCleanerChore(getRpcController(), RequestConverter
-                                                        .buildRunCleanerChoreRequest())
-                 .getCleanerChoreRan();
+        return master.runCleanerChore(getRpcController(),
+            RequestConverter.buildRunCleanerChoreRequest()).getCleanerChoreRan();
       }
     });
   }
@@ -1597,8 +1599,7 @@ public class HBaseAdmin implements Admin {
     return executeCallable(new MasterCallable<Boolean>(getConnection(), getRpcControllerFactory()) {
       @Override public Boolean rpcCall() throws Exception {
         return master.isCleanerChoreEnabled(getRpcController(),
-                                            RequestConverter.buildIsCleanerChoreEnabledRequest())
-                     .getValue();
+            RequestConverter.buildIsCleanerChoreEnabledRequest()).getValue();
       }
     });
   }
@@ -1676,7 +1677,8 @@ public class HBaseAdmin implements Admin {
     byte[][] encodedNameofRegionsToMerge = new byte[nameofRegionsToMerge.length][];
     for(int i = 0; i < nameofRegionsToMerge.length; i++) {
       encodedNameofRegionsToMerge[i] = HRegionInfo.isEncodedRegionName(nameofRegionsToMerge[i]) ?
-        nameofRegionsToMerge[i] : HRegionInfo.encodeRegionName(nameofRegionsToMerge[i]).getBytes();
+        nameofRegionsToMerge[i] : HRegionInfo.encodeRegionName(nameofRegionsToMerge[i])
+          .getBytes(StandardCharsets.UTF_8);
     }
 
     TableName tableName = null;
@@ -1774,7 +1776,7 @@ public class HBaseAdmin implements Admin {
   public Future<Void> splitRegionAsync(byte[] regionName, byte[] splitPoint)
       throws IOException {
     byte[] encodedNameofRegionToSplit = HRegionInfo.isEncodedRegionName(regionName) ?
-        regionName : HRegionInfo.encodeRegionName(regionName).getBytes();
+        regionName : HRegionInfo.encodeRegionName(regionName).getBytes(StandardCharsets.UTF_8);
     Pair<RegionInfo, ServerName> pair = getRegion(regionName);
     if (pair != null) {
       if (pair.getFirst() != null &&
@@ -2355,10 +2357,9 @@ public class HBaseAdmin implements Admin {
       protected HTableDescriptor[] rpcCall() throws Exception {
         GetTableDescriptorsRequest req =
             RequestConverter.buildGetTableDescriptorsRequest(tableNames);
-          return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(), req))
-                  .stream()
-                  .map(ImmutableHTableDescriptor::new)
-                  .toArray(HTableDescriptor[]::new);
+        return ProtobufUtil
+            .toTableDescriptorList(master.getTableDescriptors(getRpcController(), req)).stream()
+            .map(ImmutableHTableDescriptor::new).toArray(HTableDescriptor[]::new);
       }
     });
   }
@@ -2746,8 +2747,8 @@ public class HBaseAdmin implements Admin {
   }
 
   @Override
-  public byte[] execProcedureWithReturn(String signature, String instance, Map<String, String> props)
-      throws IOException {
+  public byte[] execProcedureWithReturn(String signature, String instance, Map<String,
+      String> props) throws IOException {
     ProcedureDescription desc = ProtobufUtil.buildProcedureDescription(signature, instance, props);
     final ExecProcedureRequest request =
         ExecProcedureRequest.newBuilder().setProcedure(desc).build();
@@ -2833,7 +2834,8 @@ public class HBaseAdmin implements Admin {
   private Future<Void> internalRestoreSnapshotAsync(final String snapshotName,
       final TableName tableName, final boolean restoreAcl)
       throws IOException, RestoreSnapshotException {
-    final SnapshotProtos.SnapshotDescription snapshot = SnapshotProtos.SnapshotDescription.newBuilder()
+    final SnapshotProtos.SnapshotDescription snapshot =
+        SnapshotProtos.SnapshotDescription.newBuilder()
         .setName(snapshotName).setTable(tableName.getNameAsString()).build();
 
     // actually restore the snapshot
@@ -2977,9 +2979,8 @@ public class HBaseAdmin implements Admin {
       try {
         internalDeleteSnapshot(snapshot);
       } catch (IOException ex) {
-        LOG.info(
-          "Failed to delete snapshot " + snapshot.getName() + " for table " + snapshot.getTableNameAsString(),
-          ex);
+        LOG.info("Failed to delete snapshot " + snapshot.getName() + " for table "
+                + snapshot.getTableNameAsString(), ex);
       }
     }
   }
@@ -3991,7 +3992,8 @@ public class HBaseAdmin implements Admin {
               getRpcControllerFactory()) {
       @Override
       public List<ServerName> rpcCall() throws ServiceException {
-        ListDecommissionedRegionServersRequest req = ListDecommissionedRegionServersRequest.newBuilder().build();
+        ListDecommissionedRegionServersRequest req =
+            ListDecommissionedRegionServersRequest.newBuilder().build();
         List<ServerName> servers = new ArrayList<>();
         for (HBaseProtos.ServerName server : master
             .listDecommissionedRegionServers(getRpcController(), req).getServerNameList()) {
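
Among the mostly formatting-only HBaseAdmin changes, the region-name handling hunks (splitRegionAsync and the merge path) also swap String.getBytes() for getBytes(StandardCharsets.UTF_8); that is findbugs' default-encoding warning, since the no-argument form uses whatever charset the JVM happens to default to. A minimal standalone illustration (not HBase code):

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class CharsetDemo {
      public static void main(String[] args) {
        String encodedName = "1588230740"; // an illustrative region-name-like token

        byte[] platformDependent = encodedName.getBytes();            // flagged
        byte[] pinned = encodedName.getBytes(StandardCharsets.UTF_8); // patched form

        // Equal for an ASCII-only string, but only the explicit form is guaranteed
        // to be stable across hosts with different default charsets.
        System.out.println(Arrays.equals(platformDependent, pinned));
      }
    }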

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
index 3a6e3b4..77d4fb2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
@@ -468,7 +468,7 @@ public class HTableMultiplexer {
     }
 
     public long getTotalBufferedCount() {
-      return queue.size() + currentProcessingCount.get();
+      return (long) queue.size() + currentProcessingCount.get();
     }
 
     public AtomicAverageCounter getAverageLatencyCounter() {
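
The getTotalBufferedCount() fix is a related but distinct integer pattern: assuming, as the cast in the patch suggests, that both operands are ints, the addition is performed in 32 bits and only the already-wrapped sum is widened to the long return type. Casting one operand first makes the addition itself happen in long. A standalone sketch with invented values (not HBase code):

    import java.util.concurrent.atomic.AtomicInteger;

    public class WidenBeforeAddDemo {
      static long buggyTotal(int queued, AtomicInteger processing) {
        return queued + processing.get();        // int addition, may wrap
      }

      static long fixedTotal(int queued, AtomicInteger processing) {
        return (long) queued + processing.get(); // long addition, no wrap
      }

      public static void main(String[] args) {
        AtomicInteger processing = new AtomicInteger(Integer.MAX_VALUE);
        int queued = 10;

        System.out.println(buggyTotal(queued, processing)); // negative: wrapped
        System.out.println(fixedTotal(queued, processing)); // 2147483657
      }
    }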

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
index 52c0c59..27cdafe 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
@@ -47,7 +47,7 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Public
 public class Increment extends Mutation implements Comparable<Row> {
-  private static final long HEAP_OVERHEAD =  ClassSize.REFERENCE + ClassSize.TIMERANGE;
+  private static final int HEAP_OVERHEAD = ClassSize.REFERENCE + ClassSize.TIMERANGE;
   private TimeRange tr = new TimeRange();
 
   /**
@@ -164,6 +164,7 @@ public class Increment extends Mutation implements Comparable<Row> {
    *          client that is not interested in the result can save network bandwidth setting this
    *          to false.
    */
+  @Override
   public Increment setReturnResults(boolean returnResults) {
     super.setReturnResults(returnResults);
     return this;
@@ -173,6 +174,7 @@ public class Increment extends Mutation implements Comparable<Row> {
    * @return current setting for returnResults
    */
   // This method makes public the superclasses's protected method.
+  @Override
   public boolean isReturnResults() {
     return super.isReturnResults();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
index 2f69d12..184f0c0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
@@ -25,7 +25,7 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Public
 public class NoServerForRegionException extends DoNotRetryRegionException {
-  private static final long serialVersionUID = 1L << 11 - 1L;
+  private static final long serialVersionUID = (1L << 11) - 1L;
 
   /** default constructor */
   public NoServerForRegionException() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java
index ae75d74..c492282 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java
@@ -41,10 +41,12 @@ public final class PerClientRandomNonceGenerator implements NonceGenerator {
     this.clientId = (((long) Arrays.hashCode(clientIdBase)) << 32) + rdm.nextInt();
   }
 
+  @Override
   public long getNonceGroup() {
     return this.clientId;
   }
 
+  @Override
   public long newNonce() {
     long result = HConstants.NO_NONCE;
     do {

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
index 29e146d..e17e307 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
@@ -168,7 +168,7 @@ public class RegionInfoBuilder {
         final byte[] endKey, final long regionId,
         final int replicaId, boolean offLine, byte[] regionName) {
       int result = Arrays.hashCode(regionName);
-      result ^= regionId;
+      result = (int) (result ^ regionId);
       result ^= Arrays.hashCode(checkStartKey(startKey));
       result ^= Arrays.hashCode(checkEndKey(endKey));
       result ^= Boolean.valueOf(offLine).hashCode();

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionServerCallable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionServerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionServerCallable.java
index 1c238b9..9c0f553 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionServerCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionServerCallable.java
@@ -106,6 +106,7 @@ public abstract class RegionServerCallable<T, S> implements RetryingCallable<T>
    * Override that changes call Exception from {@link Exception} to {@link IOException}.
    * Also does set up of the rpcController.
    */
+  @Override
   public T call(int callTimeout) throws IOException {
     try {
       // Iff non-null and an instance of a SHADED rpcController, do config! Unshaded -- i.e.
@@ -183,6 +184,7 @@ public abstract class RegionServerCallable<T, S> implements RetryingCallable<T>
 
   protected int getPriority() { return this.priority;}
 
+  @Override
   public void throwable(Throwable t, boolean retrying) {
     if (location != null) {
       getConnection().updateCachedLocations(tableName, location.getRegionInfo().getRegionName(),
@@ -190,10 +192,12 @@ public abstract class RegionServerCallable<T, S> implements RetryingCallable<T>
     }
   }
 
+  @Override
   public String getExceptionMessageAdditionalDetail() {
     return "row '" + Bytes.toString(row) + "' on table '" + tableName + "' at " + location;
   }
 
+  @Override
   public long sleep(long pause, int tries) {
     return ConnectionUtils.getPauseTime(pause, tries);
   }
@@ -208,6 +212,7 @@ public abstract class RegionServerCallable<T, S> implements RetryingCallable<T>
     return this.location.getRegionInfo();
   }
 
+  @Override
   public void prepare(final boolean reload) throws IOException {
     // check table state if this is a retry
     if (reload && tableName != null && !tableName.equals(TableName.META_TABLE_NAME)

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 6357d6d..2667858 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -894,6 +894,7 @@ public class Scan extends Query {
     return allowPartialResults;
   }
 
+  @Override
   public Scan setLoadColumnFamiliesOnDemand(boolean value) {
     return (Scan) super.setLoadColumnFamiliesOnDemand(value);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
index 18b3c5b..2cc4bb2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 public class LongColumnInterpreter extends ColumnInterpreter<Long, Long,
                  EmptyMsg, LongMsg, LongMsg> {
 
+  @Override
   public Long getValue(byte[] colFamily, byte[] colQualifier, Cell kv)
       throws IOException {
     if (kv == null || kv.getValueLength() != Bytes.SIZEOF_LONG)
@@ -50,7 +51,7 @@ public class LongColumnInterpreter extends ColumnInterpreter<Long, Long,
     return PrivateCellUtil.getValueAsLong(kv);
   }
 
-   @Override
+  @Override
   public Long add(Long l1, Long l2) {
     if (l1 == null ^ l2 == null) {
       return (l1 == null) ? l2 : l1; // either of one is null.

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
index 783b569..37a3135 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
@@ -32,7 +32,7 @@ public enum SecurityCapability {
   CELL_AUTHORIZATION(3),
   CELL_VISIBILITY(4);
 
-  private int value;
+  private final int value;
 
   public int getValue() {
     return value;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
index dc8eaf2..65eb8b1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
@@ -58,6 +58,7 @@ public class BinaryComparator extends org.apache.hadoop.hbase.filter.ByteArrayCo
   /**
    * @return The comparator serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     ComparatorProtos.BinaryComparator.Builder builder =
       ComparatorProtos.BinaryComparator.newBuilder();
@@ -87,6 +88,7 @@ public class BinaryComparator extends org.apache.hadoop.hbase.filter.ByteArrayCo
    * @return true if and only if the fields of the comparator that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(ByteArrayComparable other) {
     if (other == this) return true;
     if (!(other instanceof BinaryComparator)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
index 7461f63..7e6ba25 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
@@ -63,6 +63,7 @@ public class BinaryPrefixComparator extends ByteArrayComparable {
   /**
    * @return The comparator serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     ComparatorProtos.BinaryPrefixComparator.Builder builder =
       ComparatorProtos.BinaryPrefixComparator.newBuilder();
@@ -92,6 +93,7 @@ public class BinaryPrefixComparator extends ByteArrayComparable {
    * @return true if and only if the fields of the comparator that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(ByteArrayComparable other) {
     if (other == this) return true;
     if (!(other instanceof BinaryPrefixComparator)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
index 937edf3..98033ac 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
@@ -67,6 +67,7 @@ public class BitComparator extends ByteArrayComparable {
   /**
    * @return The comparator serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     ComparatorProtos.BitComparator.Builder builder =
       ComparatorProtos.BitComparator.newBuilder();
@@ -100,6 +101,7 @@ public class BitComparator extends ByteArrayComparable {
    * @return true if and only if the fields of the comparator that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(ByteArrayComparable other) {
     if (other == this) return true;
     if (!(other instanceof BitComparator)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
index 120a4bb..3bc1785 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
@@ -89,6 +89,7 @@ public class ColumnCountGetFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.ColumnCountGetFilter.Builder builder =
       FilterProtos.ColumnCountGetFilter.newBuilder();
@@ -118,6 +119,7 @@ public class ColumnCountGetFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof ColumnCountGetFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
index 0ab8f1a..63a1174 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
@@ -168,6 +168,7 @@ public class ColumnPaginationFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.ColumnPaginationFilter.Builder builder =
       FilterProtos.ColumnPaginationFilter.newBuilder();
@@ -207,6 +208,7 @@ public class ColumnPaginationFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof ColumnPaginationFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
index 393f66d..4811691 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
@@ -113,6 +113,7 @@ public class ColumnPrefixFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.ColumnPrefixFilter.Builder builder =
       FilterProtos.ColumnPrefixFilter.newBuilder();
@@ -142,6 +143,7 @@ public class ColumnPrefixFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
    if (o == this) return true;
    if (!(o instanceof ColumnPrefixFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
index 2cd360f..1ffd39e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
@@ -149,8 +149,7 @@ public class ColumnRangeFilter extends FilterBase {
 
     int cmpMax = CellUtil.compareQualifiers(c, this.maxColumn, 0, this.maxColumn.length);
 
-    if (this.maxColumnInclusive && cmpMax <= 0 ||
-        !this.maxColumnInclusive && cmpMax < 0) {
+    if ((this.maxColumnInclusive && cmpMax <= 0) || (!this.maxColumnInclusive && cmpMax < 0)) {
       return ReturnCode.INCLUDE;
     }
 
@@ -176,6 +175,7 @@ public class ColumnRangeFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.ColumnRangeFilter.Builder builder =
       FilterProtos.ColumnRangeFilter.newBuilder();
@@ -212,6 +212,7 @@ public class ColumnRangeFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
    if (o == this) return true;
    if (!(o instanceof ColumnRangeFilter)) return false;
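
Beyond the @Override additions, the ColumnRangeFilter hunk wraps a mixed &&/|| condition in explicit parentheses. Java already gives && higher precedence than ||, so the behaviour is unchanged; the parentheses simply spell the grouping out, which is the sort of thing checks along the lines of error-prone's OperatorPrecedence ask for. A small sketch with invented names:

  class PrecedenceExample {
    static boolean include(boolean maxInclusive, int cmpMax) {
      // Same grouping the compiler would choose for
      //   maxInclusive && cmpMax <= 0 || !maxInclusive && cmpMax < 0
      // since && binds tighter than ||; the parentheses just make it visible.
      return (maxInclusive && cmpMax <= 0) || (!maxInclusive && cmpMax < 0);
    }
  }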

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
index cbf2b0d..b1e596e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
@@ -300,6 +300,7 @@ public abstract class CompareFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof CompareFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
index 020fae6..44cdb6f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
@@ -237,6 +237,7 @@ public class DependentColumnFilter extends CompareFilter {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.DependentColumnFilter.Builder builder =
       FilterProtos.DependentColumnFilter.newBuilder();
@@ -288,6 +289,7 @@ public class DependentColumnFilter extends CompareFilter {
    */
   @edu.umd.cs.findbugs.annotations.SuppressWarnings(
       value="RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof DependentColumnFilter)) return false;
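
In DependentColumnFilter the new @Override sits next to an existing findbugs @SuppressWarnings annotation. Annotations on a declaration stack freely and their order carries no meaning, so adding @Override leaves the suppression untouched. A tiny illustrative sketch, with an invented class:

  class AnnotatedOverride {
    @SuppressWarnings("deprecation")  // pre-existing suppression, kept as-is
    @Override                         // newly added; order relative to other annotations is irrelevant
    public String toString() {
      return "AnnotatedOverride";
    }
  }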

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
index 6745599..df15508 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
@@ -99,6 +99,7 @@ public class FamilyFilter extends CompareFilter {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.FamilyFilter.Builder builder =
       FilterProtos.FamilyFilter.newBuilder();
@@ -137,6 +138,7 @@ public class FamilyFilter extends CompareFilter {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof FamilyFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
index 86a5b39..7401e4c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
@@ -128,6 +128,7 @@ public abstract class FilterBase extends Filter {
    *
    * {@inheritDoc}
    */
+  @Override
   public Cell getNextCellHint(Cell currentCell) throws IOException {
     return null;
   }
@@ -138,6 +139,7 @@ public abstract class FilterBase extends Filter {
    *
    * {@inheritDoc}
    */
+  @Override
   public boolean isFamilyEssential(byte[] name) throws IOException {
     return true;
   }
@@ -155,6 +157,7 @@ public abstract class FilterBase extends Filter {
   /**
    * Return filter's info for debugging and logging purpose.
    */
+  @Override
   public String toString() {
     return this.getClass().getSimpleName();
   }
@@ -162,6 +165,7 @@ public abstract class FilterBase extends Filter {
   /**
    * Return length 0 byte array for Filters that don't require special serialization
    */
+  @Override
   public byte[] toByteArray() throws IOException {
     return new byte[0];
   }
@@ -173,6 +177,7 @@ public abstract class FilterBase extends Filter {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter other) {
     return true;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 3b6455e..e16ea0e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -198,6 +198,7 @@ final public class FilterList extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte[] toByteArray() throws IOException {
     FilterProtos.FilterList.Builder builder = FilterProtos.FilterList.newBuilder();
     builder.setOperator(FilterProtos.FilterList.Operator.valueOf(operator.name()));
@@ -239,6 +240,7 @@ final public class FilterList extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized are equal to the
    *         corresponding fields in other. Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter other) {
     if (other == this) return true;
     if (!(other instanceof FilterList)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
index e0244da..d10113f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
@@ -41,6 +41,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
   public FirstKeyOnlyFilter() {
   }
 
+  @Override
   public void reset() {
     foundKV = false;
   }
@@ -88,6 +89,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.FirstKeyOnlyFilter.Builder builder =
       FilterProtos.FirstKeyOnlyFilter.newBuilder();
@@ -117,6 +119,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof FirstKeyOnlyFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
index f994b15..469d660 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
@@ -88,6 +88,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder builder =
       FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder();
@@ -124,6 +125,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof FirstKeyValueMatchingQualifiersFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
index d70c282..c29b679 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
@@ -263,6 +263,7 @@ public class FuzzyRowFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte[] toByteArray() {
     FilterProtos.FuzzyRowFilter.Builder builder = FilterProtos.FuzzyRowFilter.newBuilder();
     for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
@@ -457,45 +458,55 @@ public class FuzzyRowFilter extends FilterBase {
   /** Abstracts directional comparisons based on scan direction. */
   private enum Order {
     ASC {
+      @Override
       public boolean lt(int lhs, int rhs) {
         return lhs < rhs;
       }
 
+      @Override
       public boolean gt(int lhs, int rhs) {
         return lhs > rhs;
       }
 
+      @Override
       public byte inc(byte val) {
         // TODO: what about over/underflow?
         return (byte) (val + 1);
       }
 
+      @Override
       public boolean isMax(byte val) {
         return val == (byte) 0xff;
       }
 
+      @Override
       public byte min() {
         return 0;
       }
     },
     DESC {
+      @Override
       public boolean lt(int lhs, int rhs) {
         return lhs > rhs;
       }
 
+      @Override
       public boolean gt(int lhs, int rhs) {
         return lhs < rhs;
       }
 
+      @Override
       public byte inc(byte val) {
         // TODO: what about over/underflow?
         return (byte) (val - 1);
       }
 
+      @Override
       public boolean isMax(byte val) {
         return val == 0;
       }
 
+      @Override
       public byte min() {
         return (byte) 0xFF;
       }
@@ -618,6 +629,7 @@ public class FuzzyRowFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized are equal to the
    *         corresponding fields in other. Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof FuzzyRowFilter)) return false;
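
The FuzzyRowFilter hunk applies the same annotation inside enum constant bodies: ASC and DESC each supply their own implementations of the Order enum's abstract methods, and @Override is just as applicable there as in an ordinary subclass. A minimal sketch with an invented enum:

  enum Direction {
    FORWARD {
      @Override
      int step(int position) {
        return position + 1;
      }
    },
    BACKWARD {
      @Override
      int step(int position) {
        return position - 1;
      }
    };

    // Each constant body above overrides this abstract method.
    abstract int step(int position);
  }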

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
index 6e21ba4..5969ba7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
@@ -63,6 +63,7 @@ public class InclusiveStopFilter extends FilterBase {
     return ReturnCode.INCLUDE;
   }
 
+  @Override
   public boolean filterRowKey(Cell firstRowCell) {
     // if stopRowKey is <= buffer, then true, filter row.
     if (filterAllRemaining()) return true;
@@ -71,6 +72,7 @@ public class InclusiveStopFilter extends FilterBase {
     return done;
   }
 
+  @Override
   public boolean filterAllRemaining() {
     return done;
   }
@@ -85,6 +87,7 @@ public class InclusiveStopFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.InclusiveStopFilter.Builder builder =
       FilterProtos.InclusiveStopFilter.newBuilder();
@@ -115,6 +118,7 @@ public class InclusiveStopFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof InclusiveStopFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
index b23677b..606728e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
@@ -91,6 +91,7 @@ public class KeyOnlyFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.KeyOnlyFilter.Builder builder =
       FilterProtos.KeyOnlyFilter.newBuilder();
@@ -120,6 +121,7 @@ public class KeyOnlyFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof KeyOnlyFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index d025349..0911d14 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -146,6 +146,7 @@ public class MultiRowRangeFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte[] toByteArray() {
     FilterProtos.MultiRowRangeFilter.Builder builder = FilterProtos.MultiRowRangeFilter
         .newBuilder();
@@ -194,6 +195,7 @@ public class MultiRowRangeFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized are equal to the
    *         corresponding fields in other. Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this)
       return true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
index 88af4f4..90e97c4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
@@ -118,6 +118,7 @@ public class MultipleColumnPrefixFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.MultipleColumnPrefixFilter.Builder builder =
       FilterProtos.MultipleColumnPrefixFilter.newBuilder();
@@ -155,6 +156,7 @@ public class MultipleColumnPrefixFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof MultipleColumnPrefixFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
index 6a0f234..08f37c9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
@@ -67,6 +67,7 @@ public class NullComparator extends ByteArrayComparable {
   /**
    * @return The comparator serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     ComparatorProtos.NullComparator.Builder builder =
       ComparatorProtos.NullComparator.newBuilder();
@@ -95,6 +96,7 @@ public class NullComparator extends ByteArrayComparable {
    * @return true if and only if the fields of the comparator that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(ByteArrayComparable other) {
     if (other == this) return true;
     if (!(other instanceof NullComparator)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
index 8949896..91c071e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
@@ -22,12 +22,13 @@ import java.io.IOException;
 import java.util.ArrayList;
 
 import org.apache.hadoop.hbase.Cell;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
+
 /**
  * Implementation of Filter interface that limits results to a specific page
  * size. It terminates scanning once the number of filter-passed rows is &gt;
@@ -75,16 +76,19 @@ public class PageFilter extends FilterBase {
   public ReturnCode filterCell(final Cell ignored) throws IOException {
     return ReturnCode.INCLUDE;
   }
-  
+
+  @Override
   public boolean filterAllRemaining() {
     return this.rowsAccepted >= this.pageSize;
   }
 
+  @Override
   public boolean filterRow() {
     this.rowsAccepted++;
     return this.rowsAccepted > this.pageSize;
   }
   
+  @Override
   public boolean hasFilterRow() {
     return true;
   }
@@ -99,6 +103,7 @@ public class PageFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.PageFilter.Builder builder =
       FilterProtos.PageFilter.newBuilder();
@@ -124,13 +129,18 @@ public class PageFilter extends FilterBase {
   }
 
   /**
-   * @param other
-   * @return true if and only if the fields of the filter that are serialized
-   * are equal to the corresponding fields in other.  Used for testing.
+   * @param o other Filter to compare with
+   * @return true if and only if the fields of the filter that are serialized are equal to the
+   *         corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
-    if (o == this) return true;
-    if (!(o instanceof PageFilter)) return false;
+    if (o == this) {
+      return true;
+    }
+    if (!(o instanceof PageFilter)) {
+      return false;
+    }
 
     PageFilter other = (PageFilter)o;
     return this.getPageSize() == other.getPageSize();
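
The PageFilter hunk goes a little further than annotations: it regroups the imports, trims trailing whitespace, corrects the javadoc @param name to match the actual parameter, and expands the one-line if statements in areSerializedFieldsEqual into braced blocks. Braces keep a later edit from accidentally landing outside the conditional. A short sketch of the braced style, with invented names:

  class BracedChecks {
    static String classify(Object o) {
      // One-line forms such as "if (o == null) return "null";" compile fine,
      // but the braced block makes it impossible to attach a second statement
      // to the condition by mistake.
      if (o == null) {
        return "null";
      }
      if (!(o instanceof CharSequence)) {
        return "not text";
      }
      return "text";
    }
  }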

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
index 7d86baa..6ebe2fe 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
@@ -22,6 +22,7 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.EmptyStackException;
@@ -261,7 +262,7 @@ public class ParseFilter {
       e.printStackTrace();
     }
     throw new IllegalArgumentException("Incorrect filter string " +
-                                       new String(filterStringAsByteArray));
+        new String(filterStringAsByteArray, StandardCharsets.UTF_8));
   }
 
 /**
@@ -837,9 +838,9 @@ public class ParseFilter {
     else if (Bytes.equals(comparatorType, ParseConstants.binaryPrefixType))
       return new BinaryPrefixComparator(comparatorValue);
     else if (Bytes.equals(comparatorType, ParseConstants.regexStringType))
-      return new RegexStringComparator(new String(comparatorValue));
+      return new RegexStringComparator(new String(comparatorValue, StandardCharsets.UTF_8));
     else if (Bytes.equals(comparatorType, ParseConstants.substringType))
-      return new SubstringComparator(new String(comparatorValue));
+      return new SubstringComparator(new String(comparatorValue, StandardCharsets.UTF_8));
     else
       throw new IllegalArgumentException("Incorrect comparatorType");
   }
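
The ParseFilter change swaps the bare new String(byte[]) calls for constructors that name StandardCharsets.UTF_8. The no-charset form decodes with the JVM's platform default, which varies between environments, and that reliance on the default encoding is the kind of thing findbugs flags (its DM_DEFAULT_ENCODING pattern). A small sketch of the difference, with an invented helper:

  import java.nio.charset.StandardCharsets;

  class CharsetExample {
    static String decode(byte[] raw) {
      // new String(raw) would pick up whatever charset the JVM defaults to;
      // naming UTF-8 makes the result the same on every platform.
      return new String(raw, StandardCharsets.UTF_8);
    }
  }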

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
index 4fb2370..161c1a5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
@@ -50,6 +50,7 @@ public class PrefixFilter extends FilterBase {
     return prefix;
   }
 
+  @Override
   public boolean filterRowKey(Cell firstRowCell) {
     if (firstRowCell == null || this.prefix == null)
       return true;
@@ -87,14 +88,17 @@ public class PrefixFilter extends FilterBase {
     return ReturnCode.INCLUDE;
   }
 
+  @Override
   public boolean filterRow() {
     return filterRow;
   }
 
+  @Override
   public void reset() {
     filterRow = true;
   }
 
+  @Override
   public boolean filterAllRemaining() {
     return passedPrefix;
   }
@@ -109,6 +113,7 @@ public class PrefixFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.PrefixFilter.Builder builder =
       FilterProtos.PrefixFilter.newBuilder();
@@ -138,6 +143,7 @@ public class PrefixFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof PrefixFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/59529a78/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
index 8f3c859..3d38dc5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
@@ -97,6 +97,7 @@ public class QualifierFilter extends CompareFilter {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.QualifierFilter.Builder builder =
       FilterProtos.QualifierFilter.newBuilder();
@@ -135,6 +136,7 @@ public class QualifierFilter extends CompareFilter {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof QualifierFilter)) return false;