Posted to commits@hbase.apache.org by ap...@apache.org on 2017/11/18 01:22:34 UTC

[06/30] hbase git commit: HBASE-19239 Fix findbugs and error-prone issues

HBASE-19239 Fix findbugs and error-prone issues

Fixes for hbase-client


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3444059a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3444059a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3444059a

Branch: refs/heads/branch-1.4
Commit: 3444059a205a4eeb15a29f45ca609b8579803f5e
Parents: 2d579a4
Author: Andrew Purtell <ap...@apache.org>
Authored: Wed Nov 15 18:47:45 2017 -0800
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Nov 17 17:12:04 2017 -0800

----------------------------------------------------------------------
 .../org/apache/hadoop/hbase/ClusterStatus.java  | 16 +++---
 .../apache/hadoop/hbase/HColumnDescriptor.java  |  2 +
 .../org/apache/hadoop/hbase/HRegionInfo.java    | 15 ++----
 .../hbase/InvalidFamilyOperationException.java  |  2 +-
 .../hadoop/hbase/MasterNotRunningException.java |  2 +-
 .../hadoop/hbase/NotServingRegionException.java |  2 +-
 .../apache/hadoop/hbase/RegionLocations.java    |  5 +-
 .../org/apache/hadoop/hbase/ServerName.java     | 24 +++++++--
 .../hadoop/hbase/TableExistsException.java      |  2 +-
 .../hadoop/hbase/TableNotDisabledException.java |  2 +-
 .../hbase/ZooKeeperConnectionException.java     |  2 +-
 .../org/apache/hadoop/hbase/client/Append.java  |  2 +
 .../hadoop/hbase/client/AsyncProcess.java       |  4 +-
 .../client/FastFailInterceptorContext.java      |  3 ++
 .../org/apache/hadoop/hbase/client/Get.java     |  1 +
 .../apache/hadoop/hbase/client/HBaseAdmin.java  | 11 ++--
 .../hadoop/hbase/client/HConnectionKey.java     |  4 +-
 .../org/apache/hadoop/hbase/client/HTable.java  |  2 +
 .../hadoop/hbase/client/HTableMultiplexer.java  |  2 +-
 .../apache/hadoop/hbase/client/HTablePool.java  |  5 +-
 .../apache/hadoop/hbase/client/Increment.java   |  4 +-
 .../client/NoServerForRegionException.java      |  2 +-
 .../client/PerClientRandomNonceGenerator.java   |  2 +
 .../hadoop/hbase/client/RegistryFactory.java    |  2 +-
 .../hadoop/hbase/client/RpcRetryingCaller.java  | 11 ++--
 .../org/apache/hadoop/hbase/client/Scan.java    |  1 +
 .../coprocessor/LongColumnInterpreter.java      |  3 +-
 .../client/replication/ReplicationAdmin.java    | 15 +++---
 .../client/security/SecurityCapability.java     |  2 +-
 .../hadoop/hbase/filter/BinaryComparator.java   |  2 +
 .../hbase/filter/BinaryPrefixComparator.java    |  2 +
 .../hadoop/hbase/filter/BitComparator.java      |  2 +
 .../hbase/filter/ColumnCountGetFilter.java      |  2 +
 .../hbase/filter/ColumnPaginationFilter.java    |  2 +
 .../hadoop/hbase/filter/ColumnPrefixFilter.java |  2 +
 .../hadoop/hbase/filter/ColumnRangeFilter.java  |  6 ++-
 .../hadoop/hbase/filter/CompareFilter.java      |  1 +
 .../hbase/filter/DependentColumnFilter.java     |  2 +
 .../hadoop/hbase/filter/FamilyFilter.java       |  2 +
 .../apache/hadoop/hbase/filter/FilterBase.java  |  5 ++
 .../apache/hadoop/hbase/filter/FilterList.java  |  2 +
 .../hadoop/hbase/filter/FilterWrapper.java      |  2 +
 .../hadoop/hbase/filter/FirstKeyOnlyFilter.java |  3 ++
 .../FirstKeyValueMatchingQualifiersFilter.java  |  2 +
 .../hadoop/hbase/filter/FuzzyRowFilter.java     | 12 +++++
 .../hbase/filter/InclusiveStopFilter.java       |  4 ++
 .../hadoop/hbase/filter/KeyOnlyFilter.java      |  2 +
 .../hbase/filter/MultiRowRangeFilter.java       |  2 +
 .../filter/MultipleColumnPrefixFilter.java      |  2 +
 .../hadoop/hbase/filter/NullComparator.java     |  2 +
 .../apache/hadoop/hbase/filter/PageFilter.java  |  7 ++-
 .../apache/hadoop/hbase/filter/ParseFilter.java |  7 +--
 .../hadoop/hbase/filter/PrefixFilter.java       |  6 +++
 .../hadoop/hbase/filter/QualifierFilter.java    |  2 +
 .../hadoop/hbase/filter/RandomRowFilter.java    |  5 +-
 .../apache/hadoop/hbase/filter/RowFilter.java   |  2 +
 .../filter/SingleColumnValueExcludeFilter.java  |  3 ++
 .../hbase/filter/SingleColumnValueFilter.java   |  6 +++
 .../apache/hadoop/hbase/filter/SkipFilter.java  |  5 ++
 .../hbase/filter/SubstringComparator.java       |  2 +
 .../hadoop/hbase/filter/TimestampsFilter.java   |  3 ++
 .../apache/hadoop/hbase/filter/ValueFilter.java |  2 +
 .../hadoop/hbase/filter/WhileMatchFilter.java   |  4 ++
 .../hadoop/hbase/ipc/AbstractRpcClient.java     |  4 +-
 .../hadoop/hbase/ipc/BlockingRpcClient.java     |  1 +
 .../apache/hadoop/hbase/ipc/ConnectionId.java   | 53 +++++++++++++++-----
 .../hbase/protobuf/ResponseConverter.java       |  3 +-
 .../hadoop/hbase/quotas/QuotaRetriever.java     |  1 +
 .../RegionServerRunningException.java           |  2 +-
 .../replication/ReplicationPeersZKImpl.java     |  4 --
 .../replication/ReplicationTrackerZKImpl.java   |  5 ++
 .../apache/hadoop/hbase/security/SaslUtil.java  |  7 +--
 .../hbase/security/access/Permission.java       |  2 +-
 .../security/visibility/VisibilityClient.java   |  4 ++
 .../org/apache/hadoop/hbase/util/PoolMap.java   |  4 +-
 .../hadoop/hbase/zookeeper/EmptyWatcher.java    |  1 +
 .../hadoop/hbase/zookeeper/HQuorumPeer.java     |  3 +-
 .../hadoop/hbase/zookeeper/ZKLeaderManager.java | 11 ++--
 .../apache/hadoop/hbase/zookeeper/ZKUtil.java   | 11 ++--
 .../hadoop/hbase/TestHColumnDescriptor.java     |  6 ++-
 .../hadoop/hbase/TestHTableDescriptor.java      |  2 +-
 .../hbase/TestInterfaceAudienceAnnotations.java |  8 +--
 .../hadoop/hbase/client/TestAsyncProcess.java   | 49 ++++++++++--------
 .../client/TestClientExponentialBackoff.java    |  8 +--
 .../hadoop/hbase/client/TestClientScanner.java  | 27 +++++++---
 .../hadoop/hbase/client/TestDelayingRunner.java |  5 +-
 .../hadoop/hbase/client/TestOperation.java      | 22 ++++----
 .../hbase/client/TestSnapshotFromAdmin.java     |  2 +-
 .../hbase/security/TestHBaseSaslRpcClient.java  | 19 ++++---
 .../hadoop/hbase/zookeeper/TestZKUtil.java      | 10 ++--
 90 files changed, 379 insertions(+), 160 deletions(-)
----------------------------------------------------------------------
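
Many of the serialVersionUID changes in this patch add parentheses because Java's additive operators bind more tightly than the shift operators, so an expression like 1L << 22 - 1L actually evaluates as 1L << 21. A minimal standalone sketch of the difference (the class name is made up for illustration and is not part of the patch):

    public class ShiftPrecedenceDemo {
      public static void main(String[] args) {
        long unparenthesized = 1L << 22 - 1L;    // additive binds tighter: evaluates as 1L << 21
        long parenthesized = (1L << 22) - 1L;    // the intended constant
        System.out.println(unparenthesized);     // 2097152
        System.out.println(parenthesized);       // 4194303
      }
    }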


http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index 3ca55fc..10637d6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -172,7 +172,7 @@ public class ClusterStatus extends VersionedWritable {
     int count = 0;
     if (liveServers != null && !liveServers.isEmpty()) {
       for (Map.Entry<ServerName, ServerLoad> e: this.liveServers.entrySet()) {
-        count += e.getValue().getNumberOfRequests();
+        count = (int) (count + e.getValue().getNumberOfRequests());
       }
     }
     return count;
@@ -188,6 +188,7 @@ public class ClusterStatus extends VersionedWritable {
   /**
    * @see java.lang.Object#equals(java.lang.Object)
    */
+  @Override
   public boolean equals(Object o) {
     if (this == o) {
       return true;
@@ -208,6 +209,7 @@ public class ClusterStatus extends VersionedWritable {
   /**
    * @see java.lang.Object#hashCode()
    */
+  @Override
   public int hashCode() {
     return VERSION + hbaseVersion.hashCode() + this.liveServers.hashCode() +
       this.deadServers.hashCode() + this.master.hashCode() +
@@ -215,6 +217,7 @@ public class ClusterStatus extends VersionedWritable {
   }
 
   /** @return the object version number */
+  @Override
   public byte getVersion() {
     return VERSION;
   }
@@ -322,6 +325,7 @@ public class ClusterStatus extends VersionedWritable {
     return balancerOn;
   }
 
+  @Override
   public String toString() {
     StringBuilder sb = new StringBuilder(1024);
     sb.append("Master: " + master);
@@ -440,7 +444,7 @@ public class ClusterStatus extends VersionedWritable {
   public static ClusterStatus convert(ClusterStatusProtos.ClusterStatus proto) {
 
     Map<ServerName, ServerLoad> servers = null;
-    if (proto.getLiveServersList() != null) {
+    if (!proto.getLiveServersList().isEmpty()) {
       servers = new HashMap<ServerName, ServerLoad>(proto.getLiveServersList().size());
       for (LiveServerInfo lsi : proto.getLiveServersList()) {
         servers.put(ProtobufUtil.toServerName(
@@ -449,7 +453,7 @@ public class ClusterStatus extends VersionedWritable {
     }
 
     Collection<ServerName> deadServers = null;
-    if (proto.getDeadServersList() != null) {
+    if (!proto.getDeadServersList().isEmpty()) {
       deadServers = new ArrayList<ServerName>(proto.getDeadServersList().size());
       for (HBaseProtos.ServerName sn : proto.getDeadServersList()) {
         deadServers.add(ProtobufUtil.toServerName(sn));
@@ -457,7 +461,7 @@ public class ClusterStatus extends VersionedWritable {
     }
 
     Collection<ServerName> backupMasters = null;
-    if (proto.getBackupMastersList() != null) {
+    if (!proto.getBackupMastersList().isEmpty()) {
       backupMasters = new ArrayList<ServerName>(proto.getBackupMastersList().size());
       for (HBaseProtos.ServerName sn : proto.getBackupMastersList()) {
         backupMasters.add(ProtobufUtil.toServerName(sn));
@@ -465,7 +469,7 @@ public class ClusterStatus extends VersionedWritable {
     }
 
     Set<RegionState> rit = null;
-    if (proto.getRegionsInTransitionList() != null) {
+    if (!proto.getRegionsInTransitionList().isEmpty()) {
       rit = new HashSet<RegionState>(proto.getRegionsInTransitionList().size());
       for (RegionInTransition region : proto.getRegionsInTransitionList()) {
         RegionState value = RegionState.convert(region.getRegionState());
@@ -474,7 +478,7 @@ public class ClusterStatus extends VersionedWritable {
     }
 
     String[] masterCoprocessors = null;
-    if (proto.getMasterCoprocessorsList() != null) {
+    if (!proto.getMasterCoprocessorsList().isEmpty()) {
       final int numMasterCoprocessors = proto.getMasterCoprocessorsCount();
       masterCoprocessors = new String[numMasterCoprocessors];
       for (int i = 0; i < numMasterCoprocessors; i++) {
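
The first ClusterStatus hunk replaces a compound assignment because count += someLong compiles to an implicit narrowing (int) cast, which the static checkers prefer to see written out. A small standalone illustration (hypothetical class, not HBase code):

    public class NarrowingDemo {
      public static void main(String[] args) {
        int count = 0;
        long requests = 3_000_000_000L;           // larger than Integer.MAX_VALUE
        // count = count + requests;              // would not compile: lossy conversion
        count += requests;                        // compiles, but hides an implicit (int) cast
        System.out.println(count);                // prints a negative, overflowed value
        count = (int) (count + requests);         // the explicit cast style used in the patch
        System.out.println(count);
      }
    }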

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 9292633..560b74b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -1284,6 +1284,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
    * @deprecated Writables are going away.  Use pb {@link #parseFrom(byte[])} instead.
    */
   @Deprecated
+  @Override
   public void readFields(DataInput in) throws IOException {
     int version = in.readByte();
     if (version < 6) {
@@ -1367,6 +1368,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
    * @deprecated Writables are going away.  Use {@link #toByteArray()} instead.
    */
   @Deprecated
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeByte(COLUMN_DESCRIPTOR_VERSION);
     Bytes.writeByteArray(out, this.name);

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
index 09ceeb9..8d93655 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
@@ -235,7 +235,7 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
 
   private void setHashCode() {
     int result = Arrays.hashCode(this.regionName);
-    result ^= this.regionId;
+    result = (int) (result ^ this.regionId);
     result ^= Arrays.hashCode(this.startKey);
     result ^= Arrays.hashCode(this.endKey);
     result ^= Boolean.valueOf(this.offLine).hashCode();
@@ -996,15 +996,6 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
   }
 
   /**
-   * Convert a HRegionInfo to the protobuf RegionInfo
-   *
-   * @return the converted RegionInfo
-   */
-  RegionInfo convert() {
-    return convert(this);
-  }
-
-  /**
    * Convert a HRegionInfo to a RegionInfo
    *
    * @param info the HRegionInfo to convert
@@ -1070,7 +1061,7 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
    * @see #parseFrom(byte[])
    */
   public byte [] toByteArray() {
-    byte [] bytes = convert().toByteArray();
+    byte [] bytes = convert(this).toByteArray();
     return ProtobufUtil.prependPBMagic(bytes);
   }
 
@@ -1148,7 +1139,7 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
    * @see #toByteArray()
    */
   public byte [] toDelimitedByteArray() throws IOException {
-    return ProtobufUtil.toDelimitedByteArray(convert());
+    return ProtobufUtil.toDelimitedByteArray(convert(this));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
index 492633c..b6b8bd5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class InvalidFamilyOperationException extends DoNotRetryIOException {
-  private static final long serialVersionUID = 1L << 22 - 1L;
+  private static final long serialVersionUID = (1L << 22) - 1L;
   /** default constructor */
   public InvalidFamilyOperationException() {
     super();

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
index a85b164..567f30f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
@@ -29,7 +29,7 @@ import java.io.IOException;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class MasterNotRunningException extends IOException {
-  private static final long serialVersionUID = 1L << 23 - 1L;
+  private static final long serialVersionUID = (1L << 23) - 1L;
   /** default constructor */
   public MasterNotRunningException() {
     super();

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
index 1523ff6..448c8a9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
@@ -31,7 +31,7 @@ import java.io.IOException;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class NotServingRegionException extends IOException {
-  private static final long serialVersionUID = 1L << 17 - 1L;
+  private static final long serialVersionUID = (1L << 17) - 1L;
 
   /** default constructor */
   public NotServingRegionException() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
index 211de17..34836de 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.util.Collection;
+import java.util.Objects;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
@@ -214,7 +215,7 @@ public class RegionLocations {
       HRegionLocation selectedLoc = selectRegionLocation(thisLoc,
         otherLoc, true, false);
 
-      if (selectedLoc != thisLoc) {
+      if (!Objects.equals(selectedLoc, thisLoc)) {
         if (newLocations == null) {
           newLocations = new HRegionLocation[max];
           System.arraycopy(locations, 0, newLocations, 0, i);
@@ -277,7 +278,7 @@ public class RegionLocations {
     HRegionLocation selectedLoc = selectRegionLocation(oldLoc, location,
       checkForEquals, force);
 
-    if (selectedLoc == oldLoc) {
+    if (Objects.equals(selectedLoc, oldLoc)) {
       return this;
     }
     HRegionLocation[] newLocations = new HRegionLocation[Math.max(locations.length, replicaId +1)];
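
The RegionLocations hunks switch from reference comparison to java.util.Objects.equals, which is null-safe and delegates to the operands' equals methods. A brief standalone reminder of the difference (illustrative class name only):

    import java.util.Objects;

    public class ReferenceVsEquals {
      public static void main(String[] args) {
        String a = new String("location");
        String b = new String("location");
        System.out.println(a == b);                      // false: distinct references
        System.out.println(Objects.equals(a, b));        // true: value equality
        System.out.println(Objects.equals(null, null));  // true: null-safe
      }
    }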

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
index 339f588..97e78c9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
@@ -287,11 +287,27 @@ public class ServerName implements Comparable<ServerName>, Serializable {
 
   @Override
   public int compareTo(ServerName other) {
-    int compare = this.getHostname().compareToIgnoreCase(other.getHostname());
-    if (compare != 0) return compare;
+    int compare;
+    if (other == null) {
+      return -1;
+    }
+    if (this.getHostname() == null) {
+      if (other.getHostname() != null) {
+        return 1;
+      }
+    } else {
+      if (other.getHostname() == null) {
+        return -1;
+      }
+      compare = this.getHostname().compareToIgnoreCase(other.getHostname());
+      if (compare != 0) {
+        return compare;
+      }
+    }
     compare = this.getPort() - other.getPort();
-    if (compare != 0) return compare;
-
+    if (compare != 0) {
+      return compare;
+    }
     return Long.compare(this.getStartcode(), other.getStartcode());
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java
index 740eb90..1a39497 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.TableName;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class TableExistsException extends DoNotRetryIOException {
-  private static final long serialVersionUID = 1L << 7 - 1L;
+  private static final long serialVersionUID = (1L << 7) - 1L;
   /** default constructor */
   public TableExistsException() {
     super();

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
index ea707bf..9b11122 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class TableNotDisabledException extends DoNotRetryIOException {
-  private static final long serialVersionUID = 1L << 19 - 1L;
+  private static final long serialVersionUID = (1L << 19) - 1L;
   /** default constructor */
   public TableNotDisabledException() {
     super();

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
index 7aebf33..773a64b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
@@ -29,7 +29,7 @@ import java.io.IOException;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class ZooKeeperConnectionException extends IOException {
-  private static final long serialVersionUID = 1L << 23 - 1L;
+  private static final long serialVersionUID = (1L << 23) - 1L;
   /** default constructor */
   public ZooKeeperConnectionException() {
     super();

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index 6144570..de1b7fe 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -52,6 +52,7 @@ public class Append extends Mutation {
    *          A client that is not interested in the result can save network
    *          bandwidth setting this to false.
    */
+  @Override
   public Append setReturnResults(boolean returnResults) {
     super.setReturnResults(returnResults);
     return this;
@@ -61,6 +62,7 @@ public class Append extends Mutation {
    * @return current setting for returnResults
    */
   // This method makes public the superclasses's protected method.
+  @Override
   public boolean isReturnResults() {
     return super.isReturnResults();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
index 44a7656..42df848 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
@@ -103,6 +103,8 @@ import org.apache.htrace.Trace;
  * </p>
  */
 @InterfaceAudience.Private
+@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="JLM_JSR166_UTILCONCURRENT_MONITORENTER",
+  justification="Synchronization on tasks in progress counter is intended")
 class AsyncProcess {
   private static final Log LOG = LogFactory.getLog(AsyncProcess.class);
   protected static final AtomicLong COUNTER = new AtomicLong();
@@ -380,7 +382,7 @@ class AsyncProcess {
     // we will do more retries in aggregate, but the user will be none the wiser.
     this.serverTrackerTimeout = 0;
     for (int i = 0; i < this.numTries; ++i) {
-      serverTrackerTimeout += ConnectionUtils.getPauseTime(this.pause, i);
+      serverTrackerTimeout = (int) (serverTrackerTimeout + ConnectionUtils.getPauseTime(this.pause, i));
     }
 
     this.rpcCallerFactory = rpcCaller;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
index 3cbdfb3..2049079 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
@@ -108,6 +108,7 @@ class FastFailInterceptorContext extends
     this.tries = tries;
   }
 
+  @Override
   public void clear() {
     server = null;
     fInfo = null;
@@ -118,10 +119,12 @@ class FastFailInterceptorContext extends
     tries = 0;
   }
 
+  @Override
   public FastFailInterceptorContext prepare(RetryingCallable<?> callable) {
     return prepare(callable, 0);
   }
 
+  @Override
   public FastFailInterceptorContext prepare(RetryingCallable<?> callable,
       int tries) {
     if (callable instanceof RegionServerCallable) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
index f8c34bd..6e5ff88 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
@@ -243,6 +243,7 @@ public class Get extends Query
     return this;
   }
 
+  @Override
   public Get setLoadColumnFamiliesOnDemand(boolean value) {
     return (Get) super.setLoadColumnFamiliesOnDemand(value);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 0925e38..051a768 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -24,6 +24,7 @@ import com.google.protobuf.ServiceException;
 
 import java.io.IOException;
 import java.io.InterruptedIOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -2460,6 +2461,7 @@ public class HBaseAdmin implements Admin {
    *
    * @return true if region normalizer is enabled, false otherwise.
    */
+  @Override
   public boolean isNormalizerEnabled() throws IOException {
     return executeCallable(new MasterCallable<Boolean>(getConnection()) {
       @Override
@@ -2478,6 +2480,7 @@ public class HBaseAdmin implements Admin {
    *
    * @return Previous normalizer value
    */
+  @Override
   public boolean setNormalizerRunning(final boolean on) throws IOException {
     return executeCallable(new MasterCallable<Boolean>(getConnection()) {
       @Override
@@ -2608,10 +2611,10 @@ public class HBaseAdmin implements Admin {
   public void mergeRegions(final byte[] nameOfRegionA,
       final byte[] nameOfRegionB, final boolean forcible)
       throws IOException {
-    final byte[] encodedNameOfRegionA = isEncodedRegionName(nameOfRegionA) ?
-      nameOfRegionA : HRegionInfo.encodeRegionName(nameOfRegionA).getBytes();
-    final byte[] encodedNameOfRegionB = isEncodedRegionName(nameOfRegionB) ?
-      nameOfRegionB : HRegionInfo.encodeRegionName(nameOfRegionB).getBytes();
+    final byte[] encodedNameOfRegionA = isEncodedRegionName(nameOfRegionA) ? nameOfRegionA :
+      HRegionInfo.encodeRegionName(nameOfRegionA).getBytes(StandardCharsets.UTF_8);
+    final byte[] encodedNameOfRegionB = isEncodedRegionName(nameOfRegionB) ? nameOfRegionB :
+      HRegionInfo.encodeRegionName(nameOfRegionB).getBytes(StandardCharsets.UTF_8);
 
     Pair<HRegionInfo, ServerName> pair = getRegion(nameOfRegionA);
     if (pair != null && pair.getFirst().getReplicaId() != HRegionInfo.DEFAULT_REPLICA_ID)
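
The mergeRegions change passes an explicit charset because String.getBytes() with no argument encodes with the JVM's platform default, which findbugs flags as non-portable. A short standalone illustration (class name is hypothetical):

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class CharsetDemo {
      public static void main(String[] args) {
        String name = "région";                              // non-ASCII, so encodings can differ
        byte[] platformDefault = name.getBytes();            // depends on the JVM default charset
        byte[] explicitUtf8 = name.getBytes(StandardCharsets.UTF_8);
        // True only when the platform default happens to be UTF-8.
        System.out.println(Arrays.equals(platformDefault, explicitUtf8));
      }
    }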

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java
index f37690c..3d3ad33 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Objects;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
@@ -124,8 +125,7 @@ class HConnectionKey {
       for (String property : CONNECTION_PROPERTIES) {
         String thisValue = this.properties.get(property);
         String thatValue = that.properties.get(property);
-        //noinspection StringEquality
-        if (thisValue == thatValue) {
+        if (Objects.equals(thisValue, thatValue)) {
           continue;
         }
         if (thisValue == null || !thisValue.equals(thatValue)) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index e9531f3..5fb9e63 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -1808,6 +1808,7 @@ public class HTable implements HTableInterface, RegionLocator {
     return getKeysAndRegionsInRange(start, end, true).getFirst();
   }
 
+  @Override
   public void setOperationTimeout(int operationTimeout) {
     this.operationTimeout = operationTimeout;
     if (mutator != null) {
@@ -1816,6 +1817,7 @@ public class HTable implements HTableInterface, RegionLocator {
     multiAp.setOperationTimeout(operationTimeout);
   }
 
+  @Override
   public int getOperationTimeout() {
     return operationTimeout;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
index 7b2b136..21e3ce6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
@@ -468,7 +468,7 @@ public class HTableMultiplexer {
     }
 
     public long getTotalBufferedCount() {
-      return queue.size() + currentProcessingCount.get();
+      return (long) queue.size() + currentProcessingCount.get();
     }
 
     public AtomicAverageCounter getAverageLatencyCounter() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
index 502703b..d219306 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
@@ -308,6 +308,7 @@ public class HTablePool implements Closeable {
    * <p>
    * Note: this is a 'shutdown' of all the table pools.
    */
+  @Override
   public void close() throws IOException {
     for (String tableName : tables.keySet()) {
       closeTablePool(tableName);
@@ -524,6 +525,7 @@ public class HTablePool implements Closeable {
      *
      * @throws IOException
      */
+    @Override
     public void close() throws IOException {
       checkState();
       open = false;
@@ -635,7 +637,8 @@ public class HTablePool implements Closeable {
 
     private void checkState() {
       if (!isOpen()) {
-        throw new IllegalStateException("Table=" + new String(table.getTableName()) + " already closed");
+        throw new IllegalStateException("Table=" + table.getName().getNameAsString()
+          + " already closed");
       }
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
index 48f2fd6..76759b7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.ClassSize;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class Increment extends Mutation implements Comparable<Row> {
-  private static final long HEAP_OVERHEAD =  ClassSize.REFERENCE + ClassSize.TIMERANGE;
+  private static final long HEAP_OVERHEAD = (long) ClassSize.REFERENCE + ClassSize.TIMERANGE;
   private TimeRange tr = new TimeRange();
 
   /**
@@ -164,6 +164,7 @@ public class Increment extends Mutation implements Comparable<Row> {
    *          client that is not interested in the result can save network bandwidth setting this
    *          to false.
    */
+  @Override
   public Increment setReturnResults(boolean returnResults) {
     super.setReturnResults(returnResults);
     return this;
@@ -173,6 +174,7 @@ public class Increment extends Mutation implements Comparable<Row> {
    * @return current setting for returnResults
    */
   // This method makes public the superclasses's protected method.
+  @Override
   public boolean isReturnResults() {
     return super.isReturnResults();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
index 126b117..b9ebac3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class NoServerForRegionException extends DoNotRetryRegionException {
-  private static final long serialVersionUID = 1L << 11 - 1L;
+  private static final long serialVersionUID = (1L << 11) - 1L;
 
   /** default constructor */
   public NoServerForRegionException() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java
index 7ac4546..64b44a9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java
@@ -38,10 +38,12 @@ public class PerClientRandomNonceGenerator implements NonceGenerator {
     this.clientId = (((long)Arrays.hashCode(clientIdBase)) << 32) + rdm.nextInt();
   }
 
+  @Override
   public long getNonceGroup() {
     return this.clientId;
   }
 
+  @Override
   public long newNonce() {
     long result = HConstants.NO_NONCE;
     do {

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java
index d7aa739..789e2e1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java
@@ -38,7 +38,7 @@ class RegistryFactory {
       ZooKeeperRegistry.class.getName());
     Registry registry = null;
     try {
-      registry = (Registry)Class.forName(registryClass).newInstance();
+      registry = (Registry)Class.forName(registryClass).getDeclaredConstructor().newInstance();
     } catch (Throwable t) {
       throw new IOException(t);
     }
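
Class.newInstance() was deprecated in Java 9 and can rethrow checked constructor exceptions without declaring them, so the patch switches to getDeclaredConstructor().newInstance(), which reports constructor failures through the reflective exception types. A minimal sketch of the replacement pattern (class and method names are made up for illustration):

    public class ReflectiveCreate {
      // Loads a class by name and invokes its no-argument constructor, the
      // replacement pattern for the deprecated Class.newInstance().
      public static Object create(String className) throws Exception {
        return Class.forName(className).getDeclaredConstructor().newInstance();
      }

      public static void main(String[] args) throws Exception {
        System.out.println(create("java.util.ArrayList").getClass().getName());
      }
    }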

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
index b03595a..af24d5c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
@@ -66,6 +66,7 @@ public class RpcRetryingCaller<T> {
   private final long pauseForCQTBE;
   private final int retries;
   private final int rpcTimeout;// timeout for each rpc request
+  private final Object lock = new Object();
   private final AtomicBoolean cancelled = new AtomicBoolean(false);
   private final RetryingCallerInterceptor interceptor;
   private final RetryingCallerInterceptorContext context;
@@ -105,16 +106,16 @@ public class RpcRetryingCaller<T> {
 
   private int getTimeout(int callTimeout){
     int timeout = getRemainingTime(callTimeout);
-    if (timeout <= 0 || rpcTimeout > 0 && rpcTimeout < timeout){
+    if (timeout <= 0 || (rpcTimeout > 0 && rpcTimeout < timeout)){
       timeout = rpcTimeout;
     }
     return timeout;
   }
 
   public void cancel(){
-    synchronized (cancelled){
+    synchronized (lock){
       cancelled.set(true);
-      cancelled.notifyAll();
+      lock.notifyAll();
     }
   }
 
@@ -181,9 +182,9 @@ public class RpcRetryingCaller<T> {
       }
       try {
         if (expectedSleep > 0) {
-          synchronized (cancelled) {
+          synchronized (lock) {
             if (cancelled.get()) return null;
-            cancelled.wait(expectedSleep);
+            lock.wait(expectedSleep);
           }
         }
         if (cancelled.get()) return null;
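
FindBugs reports synchronizing on java.util.concurrent types (the JLM_JSR166_UTILCONCURRENT_MONITORENTER pattern also suppressed in AsyncProcess above), so the hunk introduces a dedicated monitor object for wait/notify while keeping the AtomicBoolean as the cancellation flag. A minimal sketch of that shape (assumed example, not the actual RpcRetryingCaller):

    import java.util.concurrent.atomic.AtomicBoolean;

    public class CancellableWait {
      private final Object lock = new Object();
      private final AtomicBoolean cancelled = new AtomicBoolean(false);

      public void cancel() {
        synchronized (lock) {
          cancelled.set(true);
          lock.notifyAll();             // wake any thread sleeping between retries
        }
      }

      public void sleepUnlessCancelled(long millis) throws InterruptedException {
        synchronized (lock) {
          if (!cancelled.get()) {
            lock.wait(millis);          // returns early when cancel() is called
          }
        }
      }
    }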

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 1340602..541b3d8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -871,6 +871,7 @@ public class Scan extends Query {
     return allowPartialResults;
   }
 
+  @Override
   public Scan setLoadColumnFamiliesOnDemand(boolean value) {
     return (Scan) super.setLoadColumnFamiliesOnDemand(value);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
index 3ea3802..5e1d162 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 public class LongColumnInterpreter extends ColumnInterpreter<Long, Long,
                  EmptyMsg, LongMsg, LongMsg> {
 
+  @Override
   public Long getValue(byte[] colFamily, byte[] colQualifier, Cell kv)
       throws IOException {
     if (kv == null || kv.getValueLength() != Bytes.SIZEOF_LONG)
@@ -49,7 +50,7 @@ public class LongColumnInterpreter extends ColumnInterpreter<Long, Long,
     return Bytes.toLong(kv.getValueArray(), kv.getValueOffset());
   }
 
-   @Override
+  @Override
   public Long add(Long l1, Long l2) {
     if (l1 == null ^ l2 == null) {
       return (l1 == null) ? l2 : l1; // either of one is null.

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
index 73fec38..55653d5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
@@ -651,8 +651,8 @@ public class ReplicationAdmin implements Closeable {
       admin = this.connection.getAdmin();
       HTableDescriptor htd = admin.getTableDescriptor(tableName);
       ReplicationState currentReplicationState = getTableReplicationState(htd);
-      if (enableRep && currentReplicationState != ReplicationState.ENABLED
-          || !enableRep && currentReplicationState != ReplicationState.DISABLED) {
+      if ((enableRep && currentReplicationState != ReplicationState.ENABLED)
+          || (!enableRep && currentReplicationState != ReplicationState.DISABLED)) {
         boolean isOnlineSchemaUpdateEnabled =
             this.connection.getConfiguration()
                 .getBoolean("hbase.online.schema.update.enable", true);
@@ -710,6 +710,7 @@ public class ReplicationAdmin implements Closeable {
     return ReplicationState.DISABLED;
   }
 
+  @SuppressWarnings("unchecked")
   private void checkConfiguredWALEntryFilters(ReplicationPeerConfig peerConfig)
     throws ReplicationException {
     String filterCSV = peerConfig.getConfiguration().
@@ -718,8 +719,8 @@ public class ReplicationAdmin implements Closeable {
       String[] filters = filterCSV.split(",");
       for (String filter : filters) {
         try {
-          Class clazz = Class.forName(filter);
-          Object o = clazz.newInstance();
+          Class<?> clazz = Class.forName(filter);
+          Object o = clazz.getDeclaredConstructor().newInstance();
         } catch (Exception e) {
           throw new ReplicationException("Configured WALEntryFilter " + filter +
               " could not be created. Failing add/update " + "peer operation.", e);
@@ -777,12 +778,12 @@ public class ReplicationAdmin implements Closeable {
    * @see java.lang.Object#equals(java.lang.Object)
    */
   private boolean compareForReplication(HTableDescriptor peerHtd, HTableDescriptor localHtd) {
-    if (peerHtd == localHtd) {
-      return true;
-    }
     if (peerHtd == null) {
       return false;
     }
+    if (peerHtd.equals(localHtd)) {
+      return true;
+    }
     boolean result = false;
 
     // Create a copy of peer HTD as we need to change its replication

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
index 1847b2e..5fc97ba 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
@@ -34,7 +34,7 @@ public enum SecurityCapability {
   CELL_AUTHORIZATION(3),
   CELL_VISIBILITY(4);
 
-  private int value;
+  private final int value;
 
   public int getValue() {
     return value;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
index d5c2613..09fe7e1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
@@ -50,6 +50,7 @@ public class BinaryComparator extends ByteArrayComparable {
   /**
    * @return The comparator serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     ComparatorProtos.BinaryComparator.Builder builder =
       ComparatorProtos.BinaryComparator.newBuilder();
@@ -79,6 +80,7 @@ public class BinaryComparator extends ByteArrayComparable {
    * @return true if and only if the fields of the comparator that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(ByteArrayComparable other) {
     if (other == this) return true;
     if (!(other instanceof BinaryComparator)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
index c05eb8f..366b2f9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
@@ -52,6 +52,7 @@ public class BinaryPrefixComparator extends ByteArrayComparable {
   /**
    * @return The comparator serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     ComparatorProtos.BinaryPrefixComparator.Builder builder =
       ComparatorProtos.BinaryPrefixComparator.newBuilder();
@@ -81,6 +82,7 @@ public class BinaryPrefixComparator extends ByteArrayComparable {
    * @return true if and only if the fields of the comparator that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(ByteArrayComparable other) {
     if (other == this) return true;
     if (!(other instanceof BinaryPrefixComparator)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
index 0b7c52d..07af22f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
@@ -66,6 +66,7 @@ public class BitComparator extends ByteArrayComparable {
   /**
    * @return The comparator serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     ComparatorProtos.BitComparator.Builder builder =
       ComparatorProtos.BitComparator.newBuilder();
@@ -99,6 +100,7 @@ public class BitComparator extends ByteArrayComparable {
    * @return true if and only if the fields of the comparator that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(ByteArrayComparable other) {
     if (other == this) return true;
     if (!(other instanceof BitComparator)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
index 18f49f6..d2f6ec8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
@@ -84,6 +84,7 @@ public class ColumnCountGetFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.ColumnCountGetFilter.Builder builder =
       FilterProtos.ColumnCountGetFilter.newBuilder();
@@ -113,6 +114,7 @@ public class ColumnCountGetFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof ColumnCountGetFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
index 6f297fb..806863a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
@@ -174,6 +174,7 @@ public class ColumnPaginationFilter extends FilterBase
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.ColumnPaginationFilter.Builder builder =
       FilterProtos.ColumnPaginationFilter.newBuilder();
@@ -213,6 +214,7 @@ public class ColumnPaginationFilter extends FilterBase
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof ColumnPaginationFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
index 6a9e6e9..eb0be8d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
@@ -99,6 +99,7 @@ public class ColumnPrefixFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.ColumnPrefixFilter.Builder builder =
       FilterProtos.ColumnPrefixFilter.newBuilder();
@@ -128,6 +129,7 @@ public class ColumnPrefixFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
    if (o == this) return true;
    if (!(o instanceof ColumnPrefixFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
index 9e6c90f..9006f87 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
@@ -143,8 +143,8 @@ public class ColumnRangeFilter extends FilterBase {
     int cmpMax = Bytes.compareTo(buffer, qualifierOffset, qualifierLength,
         this.maxColumn, 0, this.maxColumn.length);
 
-    if (this.maxColumnInclusive && cmpMax <= 0 ||
-        !this.maxColumnInclusive && cmpMax < 0) {
+    if ((this.maxColumnInclusive && cmpMax <= 0) ||
+        (!this.maxColumnInclusive && cmpMax < 0)) {
       return ReturnCode.INCLUDE;
     }
 
@@ -177,6 +177,7 @@ public class ColumnRangeFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.ColumnRangeFilter.Builder builder =
       FilterProtos.ColumnRangeFilter.newBuilder();
@@ -211,6 +212,7 @@ public class ColumnRangeFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
    if (o == this) return true;
    if (!(o instanceof ColumnRangeFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
index 9987e23..b7595d5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
@@ -170,6 +170,7 @@ public abstract class CompareFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof CompareFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
index 2843751..8582e38 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
@@ -217,6 +217,7 @@ public class DependentColumnFilter extends CompareFilter {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.DependentColumnFilter.Builder builder =
       FilterProtos.DependentColumnFilter.newBuilder();
@@ -268,6 +269,7 @@ public class DependentColumnFilter extends CompareFilter {
    */
   @edu.umd.cs.findbugs.annotations.SuppressWarnings(
       value="RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof DependentColumnFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
index f9722d3..9aa078c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
@@ -82,6 +82,7 @@ public class FamilyFilter extends CompareFilter {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.FamilyFilter.Builder builder =
       FilterProtos.FamilyFilter.newBuilder();
@@ -121,6 +122,7 @@ public class FamilyFilter extends CompareFilter {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof FamilyFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
index 3c6bcab..812252c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
@@ -142,6 +142,7 @@ public abstract class FilterBase extends Filter {
    *
    * {@inheritDoc}
    */
+  @Override
   public Cell getNextCellHint(Cell currentKV) throws IOException {
     // Old filters based off of this class will override KeyValue getNextKeyHint(KeyValue).
     // Thus to maintain compatibility we need to call the old version.
@@ -154,6 +155,7 @@ public abstract class FilterBase extends Filter {
    *
    * {@inheritDoc}
    */
+  @Override
   public boolean isFamilyEssential(byte[] name) throws IOException {
     return true;
   }
@@ -171,6 +173,7 @@ public abstract class FilterBase extends Filter {
   /**
    * Return filter's info for debugging and logging purpose.
    */
+  @Override
   public String toString() {
     return this.getClass().getSimpleName();
   }
@@ -178,6 +181,7 @@ public abstract class FilterBase extends Filter {
   /**
    * Return length 0 byte array for Filters that don't require special serialization
    */
+  @Override
   public byte[] toByteArray() throws IOException {
     return new byte[0];
   }
@@ -189,6 +193,7 @@ public abstract class FilterBase extends Filter {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter other) {
     return true;
   }
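
The FilterBase defaults annotated above are what stateless custom filters lean on: toByteArray() returns an empty array and areSerializedFieldsEqual() returns true. A hypothetical subclass is sketched below, assuming only the standard Filter API; a real server-side filter would additionally need a static parseFrom(byte[]) so it can be rebuilt from the wire format.

  import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.filter.FilterBase;

  // Hypothetical stateless filter relying on the FilterBase defaults.
  public class AcceptEverythingFilter extends FilterBase {
    @Override
    public ReturnCode filterKeyValue(Cell cell) {
      return ReturnCode.INCLUDE;  // keep every cell
    }
  }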

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 0b39b56..8345fcf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -221,6 +221,7 @@ final public class FilterList extends Filter {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte[] toByteArray() throws IOException {
     FilterProtos.FilterList.Builder builder = FilterProtos.FilterList.newBuilder();
     builder.setOperator(FilterProtos.FilterList.Operator.valueOf(operator.name()));
@@ -262,6 +263,7 @@ final public class FilterList extends Filter {
    * @return true if and only if the fields of the filter that are serialized are equal to the
    *         corresponding fields in other. Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter other) {
     if (other == this) return true;
     if (!(other instanceof FilterList)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
index 4418527..71370ec 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
@@ -54,6 +54,7 @@ final public class FilterWrapper extends Filter {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte[] toByteArray() throws IOException {
     FilterProtos.FilterWrapper.Builder builder =
       FilterProtos.FilterWrapper.newBuilder();
@@ -181,6 +182,7 @@ final public class FilterWrapper extends Filter {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof FilterWrapper)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
index dafb485..d18a1f8a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
@@ -42,6 +42,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
   public FirstKeyOnlyFilter() {
   }
 
+  @Override
   public void reset() {
     foundKV = false;
   }
@@ -84,6 +85,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.FirstKeyOnlyFilter.Builder builder =
       FilterProtos.FirstKeyOnlyFilter.newBuilder();
@@ -113,6 +115,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof FirstKeyOnlyFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
index fc40982..2f1c037 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
@@ -82,6 +82,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder builder =
       FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder();
@@ -118,6 +119,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof FirstKeyValueMatchingQualifiersFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
index 88fc17b..d93d234 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
@@ -258,6 +258,7 @@ public class FuzzyRowFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte[] toByteArray() {
     FilterProtos.FuzzyRowFilter.Builder builder = FilterProtos.FuzzyRowFilter.newBuilder();
     for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
@@ -466,45 +467,55 @@ public class FuzzyRowFilter extends FilterBase {
   /** Abstracts directional comparisons based on scan direction. */
   private enum Order {
     ASC {
+      @Override
       public boolean lt(int lhs, int rhs) {
         return lhs < rhs;
       }
 
+      @Override
       public boolean gt(int lhs, int rhs) {
         return lhs > rhs;
       }
 
+      @Override
       public byte inc(byte val) {
         // TODO: what about over/underflow?
         return (byte) (val + 1);
       }
 
+      @Override
       public boolean isMax(byte val) {
         return val == (byte) 0xff;
       }
 
+      @Override
       public byte min() {
         return 0;
       }
     },
     DESC {
+      @Override
       public boolean lt(int lhs, int rhs) {
         return lhs > rhs;
       }
 
+      @Override
       public boolean gt(int lhs, int rhs) {
         return lhs < rhs;
       }
 
+      @Override
       public byte inc(byte val) {
         // TODO: what about over/underflow?
         return (byte) (val - 1);
       }
 
+      @Override
       public boolean isMax(byte val) {
         return val == 0;
       }
 
+      @Override
       public byte min() {
         return (byte) 0xFF;
       }
@@ -627,6 +638,7 @@ public class FuzzyRowFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized are equal to the
    *         corresponding fields in other. Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof FuzzyRowFilter)) return false;
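
On the @Override annotations inside the Order enum above: each enum constant body (ASC, DESC) is effectively an anonymous subclass of the enum, so its constant-specific methods are genuine overrides of the shared abstract methods and can carry the annotation. A stripped-down sketch of the same pattern (enum and method names invented):

  enum Direction {
    ASC {
      @Override
      boolean lt(int lhs, int rhs) { return lhs < rhs; }
    },
    DESC {
      @Override
      boolean lt(int lhs, int rhs) { return lhs > rhs; }
    };

    abstract boolean lt(int lhs, int rhs);
  }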

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
index a16e48b..671e596 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
@@ -65,6 +65,7 @@ public class InclusiveStopFilter extends FilterBase {
     return v;
   }
 
+  @Override
   public boolean filterRowKey(byte[] buffer, int offset, int length) {
     if (buffer == null) {
       //noinspection RedundantIfStatement
@@ -81,6 +82,7 @@ public class InclusiveStopFilter extends FilterBase {
     return done;
   }
 
+  @Override
   public boolean filterAllRemaining() {
     return done;
   }
@@ -95,6 +97,7 @@ public class InclusiveStopFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.InclusiveStopFilter.Builder builder =
       FilterProtos.InclusiveStopFilter.newBuilder();
@@ -124,6 +127,7 @@ public class InclusiveStopFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof InclusiveStopFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
index cebb26a..3895b2a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
@@ -88,6 +88,7 @@ public class KeyOnlyFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.KeyOnlyFilter.Builder builder =
       FilterProtos.KeyOnlyFilter.newBuilder();
@@ -117,6 +118,7 @@ public class KeyOnlyFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof KeyOnlyFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index 7e9503c..fd21dbd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -141,6 +141,7 @@ public class MultiRowRangeFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte[] toByteArray() {
     FilterProtos.MultiRowRangeFilter.Builder builder = FilterProtos.MultiRowRangeFilter
         .newBuilder();
@@ -193,6 +194,7 @@ public class MultiRowRangeFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized are equal to the
    *         corresponding fields in other. Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this)
       return true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
index d3eb642..96a0b38 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
@@ -115,6 +115,7 @@ public class MultipleColumnPrefixFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.MultipleColumnPrefixFilter.Builder builder =
       FilterProtos.MultipleColumnPrefixFilter.newBuilder();
@@ -152,6 +153,7 @@ public class MultipleColumnPrefixFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof MultipleColumnPrefixFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
index a9b3c8e..d2f7f45 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
@@ -62,6 +62,7 @@ public class NullComparator extends ByteArrayComparable {
   /**
    * @return The comparator serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     ComparatorProtos.NullComparator.Builder builder =
       ComparatorProtos.NullComparator.newBuilder();
@@ -90,6 +91,7 @@ public class NullComparator extends ByteArrayComparable {
    * @return true if and only if the fields of the comparator that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(ByteArrayComparable other) {
     if (other == this) return true;
     if (!(other instanceof NullComparator)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
index ee94d4c..9692f22 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
@@ -24,11 +24,11 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
 import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
 
 import java.io.IOException;
 import java.util.ArrayList;
+
 /**
  * Implementation of Filter interface that limits results to a specific page
  * size. It terminates scanning once the number of filter-passed rows is &gt;
@@ -72,15 +72,18 @@ public class PageFilter extends FilterBase {
     return v;
   }
 
+  @Override
   public boolean filterAllRemaining() {
     return this.rowsAccepted >= this.pageSize;
   }
 
+  @Override
   public boolean filterRow() {
     this.rowsAccepted++;
     return this.rowsAccepted > this.pageSize;
   }
   
+  @Override
   public boolean hasFilterRow() {
     return true;
   }
@@ -95,6 +98,7 @@ public class PageFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.PageFilter.Builder builder =
       FilterProtos.PageFilter.newBuilder();
@@ -124,6 +128,7 @@ public class PageFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof PageFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
index 4ba675b..cdb611c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
@@ -30,6 +30,7 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.EmptyStackException;
@@ -263,7 +264,7 @@ public class ParseFilter {
       e.printStackTrace();
     }
     throw new IllegalArgumentException("Incorrect filter string " +
-                                       new String(filterStringAsByteArray));
+        new String(filterStringAsByteArray, StandardCharsets.UTF_8));
   }
 
 /**
@@ -811,9 +812,9 @@ public class ParseFilter {
     else if (Bytes.equals(comparatorType, ParseConstants.binaryPrefixType))
       return new BinaryPrefixComparator(comparatorValue);
     else if (Bytes.equals(comparatorType, ParseConstants.regexStringType))
-      return new RegexStringComparator(new String(comparatorValue));
+      return new RegexStringComparator(new String(comparatorValue, StandardCharsets.UTF_8));
     else if (Bytes.equals(comparatorType, ParseConstants.substringType))
-      return new SubstringComparator(new String(comparatorValue));
+      return new SubstringComparator(new String(comparatorValue, StandardCharsets.UTF_8));
     else
       throw new IllegalArgumentException("Incorrect comparatorType");
   }
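
The StandardCharsets changes above address the default-encoding findings: new String(bytes) decodes with whatever charset the JVM defaults to, so the result can differ between machines, while an explicit charset is deterministic. A small self-contained example (values invented for illustration):

  import java.nio.charset.StandardCharsets;

  public class CharsetSketch {
    public static void main(String[] args) {
      byte[] euro = { (byte) 0xE2, (byte) 0x82, (byte) 0xAC };          // UTF-8 encoding of the euro sign
      String platformDependent = new String(euro);                      // depends on the JVM default charset
      String deterministic = new String(euro, StandardCharsets.UTF_8);  // always decodes to the euro sign
      System.out.println(platformDependent + " / " + deterministic);
    }
  }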

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
index 8030ff6..a3fc440 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
@@ -50,6 +50,7 @@ public class PrefixFilter extends FilterBase {
     return prefix;
   }
 
+  @Override
   public boolean filterRowKey(byte[] buffer, int offset, int length) {
     if (buffer == null || this.prefix == null)
       return true;
@@ -80,14 +81,17 @@ public class PrefixFilter extends FilterBase {
     return v;
   }
 
+  @Override
   public boolean filterRow() {
     return filterRow;
   }
 
+  @Override
   public void reset() {
     filterRow = true;
   }
 
+  @Override
   public boolean filterAllRemaining() {
     return passedPrefix;
   }
@@ -102,6 +106,7 @@ public class PrefixFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.PrefixFilter.Builder builder =
       FilterProtos.PrefixFilter.newBuilder();
@@ -131,6 +136,7 @@ public class PrefixFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof PrefixFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
index bf3a5f9..bf503c2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
@@ -81,6 +81,7 @@ public class QualifierFilter extends CompareFilter {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.QualifierFilter.Builder builder =
       FilterProtos.QualifierFilter.newBuilder();
@@ -120,6 +121,7 @@ public class QualifierFilter extends CompareFilter {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof QualifierFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
index 243923f..f6d091f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
@@ -90,7 +90,8 @@ public class RandomRowFilter extends FilterBase {
   public boolean filterRow() {
     return filterOutRow;
   }
-  
+
+  @Override
   public boolean hasFilterRow() {
     return true;
   }
@@ -118,6 +119,7 @@ public class RandomRowFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.RandomRowFilter.Builder builder =
       FilterProtos.RandomRowFilter.newBuilder();
@@ -147,6 +149,7 @@ public class RandomRowFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof RandomRowFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
index 23a1e5d..ee32a92 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
@@ -97,6 +97,7 @@ public class RowFilter extends CompareFilter {
  /**
   * @return The filter serialized using pb
   */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.RowFilter.Builder builder =
       FilterProtos.RowFilter.newBuilder();
@@ -136,6 +137,7 @@ public class RowFilter extends CompareFilter {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof RowFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
index 5c8668b..7dc0387 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
@@ -97,6 +97,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
   }
 
   // We cleaned result row in FilterRow to be consistent with scanning process.
+  @Override
   public boolean hasFilterRow() {
    return true;
   }
@@ -132,6 +133,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     FilterProtos.SingleColumnValueExcludeFilter.Builder builder =
       FilterProtos.SingleColumnValueExcludeFilter.newBuilder();
@@ -175,6 +177,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof SingleColumnValueExcludeFilter)) return false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
index 7dad1a4..0cf3e2f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
@@ -216,16 +216,19 @@ public class SingleColumnValueFilter extends FilterBase {
     }
   }
 
+  @Override
   public boolean filterRow() {
     // If column was found, return false if it was matched, true if it was not
     // If column not found, return true if we filter if missing, false if not
     return this.foundColumn? !this.matchedColumn: this.filterIfMissing;
   }
   
+  @Override
   public boolean hasFilterRow() {
     return true;
   }
 
+  @Override
   public void reset() {
     foundColumn = false;
     matchedColumn = false;
@@ -325,6 +328,7 @@ public class SingleColumnValueFilter extends FilterBase {
   /**
    * @return The filter serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     return convert().toByteArray();
   }
@@ -364,6 +368,7 @@ public class SingleColumnValueFilter extends FilterBase {
    * @return true if and only if the fields of the filter that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(Filter o) {
     if (o == this) return true;
     if (!(o instanceof SingleColumnValueFilter)) return false;
@@ -382,6 +387,7 @@ public class SingleColumnValueFilter extends FilterBase {
    * column in whole scan. If filterIfMissing == false, all families are essential,
    * because of possibility of skipping the rows without any data in filtered CF.
    */
+  @Override
   public boolean isFamilyEssential(byte[] name) {
     return !this.filterIfMissing || Bytes.equals(name, this.columnFamily);
   }
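
To illustrate how isFamilyEssential() above interacts with filterIfMissing, here is a usage sketch with invented family, qualifier, and value names: once setFilterIfMissing(true) is set, rows lacking the checked column are dropped, and only the filtered family is reported as essential, so other families can be loaded lazily for rows that pass.

  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
  import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
  import org.apache.hadoop.hbase.util.Bytes;

  public class EssentialFamilySketch {
    static Scan activeRowsScan() {
      SingleColumnValueFilter filter = new SingleColumnValueFilter(
          Bytes.toBytes("info"), Bytes.toBytes("status"),
          CompareOp.EQUAL, Bytes.toBytes("ACTIVE"));
      filter.setFilterIfMissing(true);  // drop rows without info:status
      Scan scan = new Scan();
      scan.setFilter(filter);           // other families become non-essential server side
      return scan;
    }
  }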