You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by ap...@apache.org on 2017/11/18 01:22:32 UTC

[04/30] hbase git commit: HBASE-19239 Fix findbugs and error-prone issues

HBASE-19239 Fix findbugs and error-prone issues

Fixes for hbase-common


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c179d514
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c179d514
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c179d514

Branch: refs/heads/branch-1
Commit: c179d5144f9caead37dddcdd6c92cd52dd4b50bd
Parents: bda31bb
Author: Andrew Purtell <ap...@apache.org>
Authored: Wed Nov 15 18:47:45 2017 -0800
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Nov 17 15:20:45 2017 -0800

----------------------------------------------------------------------
 .../hadoop/hbase/AsyncConsoleAppender.java      |   1 +
 .../java/org/apache/hadoop/hbase/CellUtil.java  |   2 +-
 .../JitterScheduledThreadPoolExecutorImpl.java  |   3 +-
 .../java/org/apache/hadoop/hbase/KeyValue.java  |  12 +-
 .../org/apache/hadoop/hbase/ProcedureInfo.java  |   1 +
 .../hadoop/hbase/codec/CellCodecWithTags.java   |   1 +
 .../hadoop/hbase/codec/KeyValueCodec.java       |   1 +
 .../hbase/codec/KeyValueCodecWithTags.java      |   1 +
 .../hadoop/hbase/io/BoundedByteBufferPool.java  |   8 +-
 .../hadoop/hbase/io/ImmutableBytesWritable.java |   3 +
 .../hadoop/hbase/io/compress/Compression.java   |  12 +-
 .../hbase/io/encoding/DataBlockEncoding.java    |   9 +-
 .../hbase/io/encoding/DiffKeyDeltaEncoder.java  |   2 +-
 .../hbase/io/encoding/EncodedDataBlock.java     |   8 +-
 .../hbase/io/encoding/FastDiffDeltaEncoder.java |   2 +-
 .../hadoop/hbase/io/util/LRUDictionary.java     |   4 +-
 .../hadoop/hbase/security/UserProvider.java     |   6 +-
 .../hadoop/hbase/trace/SpanReceiverHost.java    |   2 +-
 .../hadoop/hbase/types/CopyOnWriteArrayMap.java |   6 +-
 .../hadoop/hbase/util/AbstractByteRange.java    |   6 +-
 .../org/apache/hadoop/hbase/util/Base64.java    |   9 +-
 .../hadoop/hbase/util/ByteBufferArray.java      |   2 +-
 .../hadoop/hbase/util/ByteBufferUtils.java      |  20 +--
 .../org/apache/hadoop/hbase/util/Bytes.java     |  10 +-
 .../hadoop/hbase/util/ClassLoaderBase.java      |   1 +
 .../org/apache/hadoop/hbase/util/ClassSize.java |   2 +-
 .../org/apache/hadoop/hbase/util/HasThread.java |   3 +-
 .../java/org/apache/hadoop/hbase/util/JVM.java  |   7 +-
 .../org/apache/hadoop/hbase/util/LongAdder.java |   6 +
 .../apache/hadoop/hbase/util/OrderedBytes.java  |  26 ++--
 .../org/apache/hadoop/hbase/util/Striped64.java |   1 +
 .../org/apache/hadoop/hbase/util/Threads.java   |   5 +-
 .../org/apache/hadoop/hbase/util/Triple.java    |   6 +-
 .../apache/hadoop/hbase/util/UnsafeAccess.java  |   8 +-
 .../org/apache/hadoop/hbase/ClassFinder.java    |   2 +-
 .../hadoop/hbase/HBaseCommonTestingUtility.java |   3 +-
 .../org/apache/hadoop/hbase/TestCellUtil.java   |  31 +++--
 .../apache/hadoop/hbase/TestChoreService.java   |  20 +--
 .../hadoop/hbase/TestHBaseConfiguration.java    |   7 +-
 .../org/apache/hadoop/hbase/TestKeyValue.java   |   2 +-
 .../hadoop/hbase/codec/TestKeyValueCodec.java   |   4 +-
 .../hbase/io/crypto/TestCipherProvider.java     |   6 +-
 .../hadoop/hbase/io/crypto/TestKeyProvider.java |   6 +-
 .../io/crypto/TestKeyStoreKeyProvider.java      |   3 +-
 .../hadoop/hbase/io/crypto/aes/TestAES.java     |   8 +-
 .../hadoop/hbase/io/util/TestLRUDictionary.java |   2 +-
 .../apache/hadoop/hbase/types/TestStruct.java   | 134 +++++++++++++++----
 .../hbase/util/ClassLoaderTestHelper.java       |   4 +-
 .../org/apache/hadoop/hbase/util/TestBytes.java |  22 +--
 .../hbase/util/TestConcatenatedLists.java       |   2 +
 .../hbase/util/TestCoprocessorClassLoader.java  |  18 ++-
 .../hbase/util/TestLoadTestKVGenerator.java     |   9 +-
 .../hadoop/hbase/util/TestOrderedBytes.java     |  25 +++-
 53 files changed, 321 insertions(+), 183 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
index 338265d..701c6e1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
@@ -40,6 +40,7 @@ public class AsyncConsoleAppender extends AsyncAppender {
     consoleAppender.setTarget(value);
   }
 
+  @Override
   public void activateOptions() {
     consoleAppender.activateOptions();
     super.activateOptions();

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 0290ded..981fad8 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -633,7 +633,7 @@ public final class CellUtil {
     if (cell instanceof KeyValue) {
       return ((KeyValue)cell).heapSizeWithoutTags();
     }
-    return getSumOfCellKeyElementLengths(cell) + cell.getValueLength();
+    return (long) getSumOfCellKeyElementLengths(cell) + cell.getValueLength();
   }
 
   /********************* tags *************************************/

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
index c330fa7..980f001 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
@@ -51,12 +51,13 @@ public class JitterScheduledThreadPoolExecutorImpl extends ScheduledThreadPoolEx
     this.spread = spread;
   }
 
+  @Override
   protected <V> java.util.concurrent.RunnableScheduledFuture<V> decorateTask(
       Runnable runnable, java.util.concurrent.RunnableScheduledFuture<V> task) {
     return new JitteredRunnableScheduledFuture<>(task);
   }
 
-
+  @Override
   protected <V> java.util.concurrent.RunnableScheduledFuture<V> decorateTask(
       Callable<V> callable, java.util.concurrent.RunnableScheduledFuture<V> task) {
     return new JitteredRunnableScheduledFuture<>(task);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 7670aea..2b09faf 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -197,9 +197,9 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
    */
   public static long getKeyValueDataStructureSize(int klength, int vlength, int tagsLength) {
     if (tagsLength == 0) {
-      return KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + klength + vlength;
+      return (long) KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + klength + vlength;
     }
-    return KeyValue.KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE + klength + vlength + tagsLength;
+    return (long) KeyValue.KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE + klength + vlength + tagsLength;
   }
 
   /**
@@ -213,7 +213,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
    * @return the key data structure length
    */
   public static long getKeyDataStructureSize(int rlength, int flength, int qlength) {
-    return KeyValue.KEY_INFRASTRUCTURE_SIZE + rlength + flength + qlength;
+    return (long) KeyValue.KEY_INFRASTRUCTURE_SIZE + rlength + flength + qlength;
   }
 
   /**
@@ -2531,7 +2531,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
     int length = kv.getLength();
     out.writeInt(length);
     out.write(kv.getBuffer(), kv.getOffset(), length);
-    return length + Bytes.SIZEOF_INT;
+    return (long) length + Bytes.SIZEOF_INT;
   }
 
   /**
@@ -2553,7 +2553,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
    // This does same as DataOutput#writeInt (big-endian, etc.)
     out.write(Bytes.toBytes(length));
     out.write(kv.getBuffer(), kv.getOffset(), length);
-    return length + Bytes.SIZEOF_INT;
+    return (long) length + Bytes.SIZEOF_INT;
   }
 
   /**
@@ -2580,7 +2580,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
    // This does same as DataOutput#writeInt (big-endian, etc.)
     StreamUtils.writeInt(out, length);
     out.write(kv.getBuffer(), kv.getOffset(), length);
-    return length + Bytes.SIZEOF_INT;
+    return (long) length + Bytes.SIZEOF_INT;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
index 939002c..e427e50 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
@@ -79,6 +79,7 @@ public class ProcedureInfo implements Cloneable {
 
   @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="CN_IDIOM_NO_SUPER_CALL",
       justification="Intentional; calling super class clone doesn't make sense here.")
+  @Override
   public ProcedureInfo clone() {
     return new ProcedureInfo(procId, procName, procOwner, procState, parentId, nonceKey,
       exception, lastUpdate, startTime, result);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
index b4efaf8..21c841c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
@@ -80,6 +80,7 @@ public class CellCodecWithTags implements Codec {
       super(in);
     }
 
+    @Override
     protected Cell parseCell() throws IOException {
       byte[] row = readByteArray(this.in);
       byte[] family = readByteArray(in);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
index 07fd838..6df9ec3 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
@@ -64,6 +64,7 @@ public class KeyValueCodec implements Codec {
       super(in);
     }
 
+    @Override
     protected Cell parseCell() throws IOException {
       return KeyValueUtil.iscreate(in, false);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
index 5d34a46..c241785 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
@@ -70,6 +70,7 @@ public class KeyValueCodecWithTags implements Codec {
       super(in);
     }
 
+    @Override
     protected Cell parseCell() throws IOException {
       return KeyValueUtil.iscreate(in, true);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java
index 4d52614..8c371a6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java
@@ -60,7 +60,7 @@ public class BoundedByteBufferPool {
   volatile int runningAverage;
 
   // Scratch that keeps rough total size of pooled bytebuffers
-  private volatile int totalReservoirCapacity;
+  private AtomicLong totalReservoirCapacity = new AtomicLong(0);
 
   // For reporting
   private AtomicLong allocations = new AtomicLong(0);
@@ -89,7 +89,7 @@ public class BoundedByteBufferPool {
     try {
       bb = this.buffers.poll();
       if (bb != null) {
-        this.totalReservoirCapacity -= bb.capacity();
+        this.totalReservoirCapacity.addAndGet(-bb.capacity());
       }
     } finally {
       lock.unlock();
@@ -119,8 +119,8 @@ public class BoundedByteBufferPool {
     try {
       success = this.buffers.offer(bb);
       if (success) {
-        this.totalReservoirCapacity += bb.capacity();
-        average = this.totalReservoirCapacity / this.buffers.size(); // size will never be 0.
+        average = (int) this.totalReservoirCapacity.addAndGet(bb.capacity()) /
+            this.buffers.size(); // size will never be 0.
       }
     } finally {
       lock.unlock();

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
index d74a5d6..dd7e300 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
@@ -148,6 +148,7 @@ implements WritableComparable<ImmutableBytesWritable> {
     return this.offset;
   }
 
+  @Override
   public void readFields(final DataInput in) throws IOException {
     this.length = in.readInt();
     this.bytes = new byte[this.length];
@@ -155,6 +156,7 @@ implements WritableComparable<ImmutableBytesWritable> {
     this.offset = 0;
   }
 
+  @Override
   public void write(final DataOutput out) throws IOException {
     out.writeInt(this.length);
     out.write(this.bytes, this.offset, this.length);
@@ -175,6 +177,7 @@ implements WritableComparable<ImmutableBytesWritable> {
    * @return Positive if left is bigger than right, 0 if they are equal, and
    *         negative if left is smaller than right.
    */
+  @Override
   public int compareTo(ImmutableBytesWritable that) {
     return WritableComparator.compareBytes(
       this.bytes, this.offset, this.length,

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
index 9697da3..d056115 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
@@ -106,7 +106,7 @@ public final class Compression {
     LZO("lzo") {
       // Use base type to avoid compile-time dependencies.
       private volatile transient CompressionCodec lzoCodec;
-      private transient Object lock = new Object();
+      private final transient Object lock = new Object();
 
       @Override
       CompressionCodec getCodec(Configuration conf) {
@@ -133,7 +133,7 @@ public final class Compression {
     },
     GZ("gz") {
       private volatile transient GzipCodec codec;
-      private transient Object lock = new Object();
+      private final transient Object lock = new Object();
 
       @Override
       DefaultCodec getCodec(Configuration conf) {
@@ -185,7 +185,7 @@ public final class Compression {
     SNAPPY("snappy") {
       // Use base type to avoid compile-time dependencies.
       private volatile transient CompressionCodec snappyCodec;
-      private transient Object lock = new Object();
+      private final transient Object lock = new Object();
 
       @Override
       CompressionCodec getCodec(Configuration conf) {
@@ -212,7 +212,7 @@ public final class Compression {
     LZ4("lz4") {
       // Use base type to avoid compile-time dependencies.
       private volatile transient CompressionCodec lz4Codec;
-      private transient Object lock = new Object();
+      private final transient Object lock = new Object();
 
       @Override
       CompressionCodec getCodec(Configuration conf) {
@@ -239,7 +239,7 @@ public final class Compression {
     BZIP2("bzip2") {
       // Use base type to avoid compile-time dependencies.
       private volatile transient CompressionCodec bzipCodec;
-      private transient Object lock = new Object();
+      private final transient Object lock = new Object();
 
       @Override
       CompressionCodec getCodec(Configuration conf) {
@@ -266,7 +266,7 @@ public final class Compression {
     ZSTD("zstd") {
       // Use base type to avoid compile-time dependencies.
       private volatile transient CompressionCodec zStandardCodec;
-      private transient Object lock = new Object();
+      private final transient Object lock = new Object();
 
       @Override
       CompressionCodec getCodec(Configuration conf) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
index d7535e5..7d6b15c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
@@ -176,13 +176,10 @@ public enum DataBlockEncoding {
 
   protected static DataBlockEncoder createEncoder(String fullyQualifiedClassName){
       try {
-        return (DataBlockEncoder)Class.forName(fullyQualifiedClassName).newInstance();
-      } catch (InstantiationException e) {
+        return (DataBlockEncoder)Class.forName(fullyQualifiedClassName)
+            .getDeclaredConstructor().newInstance();
+      } catch (Exception e) {
         throw new RuntimeException(e);
-      } catch (IllegalAccessException e) {
-        throw new RuntimeException(e);
-      } catch (ClassNotFoundException e) {
-        throw new IllegalArgumentException(e);
       }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
index 6144478..f28100d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
@@ -206,7 +206,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder {
 
   private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell)
       throws IOException {
-    byte flag = 0;
+    int flag = 0; // Do not use more bits than can fit into a byte
     int kLength = KeyValueUtil.keyLength(cell);
     int vLength = cell.getValueLength();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
index a4fca2c..192c84d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
@@ -255,7 +255,7 @@ public class EncodedDataBlock {
       }
       BufferGrabbingByteArrayOutputStream stream = new BufferGrabbingByteArrayOutputStream();
       baos.writeTo(stream);
-      this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.buf);
+      this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.ourBytes);
     } catch (IOException e) {
       throw new RuntimeException(String.format(
           "Bug in encoding part of algorithm %s. " +
@@ -266,11 +266,11 @@ public class EncodedDataBlock {
   }
 
   private static class BufferGrabbingByteArrayOutputStream extends ByteArrayOutputStream {
-    private byte[] buf;
+    private byte[] ourBytes;
 
     @Override
-    public void write(byte[] b, int off, int len) {
-      this.buf = b;
+    public synchronized void write(byte[] b, int off, int len) {
+      this.ourBytes = b;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
index c14b542..72b6a5c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
@@ -250,7 +250,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder {
 
   private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell)
       throws IOException {
-    byte flag = 0;
+    int flag = 0; // Do not use more bits than will fit into a byte
     int kLength = KeyValueUtil.keyLength(cell);
     int vLength = cell.getValueLength();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
index 8562cf0..78ae9a6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
@@ -134,7 +134,7 @@ public class LRUDictionary implements Dictionary {
     }
 
     private void moveToHead(Node n) {
-      if (head == n) {
+      if (head.equals(n)) {
         // no-op -- it's already the head.
         return;
       }
@@ -147,7 +147,7 @@ public class LRUDictionary implements Dictionary {
       if (n.next != null) {
         n.next.prev = n.prev;
       } else {
-        assert n == tail;
+        assert n.equals(tail);
         tail = n.prev;
       }
       // Node is now removed from the list. Re-add it at the head.

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
index 7e6de4a..9eac12f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
@@ -98,11 +98,11 @@ public class UserProvider extends BaseConfigurable {
           }
 
           // Provide the reload function that uses the executor thread.
-          public ListenableFuture<String[]> reload(final String k,
-                                                   String[] oldValue) throws Exception {
+          @Override
+          public ListenableFuture<String[]> reload(final String k, String[] oldValue)
+              throws Exception {
 
             return executor.submit(new Callable<String[]>() {
-
               @Override
               public String[] call() throws Exception {
                 return getGroupStrings(k);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
index 2d9c398..4818efc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
@@ -45,7 +45,7 @@ public class SpanReceiverHost {
   @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SE_BAD_FIELD")
   private static enum SingletonHolder {
     INSTANCE;
-    Object lock = new Object();
+    final Object lock = new Object();
     SpanReceiverHost host = null; // FindBugs: SE_BAD_FIELD
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java
index 8de39ae..8ed2322 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java
@@ -706,7 +706,7 @@ public class CopyOnWriteArrayMap<K, V> extends AbstractMap<K, V>
     }
   }
 
-  private final class ArrayKeyIterator<K, V> implements Iterator<K> {
+  private static final class ArrayKeyIterator<K, V> implements Iterator<K> {
     int index;
     private final ArrayHolder<K, V> holder;
 
@@ -732,7 +732,7 @@ public class CopyOnWriteArrayMap<K, V> extends AbstractMap<K, V>
     }
   }
 
-  private final class ArrayValueIterator<K, V> implements Iterator<V> {
+  private static final class ArrayValueIterator<K, V> implements Iterator<V> {
     int index;
     private final ArrayHolder<K, V> holder;
 
@@ -758,7 +758,7 @@ public class CopyOnWriteArrayMap<K, V> extends AbstractMap<K, V>
     }
   }
 
-  private final class ArrayEntryIterator<K, V> implements Iterator<Map.Entry<K, V>> {
+  private static final class ArrayEntryIterator<K, V> implements Iterator<Map.Entry<K, V>> {
 
     int index;
     private final ArrayHolder<K, V> holder;

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java
index a68d12f..137d1ca 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java
@@ -189,9 +189,9 @@ public abstract class AbstractByteRange implements ByteRange {
   public short getShort(int index) {
     int offset = this.offset + index;
     short n = 0;
-    n ^= bytes[offset] & 0xFF;
-    n <<= 8;
-    n ^= bytes[offset + 1] & 0xFF;
+    n = (short) ((n ^ bytes[offset]) & 0xFF);
+    n = (short) (n << 8);
+    n = (short) ((n ^ bytes[offset + 1]) & 0xFF);
     return n;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
index 0043135..00c05cd 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
@@ -40,6 +40,7 @@ import java.io.ObjectOutputStream;
 import java.io.OutputStream;
 import java.io.Serializable;
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
@@ -571,7 +572,7 @@ public class Base64 {
       return new String(baos.toByteArray(), PREFERRED_ENCODING);
 
     } catch (UnsupportedEncodingException uue) {
-      return new String(baos.toByteArray());
+      return new String(baos.toByteArray(), StandardCharsets.UTF_8);
 
     } catch (IOException e) {
       LOG.error("error encoding object", e);
@@ -696,7 +697,7 @@ public class Base64 {
         return new String(baos.toByteArray(), PREFERRED_ENCODING);
 
       } catch (UnsupportedEncodingException uue) {
-        return new String(baos.toByteArray());
+        return new String(baos.toByteArray(), StandardCharsets.UTF_8);
 
       } catch (IOException e) {
         LOG.error("error encoding byte array", e);
@@ -753,7 +754,7 @@ public class Base64 {
       return new String(outBuff, 0, e, PREFERRED_ENCODING);
 
     } catch (UnsupportedEncodingException uue) {
-      return new String(outBuff, 0, e);
+      return new String(outBuff, 0, e, StandardCharsets.UTF_8);
     }
   } // end encodeBytes
 
@@ -928,7 +929,7 @@ public class Base64 {
       bytes = s.getBytes(PREFERRED_ENCODING);
 
     } catch (UnsupportedEncodingException uee) {
-      bytes = s.getBytes();
+      bytes = s.getBytes(StandardCharsets.UTF_8);
     } // end catch
 
     // Decode

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
index d3414dd..d9f8dcf 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
@@ -167,7 +167,7 @@ public final class ByteBufferArray {
     int endBuffer = (int) (end / bufferSize), endOffset = (int) (end % bufferSize);
     assert array.length >= len + arrayOffset;
     assert startBuffer >= 0 && startBuffer < bufferCount;
-    assert endBuffer >= 0 && endBuffer < bufferCount
+    assert (endBuffer >= 0 && endBuffer < bufferCount)
         || (endBuffer == bufferCount && endOffset == 0);
     if (startBuffer >= locks.length || startBuffer < 0) {
       String msg = "Failed multiple, start=" + start + ",startBuffer="

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
index cf59f69..b5b1d96 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
@@ -190,27 +190,27 @@ public final class ByteBufferUtils {
       return 8;
     }
 
-    if (value < (1l << 4 * 8)) {
+    if (value < (1l << (4 * 8))) {
       // no more than 4 bytes
-      if (value < (1l << 2 * 8)) {
-        if (value < (1l << 1 * 8)) {
+      if (value < (1l << (2 * 8))) {
+        if (value < (1l << (1 * 8))) {
           return 1;
         }
         return 2;
       }
-      if (value < (1l << 3 * 8)) {
+      if (value < (1l << (3 * 8))) {
         return 3;
       }
       return 4;
     }
     // more than 4 bytes
-    if (value < (1l << 6 * 8)) {
-      if (value < (1l << 5 * 8)) {
+    if (value < (1l << (6 * 8))) {
+      if (value < (1l << (5 * 8))) {
         return 5;
       }
       return 6;
     }
-    if (value < (1l << 7 * 8)) {
+    if (value < (1l << (7 * 8))) {
       return 7;
     }
     return 8;
@@ -226,13 +226,13 @@ public final class ByteBufferUtils {
       return 4;
     }
 
-    if (value < (1 << 2 * 8)) {
-      if (value < (1 << 1 * 8)) {
+    if (value < (1 << (2 * 8))) {
+      if (value < (1 << (1 * 8))) {
         return 1;
       }
       return 2;
     }
-    if (value <= (1 << 3 * 8)) {
+    if (value <= (1 << (3 * 8))) {
       return 3;
     }
     return 4;

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index 820b81e..93dcafe 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -1122,9 +1122,9 @@ public class Bytes implements Comparable<Bytes> {
       return toShortUnsafe(bytes, offset);
     } else {
       short n = 0;
-      n ^= bytes[offset] & 0xFF;
-      n <<= 8;
-      n ^= bytes[offset+1] & 0xFF;
+      n = (short) ((n ^ bytes[offset]) & 0xFF);
+      n = (short) (n << 8);
+      n = (short) ((n ^ bytes[offset+1]) & 0xFF);
       return n;
    }
   }
@@ -1565,8 +1565,8 @@ public class Bytes implements Comparable<Bytes> {
         final int stride = 8;
         final int minLength = Math.min(length1, length2);
         int strideLimit = minLength & ~(stride - 1);
-        final long offset1Adj = offset1 + BYTE_ARRAY_BASE_OFFSET;
-        final long offset2Adj = offset2 + BYTE_ARRAY_BASE_OFFSET;
+        final long offset1Adj = (long) offset1 + BYTE_ARRAY_BASE_OFFSET;
+        final long offset2Adj = (long) offset2 + BYTE_ARRAY_BASE_OFFSET;
         int i;
 
         /*

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java
index 262864a..35acbde 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java
@@ -62,6 +62,7 @@ public class ClassLoaderBase extends URLClassLoader {
   /**
    * Returns the lock object for class loading operations.
    */
+  @Override
   protected Object getClassLoadingLock(String className) {
     Object lock = parallelLockMap.get(className);
     if (lock != null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index 51e1694..edb5b2d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -344,7 +344,7 @@ public class ClassSize {
    * @return the size estimate, in bytes
    */
   private static long estimateBaseFromCoefficients(int [] coeff, boolean debug) {
-    long prealign_size = OBJECT + coeff[0] + coeff[2] * REFERENCE;
+    long prealign_size = (long) OBJECT + coeff[0] + coeff[2] * REFERENCE;
 
     // Round up to a multiple of 8
     long size = align(prealign_size) + align(coeff[1] * ARRAY);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java
index 4457fe0..439b321 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java
@@ -44,7 +44,8 @@ public abstract class HasThread implements Runnable {
   public Thread getThread() {
     return thread;
   }
-  
+
+  @Override
   public abstract void run();
   
   //// Begin delegation to Thread

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
index 3625a12..9e9fc6f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
@@ -26,6 +26,7 @@ import java.lang.management.ManagementFactory;
 import java.lang.management.OperatingSystemMXBean;
 import java.lang.management.RuntimeMXBean;
 import java.lang.reflect.Method;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -160,7 +161,7 @@ public class JVM {
           new String[]{"bash", "-c",
               "ls /proc/" + pidhost[0] + "/fdinfo | wc -l"});
       inputStream = p.getInputStream();
-      inputStreamReader = new InputStreamReader(inputStream);
+      inputStreamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
       bufferedReader = new BufferedReader(inputStreamReader);
       String openFileDesCount;
       if ((openFileDesCount = bufferedReader.readLine()) != null) {
@@ -236,7 +237,7 @@ public class JVM {
       int count = 0;
       Process p = Runtime.getRuntime().exec("ps -e");
       inputStream = p.getInputStream();
-      inputStreamReader = new InputStreamReader(inputStream);
+      inputStreamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
       bufferedReader = new BufferedReader(inputStreamReader);
       while (bufferedReader.readLine() != null) {
         count++;
@@ -288,7 +289,7 @@ public class JVM {
       //using linux bash commands to retrieve info
       Process p = Runtime.getRuntime().exec(new String[]{"bash", "-c", "ulimit -n"});
       in = p.getInputStream();
-      output = new BufferedReader(new InputStreamReader(in));
+      output = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
       String maxFileDesCount;
       if ((maxFileDesCount = output.readLine()) != null) {
         return Long.parseLong(maxFileDesCount);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java
index 9bdb829..a969949 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java
@@ -67,6 +67,7 @@ public class LongAdder extends Striped64 implements Serializable {
     /**
      * Version of plus for use in retryUpdate
      */
+    @Override
     final long fn(long v, long x) { return v + x; }
 
     /**
@@ -171,6 +172,7 @@ public class LongAdder extends Striped64 implements Serializable {
      * Returns the String representation of the {@link #sum}.
      * @return the String representation of the {@link #sum}
      */
+    @Override
     public String toString() {
         return Long.toString(sum());
     }
@@ -180,6 +182,7 @@ public class LongAdder extends Striped64 implements Serializable {
      *
      * @return the sum
      */
+    @Override
     public long longValue() {
         return sum();
     }
@@ -188,6 +191,7 @@ public class LongAdder extends Striped64 implements Serializable {
      * Returns the {@link #sum} as an {@code int} after a narrowing
      * primitive conversion.
      */
+    @Override
     public int intValue() {
         return (int)sum();
     }
@@ -196,6 +200,7 @@ public class LongAdder extends Striped64 implements Serializable {
      * Returns the {@link #sum} as a {@code float}
      * after a widening primitive conversion.
      */
+    @Override
     public float floatValue() {
         return (float)sum();
     }
@@ -204,6 +209,7 @@ public class LongAdder extends Striped64 implements Serializable {
      * Returns the {@link #sum} as a {@code double} after a widening
      * primitive conversion.
      */
+    @Override
     public double doubleValue() {
         return (double)sum();
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
index 9a40aee..03ff745 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
@@ -505,17 +505,17 @@ public class OrderedBytes {
     x = src.get();
     a1 = ord.apply(x) & 0xff;
     if (-1 == unsignedCmp(a0, 249)) {
-      return (a0 - 241) * 256 + a1 + 240;
+      return (a0 - 241L) * 256 + a1 + 240;
     }
     x = src.get();
     a2 = ord.apply(x) & 0xff;
     if (a0 == 249) {
-      return 2288 + 256 * a1 + a2;
+      return 2288L + 256 * a1 + a2;
     }
     x = src.get();
     a3 = ord.apply(x) & 0xff;
     if (a0 == 250) {
-      return (a1 << 16) | (a2 << 8) | a3;
+      return ((long) a1 << 16L) | (a2 << 8) | a3;
     }
     x = src.get();
     a4 = ord.apply(x) & 0xff;
@@ -665,7 +665,8 @@ public class OrderedBytes {
       dst.put((byte) ((2 * d + 1) & 0xff));
       abs = abs.subtract(BigDecimal.valueOf(d));
     }
-    a[offset + dst.getPosition() - 1] &= 0xfe; // terminal digit should be 2x
+    // terminal digit should be 2x
+    a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe);
     if (isNeg) {
       // negative values encoded as ~M
       DESCENDING.apply(a, offset + startM, dst.getPosition() - startM);
@@ -749,8 +750,8 @@ public class OrderedBytes {
       dst.put((byte) (2 * d + 1));
       abs = abs.subtract(BigDecimal.valueOf(d));
     }
-
-    a[offset + dst.getPosition() - 1] &= 0xfe; // terminal digit should be 2x
+    // terminal digit should be 2x
+    a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe);
     if (isNeg) {
       // negative values encoded as ~M
       DESCENDING.apply(a, offset + startM, dst.getPosition() - startM);
@@ -1065,7 +1066,8 @@ public class OrderedBytes {
       if (s > 1) {
         dst.put((byte) (0x7f & t));
       } else {
-        dst.getBytes()[offset + dst.getPosition() - 1] &= 0x7f;
+        dst.getBytes()[offset + dst.getPosition() - 1] =
+          (byte) (dst.getBytes()[offset + dst.getPosition() - 1] & 0x7f);
       }
     }
     ord.apply(dst.getBytes(), offset + start, dst.getPosition() - start);
@@ -1118,7 +1120,7 @@ public class OrderedBytes {
         ret.put((byte) (t | ((ord.apply(a[offset + i]) & 0x7f) >>> s)));
       }
       if (i == end) break;
-      t = (byte) ((ord.apply(a[offset + i]) << 8 - s) & 0xff);
+      t = (byte) ((ord.apply(a[offset + i]) << (8 - s)) & 0xff);
       s = s == 1 ? 7 : s - 1;
     }
     src.setPosition(end);
@@ -1374,7 +1376,7 @@ public class OrderedBytes {
   public static int encodeFloat32(PositionedByteRange dst, float val, Order ord) {
     final int offset = dst.getOffset(), start = dst.getPosition();
     int i = Float.floatToIntBits(val);
-    i ^= ((i >> Integer.SIZE - 1) | Integer.MIN_VALUE);
+    i ^= ((i >> (Integer.SIZE - 1)) | Integer.MIN_VALUE);
     dst.put(FIXED_FLOAT32)
         .put((byte) (i >> 24))
         .put((byte) (i >> 16))
@@ -1396,7 +1398,7 @@ public class OrderedBytes {
     for (int i = 1; i < 4; i++) {
       val = (val << 8) + (ord.apply(src.get()) & 0xff);
     }
-    val ^= (~val >> Integer.SIZE - 1) | Integer.MIN_VALUE;
+    val ^= (~val >> (Integer.SIZE - 1)) | Integer.MIN_VALUE;
     return Float.intBitsToFloat(val);
   }
 
@@ -1468,7 +1470,7 @@ public class OrderedBytes {
   public static int encodeFloat64(PositionedByteRange dst, double val, Order ord) {
     final int offset = dst.getOffset(), start = dst.getPosition();
     long lng = Double.doubleToLongBits(val);
-    lng ^= ((lng >> Long.SIZE - 1) | Long.MIN_VALUE);
+    lng ^= ((lng >> (Long.SIZE - 1)) | Long.MIN_VALUE);
     dst.put(FIXED_FLOAT64)
         .put((byte) (lng >> 56))
         .put((byte) (lng >> 48))
@@ -1494,7 +1496,7 @@ public class OrderedBytes {
     for (int i = 1; i < 8; i++) {
       val = (val << 8) + (ord.apply(src.get()) & 0xff);
     }
-    val ^= (~val >> Long.SIZE - 1) | Long.MIN_VALUE;
+    val ^= (~val >> (Long.SIZE - 1)) | Long.MIN_VALUE;
     return Double.longBitsToDouble(val);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java
index 36f2fce..02b9b3f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java
@@ -338,6 +338,7 @@ abstract class Striped64 extends Number {
         try {
             return java.security.AccessController.doPrivileged
             (new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
+                @Override
                 public sun.misc.Unsafe run() throws Exception {
                     Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
                     for (java.lang.reflect.Field f : k.getDeclaredFields()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
index 5c2bc12..fa02b25 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
@@ -18,11 +18,13 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import java.io.OutputStreamWriter;
 import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.lang.Thread.UncaughtExceptionHandler;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
@@ -298,7 +300,8 @@ public class Threads {
       if (PRINT_THREAD_INFO_METHOD_WITH_PRINTSTREAM) {
         PRINT_THREAD_INFO_METHOD.invoke(null, stream, title);
       } else {
-        PRINT_THREAD_INFO_METHOD.invoke(null, new PrintWriter(stream), title);
+        PRINT_THREAD_INFO_METHOD.invoke(null,
+          new PrintWriter(new OutputStreamWriter(stream, StandardCharsets.UTF_8)), title);
       }
     } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
       throw new RuntimeException(e.getCause());

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
index 1438ab7..059ed0e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
@@ -40,6 +40,7 @@ public class Triple<A, B, C> {
     return new Triple<A, B, C>(first, second, third);
   }
 
+  @Override
   public int hashCode() {
     int hashFirst = (first != null ? first.hashCode() : 0);
     int hashSecond = (second != null ? second.hashCode() : 0);
@@ -48,6 +49,7 @@ public class Triple<A, B, C> {
     return (hashFirst >> 1) ^ hashSecond ^ (hashThird << 1);
   }
 
+  @Override
   public boolean equals(Object obj) {
     if (!(obj instanceof Triple)) {
       return false;
@@ -65,6 +67,7 @@ public class Triple<A, B, C> {
     return true;
   }
 
+  @Override
   public String toString() {
     return "(" + first + ", " + second + "," + third + " )";
   }
@@ -93,6 +96,3 @@ public class Triple<A, B, C> {
     this.third = third;
   }
 }
-
-
-

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
index 97bce75..9ebff7d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
@@ -89,7 +89,7 @@ public final class UnsafeAccess {
       destAddress = destAddress + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
       destBase = dest.array();
     }
-    long srcAddress = srcOffset + BYTE_ARRAY_BASE_OFFSET;
+    long srcAddress = (long) srcOffset + BYTE_ARRAY_BASE_OFFSET;
     unsafeCopy(src, srcAddress, destBase, destAddress, length);
   }
 
@@ -123,7 +123,7 @@ public final class UnsafeAccess {
       srcAddress = srcAddress + BYTE_ARRAY_BASE_OFFSET + src.arrayOffset();
       srcBase = src.array();
     }
-    long destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET;
+    long destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET;
     unsafeCopy(srcBase, srcAddress, dest, destAddress, length);
   }
 
@@ -144,13 +144,13 @@ public final class UnsafeAccess {
     if (src.isDirect()) {
       srcAddress = srcOffset + ((DirectBuffer) src).address();
     } else {
-      srcAddress = srcOffset +  src.arrayOffset() + BYTE_ARRAY_BASE_OFFSET;
+      srcAddress = (long) srcOffset +  src.arrayOffset() + BYTE_ARRAY_BASE_OFFSET;
       srcBase = src.array();
     }
     if (dest.isDirect()) {
       destAddress = destOffset + ((DirectBuffer) dest).address();
     } else {
-      destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
+      destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
       destBase = dest.array();
     }
     unsafeCopy(srcBase, srcAddress, destBase, destAddress, length);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
index 2e0436c..a288692 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
@@ -287,7 +287,7 @@ public class ClassFinder {
     return null;
   }
 
-  private class FileFilterWithName implements FileFilter {
+  private static class FileFilterWithName implements FileFilter {
     private FileNameFilter nameFilter;
 
     public FileFilterWithName(FileNameFilter nameFilter) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index b7361bf..2cdab5d 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -203,6 +203,7 @@ public class HBaseCommonTestingUtility {
         LOG.warn("Failed to delete " + dir.getAbsolutePath(), ex);
       }
     } while (ntries < 30);
-    return ntries < 30;
+
+    return false;
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
index 435f8bb..7080078 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase;
 import static org.junit.Assert.*;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.NavigableMap;
@@ -52,7 +53,7 @@ public class TestCellUtil {
   /**
    * CellScanner used in test.
    */
-  private class TestCellScanner implements CellScanner {
+  private static class TestCellScanner implements CellScanner {
     private int count = 0;
     private Cell current = null;
     private final int cellsCount;
@@ -80,7 +81,7 @@ public class TestCellUtil {
   /**
    * Cell used in test. Has row only.
    */
-  private class TestCell implements Cell {
+  private static class TestCell implements Cell {
     private final byte [] row;
 
     TestCell(final int i) {
@@ -331,7 +332,8 @@ public class TestCellUtil {
   @Test
   public void testFindCommonPrefixInFlatKey() {
     // The whole key matching case
-    KeyValue kv1 = new KeyValue("r1".getBytes(), "f1".getBytes(), "q1".getBytes(), null);
+    KeyValue kv1 = new KeyValue("r1".getBytes(StandardCharsets.UTF_8),
+      "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
     Assert.assertEquals(kv1.getKeyLength(),
         CellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, true));
     Assert.assertEquals(kv1.getKeyLength(),
@@ -339,30 +341,35 @@ public class TestCellUtil {
     Assert.assertEquals(kv1.getKeyLength() - KeyValue.TIMESTAMP_TYPE_SIZE,
         CellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, false));
     // The rk length itself mismatch
-    KeyValue kv2 = new KeyValue("r12".getBytes(), "f1".getBytes(), "q1".getBytes(), null);
+    KeyValue kv2 = new KeyValue("r12".getBytes(StandardCharsets.UTF_8),
+      "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
     Assert.assertEquals(1, CellUtil.findCommonPrefixInFlatKey(kv1, kv2, true, true));
     // part of rk is same
-    KeyValue kv3 = new KeyValue("r14".getBytes(), "f1".getBytes(), "q1".getBytes(), null);
-    Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + "r1".getBytes().length,
+    KeyValue kv3 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
+      "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
+    Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + "r1".getBytes(StandardCharsets.UTF_8).length,
         CellUtil.findCommonPrefixInFlatKey(kv2, kv3, true, true));
     // entire rk is same but different cf name
-    KeyValue kv4 = new KeyValue("r14".getBytes(), "f2".getBytes(), "q1".getBytes(), null);
+    KeyValue kv4 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
+      "f2".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
-        + "f".getBytes().length, CellUtil.findCommonPrefixInFlatKey(kv3, kv4, false, true));
+        + "f".getBytes(StandardCharsets.UTF_8).length,
+        CellUtil.findCommonPrefixInFlatKey(kv3, kv4, false, true));
     // rk and family are same and part of qualifier
-    KeyValue kv5 = new KeyValue("r14".getBytes(), "f2".getBytes(), "q123".getBytes(), null);
+    KeyValue kv5 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
+      "f2".getBytes(StandardCharsets.UTF_8), "q123".getBytes(StandardCharsets.UTF_8), null);
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
         + kv4.getFamilyLength() + kv4.getQualifierLength(),
         CellUtil.findCommonPrefixInFlatKey(kv4, kv5, true, true));
     // rk, cf and q are same. ts differs
-    KeyValue kv6 = new KeyValue("rk".getBytes(), 1234L);
-    KeyValue kv7 = new KeyValue("rk".getBytes(), 1235L);
+    KeyValue kv6 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L);
+    KeyValue kv7 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1235L);
     // only last byte out of 8 ts bytes in ts part differs
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
         + kv6.getFamilyLength() + kv6.getQualifierLength() + 7,
         CellUtil.findCommonPrefixInFlatKey(kv6, kv7, true, true));
     // rk, cf, q and ts are same. Only type differs
-    KeyValue kv8 = new KeyValue("rk".getBytes(), 1234L, Type.Delete);
+    KeyValue kv8 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L, Type.Delete);
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
         + kv6.getFamilyLength() + kv6.getQualifierLength() + KeyValue.TIMESTAMP_SIZE,
         CellUtil.findCommonPrefixInFlatKey(kv6, kv8, true, true));

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
index e5546f6..abca0d7 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
@@ -297,10 +297,10 @@ public class TestChoreService {
       }
     };
 
-    assertEquals("Name construction failed", chore1.getName(), NAME);
-    assertEquals("Period construction failed", chore1.getPeriod(), PERIOD);
-    assertEquals("Initial Delay construction failed", chore1.getInitialDelay(), VALID_DELAY);
-    assertEquals("TimeUnit construction failed", chore1.getTimeUnit(), UNIT);
+    assertEquals("Name construction failed", NAME, chore1.getName());
+    assertEquals("Period construction failed", PERIOD, chore1.getPeriod());
+    assertEquals("Initial Delay construction failed", VALID_DELAY, chore1.getInitialDelay());
+    assertEquals("TimeUnit construction failed", UNIT, chore1.getTimeUnit());
 
     ScheduledChore invalidDelayChore =
         new ScheduledChore(NAME, new SampleStopper(), PERIOD, INVALID_DELAY, UNIT) {
@@ -475,7 +475,7 @@ public class TestChoreService {
       Thread.sleep(chorePeriod * 10);
       assertEquals("Chores are missing their start time. Should expand core pool size",
         service.getNumberOfScheduledChores(), service.getCorePoolSize());
-      assertEquals(service.getNumberOfChoresMissingStartTime(), 5);
+      assertEquals(5, service.getNumberOfChoresMissingStartTime());
 
       // Now we begin to cancel the chores that caused an increase in the core thread pool of the
       // ChoreService. These cancellations should cause a decrease in the core thread pool.
@@ -483,31 +483,31 @@ public class TestChoreService {
       Thread.sleep(chorePeriod * 10);
       assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()),
         service.getCorePoolSize());
-      assertEquals(service.getNumberOfChoresMissingStartTime(), 4);
+      assertEquals(4, service.getNumberOfChoresMissingStartTime());
 
       slowChore4.cancel();
       Thread.sleep(chorePeriod * 10);
       assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()),
         service.getCorePoolSize());
-      assertEquals(service.getNumberOfChoresMissingStartTime(), 3);
+      assertEquals(3, service.getNumberOfChoresMissingStartTime());
 
       slowChore3.cancel();
       Thread.sleep(chorePeriod * 10);
       assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()),
         service.getCorePoolSize());
-      assertEquals(service.getNumberOfChoresMissingStartTime(), 2);
+      assertEquals(2, service.getNumberOfChoresMissingStartTime());
 
       slowChore2.cancel();
       Thread.sleep(chorePeriod * 10);
       assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()),
         service.getCorePoolSize());
-      assertEquals(service.getNumberOfChoresMissingStartTime(), 1);
+      assertEquals(1, service.getNumberOfChoresMissingStartTime());
 
       slowChore1.cancel();
       Thread.sleep(chorePeriod * 10);
       assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()),
         service.getCorePoolSize());
-      assertEquals(service.getNumberOfChoresMissingStartTime(), 0);
+      assertEquals(0, service.getNumberOfChoresMissingStartTime());
     } finally {
       shutdownService(service);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
index 2a8d1a2..1aa052b 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
@@ -191,10 +191,9 @@ public class TestHBaseConfiguration {
       }
       // Instantiate Hadoop CredentialProviderFactory
       try {
-        hadoopCredProviderFactory = hadoopCredProviderFactoryClz.newInstance();
-      } catch (InstantiationException e) {
-        return false;
-      } catch (IllegalAccessException e) {
+        hadoopCredProviderFactory =
+          hadoopCredProviderFactoryClz.getDeclaredConstructor().newInstance();
+      } catch (Exception e) {
         return false;
       }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index 3baf729..7d3aa0e 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -640,7 +640,7 @@ public class TestKeyValue extends TestCase {
     assertTrue(kvA2.equals(deSerKV2));
   }
 
-  private class MockKeyValue implements Cell {
+  private static class MockKeyValue implements Cell {
     private final KeyValue kv;
 
     public MockKeyValue(KeyValue kv) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java
index 6c18dc0..9769df0 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java
@@ -67,7 +67,7 @@ public class TestKeyValueCodec {
     Codec.Encoder encoder = kvc.getEncoder(dos);
     final KeyValue kv =
       new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
-    final long length = kv.getLength() + Bytes.SIZEOF_INT; 
+    final long length = (long) kv.getLength() + Bytes.SIZEOF_INT;
     encoder.write(kv);
     encoder.flush();
     dos.close();
@@ -97,7 +97,7 @@ public class TestKeyValueCodec {
       new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2"));
     final KeyValue kv3 =
       new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), Bytes.toBytes("3"));
-    final long length = kv1.getLength() + Bytes.SIZEOF_INT; 
+    final long length = (long) kv1.getLength() + Bytes.SIZEOF_INT;
     encoder.write(kv1);
     encoder.write(kv2);
     encoder.write(kv3);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java
index 95f8ba1..9b444f5 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java
@@ -131,8 +131,8 @@ public class TestCipherProvider {
     Cipher a = Encryption.getCipher(conf, "TEST");
     assertNotNull(a);
     assertTrue(a.getProvider() instanceof MyCipherProvider);
-    assertEquals(a.getName(), "TEST");
-    assertEquals(a.getKeyLength(), 0);
+    assertEquals("TEST", a.getName());
+    assertEquals(0, a.getKeyLength());
   }
 
   @Test
@@ -147,7 +147,7 @@ public class TestCipherProvider {
     assertNotNull(a);
     assertTrue(a.getProvider() instanceof DefaultCipherProvider);
     assertEquals(a.getName(), algorithm);
-    assertEquals(a.getKeyLength(), AES.KEY_LENGTH);
+    assertEquals(AES.KEY_LENGTH, a.getKeyLength());
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
index 9c98272..fcb0b51 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
@@ -44,9 +44,9 @@ public class TestKeyProvider {
 
     Key key = provider.getKey("foo");
     assertNotNull("Test provider did not return a key as expected", key);
-    assertEquals("Test provider did not create a key for AES", key.getAlgorithm(), "AES");
-    assertEquals("Test provider did not create a key of adequate length",
-      key.getEncoded().length, AES.KEY_LENGTH);
+    assertEquals("Test provider did not create a key for AES", "AES", key.getAlgorithm());
+    assertEquals("Test provider did not create a key of adequate length", AES.KEY_LENGTH,
+      key.getEncoded().length);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
index 9e38103..bc6edb8 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertNotNull;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.security.Key;
 import java.security.KeyStore;
 import java.security.MessageDigest;
@@ -51,7 +52,7 @@ public class TestKeyStoreKeyProvider {
 
   @BeforeClass
   public static void setUp() throws Exception {
-    KEY = MessageDigest.getInstance("SHA-256").digest(ALIAS.getBytes());
+    KEY = MessageDigest.getInstance("SHA-256").digest(ALIAS.getBytes(StandardCharsets.UTF_8));
     // Create a JKECS store containing a test secret key
     KeyStore store = KeyStore.getInstance("JCEKS");
     store.load(null, PASSWORD.toCharArray());

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java
index 65260ea..55cad54 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java
@@ -53,8 +53,8 @@ public class TestAES {
   public void testAESAlgorithm() throws Exception {
     Configuration conf = HBaseConfiguration.create();
     Cipher aes = Encryption.getCipher(conf, "AES");
-    assertEquals(aes.getKeyLength(), AES.KEY_LENGTH);
-    assertEquals(aes.getIvLength(), AES.IV_LENGTH);
+    assertEquals(AES.KEY_LENGTH, aes.getKeyLength());
+    assertEquals(AES.IV_LENGTH, aes.getIvLength());
     Encryptor e = aes.getEncryptor();
     e.setKey(new SecretKeySpec(Bytes.fromHex("2b7e151628aed2a6abf7158809cf4f3c"), "AES"));
     e.setIv(Bytes.fromHex("f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff"));
@@ -89,8 +89,7 @@ public class TestAES {
     DefaultCipherProvider.getInstance().setConf(conf);
 
     AES aes = new AES(DefaultCipherProvider.getInstance());
-    assertEquals("AES did not find alternate RNG", aes.getRNG().getAlgorithm(),
-      "TestRNG");
+    assertEquals("AES did not find alternate RNG", "TestRNG", aes.getRNG().getAlgorithm());
   }
 
   static class TestProvider extends Provider {
@@ -98,6 +97,7 @@ public class TestAES {
     public TestProvider() {
       super("TEST", 1.0, "Test provider");
       AccessController.doPrivileged(new PrivilegedAction<Object>() {
+        @Override
         public Object run() {
           put("SecureRandom.TestRNG", TestAES.class.getName() + "$TestRNG");
           return null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java
index 6d16ec2..f8d0c22 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java
@@ -78,7 +78,7 @@ public class TestLRUDictionary {
     rand.nextBytes(testBytes);
 
     // Verify that our randomly generated array doesn't exist in the dictionary
-    assertEquals(testee.findEntry(testBytes, 0, testBytes.length), -1);
+    assertEquals(-1, testee.findEntry(testBytes, 0, testBytes.length));
 
     // now since we looked up an entry, we should have added it to the
     // dictionary, so it isn't empty

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
index 71b4cd1..f02087c 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
@@ -21,6 +21,7 @@ import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 
 import java.lang.reflect.Constructor;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Comparator;
@@ -70,10 +71,15 @@ public class TestStruct {
     };
 
     Object[][] pojo2Args = {
-        new Object[] { new byte[0], "it".getBytes(), "was", "the".getBytes() },
-        new Object[] { "best".getBytes(), new byte[0], "of", "times,".getBytes() },
-        new Object[] { "it".getBytes(), "was".getBytes(), "", "the".getBytes() },
-        new Object[] { "worst".getBytes(), "of".getBytes(), "times,", new byte[0] },
+        new Object[] { new byte[0], "it".getBytes(StandardCharsets.UTF_8), "was",
+          "the".getBytes(StandardCharsets.UTF_8) },
+        new Object[] { "best".getBytes(StandardCharsets.UTF_8), new byte[0], "of",
+          "times,".getBytes(StandardCharsets.UTF_8) },
+        new Object[] { "it".getBytes(StandardCharsets.UTF_8),
+          "was".getBytes(StandardCharsets.UTF_8), "",
+          "the".getBytes(StandardCharsets.UTF_8) },
+        new Object[] { "worst".getBytes(StandardCharsets.UTF_8),
+          "of".getBytes(StandardCharsets.UTF_8), "times,", new byte[0] },
         new Object[] { new byte[0], new byte[0], "", new byte[0] },
     };
 
@@ -127,19 +133,54 @@ public class TestStruct {
     @Override
     public int compareTo(Pojo1 o) {
       int cmp = stringFieldAsc.compareTo(o.stringFieldAsc);
-      if (cmp != 0) return cmp;
+      if (cmp != 0) {
+        return cmp;
+      }
       cmp = Integer.valueOf(intFieldAsc).compareTo(Integer.valueOf(o.intFieldAsc));
-      if (cmp != 0) return cmp;
+      if (cmp != 0) {
+        return cmp;
+      }
       return Double.compare(doubleFieldAsc, o.doubleFieldAsc);
     }
 
     @Override
-    public boolean equals(Object o) {
-      if (this == o) return true;
-      if (null == o) return false;
-      if (!(o instanceof Pojo1)) return false;
-      Pojo1 that = (Pojo1) o;
-      return 0 == this.compareTo(that);
+    public int hashCode() {
+      final int prime = 31;
+      int result = 1;
+      long temp;
+      temp = Double.doubleToLongBits(doubleFieldAsc);
+      result = prime * result + (int) (temp ^ (temp >>> 32));
+      result = prime * result + intFieldAsc;
+      result = prime * result + ((stringFieldAsc == null) ? 0 : stringFieldAsc.hashCode());
+      return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) {
+        return true;
+      }
+      if (obj == null) {
+        return false;
+      }
+      if (getClass() != obj.getClass()) {
+        return false;
+      }
+      Pojo1 other = (Pojo1) obj;
+      if (Double.doubleToLongBits(doubleFieldAsc) != Double.doubleToLongBits(other.doubleFieldAsc)) {
+        return false;
+      }
+      if (intFieldAsc != other.intFieldAsc) {
+        return false;
+      }
+      if (stringFieldAsc == null) {
+        if (other.stringFieldAsc != null) {
+          return false;
+        }
+      } else if (!stringFieldAsc.equals(other.stringFieldAsc)) {
+        return false;
+      }
+      return true;
     }
   }
 
@@ -178,24 +219,69 @@ public class TestStruct {
     @Override
     public int compareTo(Pojo2 o) {
       int cmp = NULL_SAFE_BYTES_COMPARATOR.compare(byteField1Asc, o.byteField1Asc);
-      if (cmp != 0) return cmp;
+      if (cmp != 0) {
+        return cmp;
+      }
       cmp = -NULL_SAFE_BYTES_COMPARATOR.compare(byteField2Dsc, o.byteField2Dsc);
-      if (cmp != 0) return cmp;
-      if (stringFieldDsc == o.stringFieldDsc) cmp = 0;
-      else if (null == stringFieldDsc) cmp = 1;
-      else if (null == o.stringFieldDsc) cmp = -1;
+      if (cmp != 0) {
+        return cmp;
+      }
+      if (null == stringFieldDsc) {
+        cmp = 1;
+      }
+      else if (null == o.stringFieldDsc) {
+        cmp = -1;
+      }
+      else if (stringFieldDsc.equals(o.stringFieldDsc)) {
+        cmp = 0;
+      }
       else cmp = -stringFieldDsc.compareTo(o.stringFieldDsc);
-      if (cmp != 0) return cmp;
+      if (cmp != 0) {
+        return cmp;
+      }
       return -NULL_SAFE_BYTES_COMPARATOR.compare(byteField3Dsc, o.byteField3Dsc);
     }
 
     @Override
-    public boolean equals(Object o) {
-      if (this == o) return true;
-      if (null == o) return false;
-      if (!(o instanceof Pojo2)) return false;
-      Pojo2 that = (Pojo2) o;
-      return 0 == this.compareTo(that);
+    public int hashCode() {
+      final int prime = 31;
+      int result = 1;
+      result = prime * result + Arrays.hashCode(byteField1Asc);
+      result = prime * result + Arrays.hashCode(byteField2Dsc);
+      result = prime * result + Arrays.hashCode(byteField3Dsc);
+      result = prime * result + ((stringFieldDsc == null) ? 0 : stringFieldDsc.hashCode());
+      return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) {
+        return true;
+      }
+      if (obj == null) {
+        return false;
+      }
+      if (getClass() != obj.getClass()) {
+        return false;
+      }
+      Pojo2 other = (Pojo2) obj;
+      if (!Arrays.equals(byteField1Asc, other.byteField1Asc)) {
+        return false;
+      }
+      if (!Arrays.equals(byteField2Dsc, other.byteField2Dsc)) {
+        return false;
+      }
+      if (!Arrays.equals(byteField3Dsc, other.byteField3Dsc)) {
+        return false;
+      }
+      if (stringFieldDsc == null) {
+        if (other.stringFieldDsc != null) {
+          return false;
+        }
+      } else if (!stringFieldDsc.equals(other.stringFieldDsc)) {
+        return false;
+      }
+      return true;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
index ef213ee..79d2f3f 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
@@ -24,6 +24,8 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.FileWriter;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.jar.JarEntry;
@@ -127,7 +129,7 @@ public class ClassLoaderTestHelper {
     File srcDirPath = new File(srcDir.toString());
     srcDirPath.mkdirs();
     File sourceCodeFile = new File(srcDir.toString(), className + ".java");
-    BufferedWriter bw = new BufferedWriter(new FileWriter(sourceCodeFile));
+    BufferedWriter bw = Files.newBufferedWriter(sourceCodeFile.toPath(), StandardCharsets.UTF_8);
     bw.write(javaCode);
     bw.close();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
index 3ec0afb..7771f87 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
@@ -157,9 +157,9 @@ public class TestBytes extends TestCase {
     float [] floats = {-1f, 123.123f, Float.MAX_VALUE};
     for (int i = 0; i < floats.length; i++) {
       byte [] b = Bytes.toBytes(floats[i]);
-      assertEquals(floats[i], Bytes.toFloat(b));
+      assertEquals(floats[i], Bytes.toFloat(b), 0.0f);
       byte [] b2 = bytesWithOffset(b);
-      assertEquals(floats[i], Bytes.toFloat(b2, 1));
+      assertEquals(floats[i], Bytes.toFloat(b2, 1), 0.0f);
     }
   }
 
@@ -167,9 +167,9 @@ public class TestBytes extends TestCase {
     double [] doubles = {Double.MIN_VALUE, Double.MAX_VALUE};
     for (int i = 0; i < doubles.length; i++) {
       byte [] b = Bytes.toBytes(doubles[i]);
-      assertEquals(doubles[i], Bytes.toDouble(b));
+      assertEquals(doubles[i], Bytes.toDouble(b), 0.0);
       byte [] b2 = bytesWithOffset(b);
-      assertEquals(doubles[i], Bytes.toDouble(b2, 1));
+      assertEquals(doubles[i], Bytes.toDouble(b2, 1), 0.0);
     }
   }
 
@@ -431,13 +431,13 @@ public class TestBytes extends TestCase {
 
   public void testUnsignedBinarySearch(){
     byte[] bytes = new byte[]{0,5,123,127,-128,-100,-1};
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)5), 1);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)127), 3);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-128), 4);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-100), 5);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-1), 6);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)2), -1-1);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-5), -6-1);
+    Assert.assertEquals(1, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)5));
+    Assert.assertEquals(3, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)127));
+    Assert.assertEquals(4, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-128));
+    Assert.assertEquals(5, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-100));
+    Assert.assertEquals(6, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-1));
+    Assert.assertEquals(-1-1, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)2));
+    Assert.assertEquals(-6-1, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-5));
   }
 
   public void testUnsignedIncrement(){

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java
index 9b4ddb5..5b59f0a 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java
@@ -115,6 +115,8 @@ public class TestConcatenatedLists {
   }
 
   @SuppressWarnings("ModifyingCollectionWithItself")
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="DMI_VACUOUS_SELF_COLLECTION_CALL",
+    justification="Intended vacuous containsAll call on 'c'")
   private void verify(ConcatenatedLists<Long> c, int last) {
     assertEquals((last == -1), c.isEmpty());
     assertEquals(last + 1, c.size());

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
index e1048da..dedd269 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
@@ -86,8 +86,11 @@ public class TestCoprocessorClassLoader {
   private void checkingLibJarName(String jarName, String libPrefix) throws Exception {
     File tmpFolder = new File(ClassLoaderTestHelper.localDirPath(conf), "tmp");
     if (tmpFolder.exists()) { // Clean up the tmp folder
-      for (File f: tmpFolder.listFiles()) {
-        f.delete();
+      File[] files = tmpFolder.listFiles();
+      if (files != null) {
+        for (File f: files) {
+          f.delete();
+        }
       }
     }
     String className = "CheckingLibJarName";
@@ -101,10 +104,13 @@ public class TestCoprocessorClassLoader {
     ClassLoader classLoader = CoprocessorClassLoader.getClassLoader(path, parent, "112", conf);
     assertNotNull("Classloader should be created", classLoader);
     String fileToLookFor = "." + className + ".jar";
-    for (String f: tmpFolder.list()) {
-      if (f.endsWith(fileToLookFor) && f.contains(jarName)) {
-        // Cool, found it;
-        return;
+    String[] files = tmpFolder.list();
+    if (files != null) {
+      for (String f: files) {
+        if (f.endsWith(fileToLookFor) && f.contains(jarName)) {
+          // Cool, found it;
+          return;
+        }
       }
     }
     fail("Could not find the expected lib jar file");

http://git-wip-us.apache.org/repos/asf/hbase/blob/c179d514/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
index 5f575e6..3767e87 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.util;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
 import java.util.Random;
 import java.util.Set;
@@ -40,8 +41,8 @@ public class TestLoadTestKVGenerator {
   @Test
   public void testValueLength() {
     for (int i = 0; i < 1000; ++i) {
-      byte[] v = gen.generateRandomSizeValue(Integer.toString(i).getBytes(),
-          String.valueOf(rand.nextInt()).getBytes());
+      byte[] v = gen.generateRandomSizeValue(Integer.toString(i).getBytes(StandardCharsets.UTF_8),
+          String.valueOf(rand.nextInt()).getBytes(StandardCharsets.UTF_8));
       assertTrue(MIN_LEN <= v.length);
       assertTrue(v.length <= MAX_LEN);
     }
@@ -51,8 +52,8 @@ public class TestLoadTestKVGenerator {
   public void testVerification() {
     for (int i = 0; i < 1000; ++i) {
       for (int qualIndex = 0; qualIndex < 20; ++qualIndex) {
-        byte[] qual = String.valueOf(qualIndex).getBytes();
-        byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();
+        byte[] qual = String.valueOf(qualIndex).getBytes(StandardCharsets.UTF_8);
+        byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes(StandardCharsets.UTF_8);
         byte[] v = gen.generateRandomSizeValue(rowKey, qual);
         assertTrue(LoadTestKVGenerator.verify(v, rowKey, qual));
         v[0]++;