Posted to commits@hbase.apache.org by st...@apache.org on 2018/01/23 01:14:40 UTC

[5/9] hbase git commit: HBASE-19811 Fix findbugs and error-prone warnings in hbase-server (branch-2)
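
Most of the changes below follow a handful of findbugs/error-prone patterns: JUnit assertEquals arguments reordered to (expected, actual), missing @Override annotations added, lowercase long literal suffixes ("l") replaced with "L", integer arithmetic widened to long before assignment, new Long(...) boxing replaced with Long.valueOf or autoboxing, and new String(byte[]) given an explicit charset. A minimal sketch of a few of these patterns, using a hypothetical test class that is not part of this patch:

import static org.junit.Assert.assertEquals;

import java.nio.charset.StandardCharsets;

import org.junit.Test;

// Hypothetical example only; illustrates the conventions this patch applies.
public class ArgumentOrderExample {
  @Test
  public void testExpectedComesFirst() {
    byte[] prefix = "row".getBytes(StandardCharsets.UTF_8);
    // error-prone flags assertEquals(actual, expected); putting the expected
    // value first keeps failure messages accurate ("expected:<row> but was:<...>"),
    // and decoding with an explicit charset avoids platform-dependent behavior.
    assertEquals("row", new String(prefix, StandardCharsets.UTF_8));

    // Do the multiplication in long, not int, so it cannot overflow before
    // being assigned to a long.
    long maxFileSize = 1024L * 1024 * 1024 * 10;
    assertEquals(10737418240L, maxFileSize);
  }
}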

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
index 2a35365..bd0efd8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
@@ -93,7 +93,7 @@ public class TestMasterCoprocessorExceptionWithAbort {
         fail("BuggyMasterObserver failed to throw an exception.");
       } catch (IOException e) {
         assertEquals("HBaseAdmin threw an interrupted IOException as expected.",
-            e.getClass().getName(), "java.io.InterruptedIOException");
+            "java.io.InterruptedIOException", e.getClass().getName());
       }
    }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
index 431e73e..ea817ff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
@@ -932,6 +932,7 @@ public class TestMasterObserver {
       return preModifyTableActionCalled && !postCompletedModifyTableActionCalled;
     }
 
+    @Override
     public void preEnableTableAction(
         final ObserverContext<MasterCoprocessorEnvironment> ctx, final TableName tableName)
         throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
index 166dfdd..09aa4ff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
@@ -341,6 +341,7 @@ public class TestWALObserver {
     User user = HBaseTestingUtility.getDifferentUser(newConf,
         ".replay.wal.secondtime");
     user.runAs(new PrivilegedExceptionAction<Void>() {
+      @Override
       public Void run() throws Exception {
         Path p = runWALSplit(newConf);
         LOG.info("WALSplit path == " + p);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
index b0b17f9..a3f2f1c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
@@ -145,7 +145,7 @@ public class TestExecutorService {
   }
 
   public static class TestEventHandler extends EventHandler {
-    private AtomicBoolean lock;
+    private final AtomicBoolean lock;
     private AtomicInteger counter;
 
     public TestEventHandler(Server server, EventType eventType,

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
index c31eebf..0e5fdb2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
@@ -103,13 +103,13 @@ public class TestBitComparator {
   private void testOperation(byte[] data, byte[] comparatorBytes, BitComparator.BitwiseOp operator,
       int expected) {
     BitComparator comparator = new BitComparator(comparatorBytes, operator);
-    assertEquals(comparator.compareTo(data), expected);
+    assertEquals(expected, comparator.compareTo(data));
   }
 
   private void testOperation(ByteBuffer data, byte[] comparatorBytes,
       BitComparator.BitwiseOp operator, int expected) {
     BitComparator comparator = new BitComparator(comparatorBytes, operator);
-    assertEquals(comparator.compareTo(data, 0, data.capacity()), expected);
+    assertEquals(expected, comparator.compareTo(data, 0, data.capacity()));
   }
 
   @Test
@@ -142,13 +142,13 @@ public class TestBitComparator {
   private void testOperationWithOffset(byte[] data, byte[] comparatorBytes,
       BitComparator.BitwiseOp operator, int expected) {
     BitComparator comparator = new BitComparator(comparatorBytes, operator);
-    assertEquals(comparator.compareTo(data, 1, comparatorBytes.length), expected);
+    assertEquals(expected, comparator.compareTo(data, 1, comparatorBytes.length));
   }
 
   private void testOperationWithOffset(ByteBuffer data, byte[] comparatorBytes,
       BitComparator.BitwiseOp operator, int expected) {
     BitComparator comparator = new BitComparator(comparatorBytes, operator);
-    assertEquals(comparator.compareTo(data, 1, comparatorBytes.length), expected);
+    assertEquals(expected, comparator.compareTo(data, 1, comparatorBytes.length));
   }
 }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index ca2c88b..ec11ce0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -136,8 +136,8 @@ public class TestFilter {
       Bytes.toBytes("f1"), Bytes.toBytes("f2")
     };
 
-  private long numRows = ROWS_ONE.length + ROWS_TWO.length;
-  private long colsPerRow = FAMILIES.length * QUALIFIERS_ONE.length;
+  private long numRows = (long) ROWS_ONE.length + ROWS_TWO.length;
+  private long colsPerRow = (long) FAMILIES.length * QUALIFIERS_ONE.length;
 
   @Before
   public void setUp() throws Exception {
@@ -1756,15 +1756,14 @@ public class TestFilter {
         assertTrue("Qualifier mismatch", CellUtil.matchingQualifier(kv, kvs[idx]));
         assertFalse("Should not have returned whole value", CellUtil.matchingValue(kv, kvs[idx]));
         if (useLen) {
-          assertEquals("Value in result is not SIZEOF_INT",
-                     kv.getValueLength(), Bytes.SIZEOF_INT);
+          assertEquals("Value in result is not SIZEOF_INT", Bytes.SIZEOF_INT, kv.getValueLength());
           LOG.info("idx = "  + idx + ", len=" + kvs[idx].getValueLength()
               + ", actual=" +  Bytes.toInt(CellUtil.cloneValue(kv)));
           assertEquals("Scan value should be the length of the actual value. ",
                      kvs[idx].getValueLength(), Bytes.toInt(CellUtil.cloneValue(kv)) );
           LOG.info("good");
         } else {
-          assertEquals("Value in result is not empty", kv.getValueLength(), 0);
+          assertEquals("Value in result is not empty", 0, kv.getValueLength());
         }
         idx++;
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
index fdd7e77..ad5ee99 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
@@ -136,6 +136,7 @@ public class TestFilterFromRegionSide {
   public static class FirstSeveralCellsFilter extends FilterBase{
     private int count = 0;
 
+    @Override
     public void reset() {
       count = 0;
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 6574d04..2d2a425 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -768,7 +768,7 @@ public class TestFilterList {
     MockFilter filter5 = new MockFilter(ReturnCode.SKIP);
     MockFilter filter6 = new MockFilter(ReturnCode.SEEK_NEXT_USING_HINT);
     FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, filter1, filter2);
-    assertEquals(filterList.filterCell(kv1), ReturnCode.INCLUDE);
+    assertEquals(ReturnCode.INCLUDE, filterList.filterCell(kv1));
 
     filterList = new FilterList(Operator.MUST_PASS_ONE, filter2, filter3);
     assertEquals(ReturnCode.INCLUDE_AND_NEXT_COL, filterList.filterCell(kv1));
@@ -936,6 +936,7 @@ public class TestFilterList {
   private static class MockNextRowFilter extends FilterBase {
     private int hitCount = 0;
 
+    @Override
     public ReturnCode filterCell(final Cell v) throws IOException {
       hitCount++;
       return ReturnCode.NEXT_ROW;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
index 8fa41e3..d470fac 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
@@ -301,8 +301,8 @@ public class TestFilterSerialization {
 
     // Non-empty timestamp list
     LinkedList<Long> list = new LinkedList<>();
-    list.add(new Long(System.currentTimeMillis()));
-    list.add(new Long(System.currentTimeMillis()));
+    list.add(System.currentTimeMillis());
+    list.add(System.currentTimeMillis());
     timestampsFilter = new TimestampsFilter(list);
     assertTrue(timestampsFilter.areSerializedFieldsEqual(
       ProtobufUtil.toFilter(ProtobufUtil.toFilter(timestampsFilter))));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
index 5f25b49..25ea358 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
@@ -86,7 +86,7 @@ public class TestFuzzyRowFilterEndToEnd {
     conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
       ConstantSizeRegionSplitPolicy.class.getName());
     // set no splits
-    conf.setLong(HConstants.HREGION_MAX_FILESIZE, ((long) 1024) * 1024 * 1024 * 10);
+    conf.setLong(HConstants.HREGION_MAX_FILESIZE, (1024L) * 1024 * 1024 * 10);
 
     TEST_UTIL.startMiniCluster();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
index ebccc34..768ab7a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
@@ -162,20 +162,24 @@ public class TestInvocationRecordFilter {
 
     private List<Cell> visitedKeyValues = new ArrayList<>();
 
+    @Override
     public void reset() {
       visitedKeyValues.clear();
     }
 
+    @Override
     public ReturnCode filterCell(final Cell ignored) {
       visitedKeyValues.add(ignored);
       return ReturnCode.INCLUDE;
     }
 
+    @Override
     public void filterRowCells(List<Cell> kvs) {
       kvs.clear();
       kvs.addAll(visitedKeyValues);
     }
 
+    @Override
     public boolean hasFilterRow() {
       return true;
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
index bcd239d..c5200f9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
@@ -18,9 +18,12 @@
  */
 package org.apache.hadoop.hbase.filter;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -89,7 +92,7 @@ public class TestParseFilter {
     String filterString = " PrefixFilter('row' ) ";
     PrefixFilter prefixFilter = doTestFilter(filterString, PrefixFilter.class);
     byte [] prefix = prefixFilter.getPrefix();
-    assertEquals(new String(prefix), "row");
+    assertEquals("row", new String(prefix, StandardCharsets.UTF_8));
 
 
     filterString = " PrefixFilter(row)";
@@ -107,7 +110,7 @@ public class TestParseFilter {
     ColumnPrefixFilter columnPrefixFilter =
       doTestFilter(filterString, ColumnPrefixFilter.class);
     byte [] columnPrefix = columnPrefixFilter.getPrefix();
-    assertEquals(new String(columnPrefix), "qualifier");
+    assertEquals("qualifier", new String(columnPrefix, StandardCharsets.UTF_8));
   }
 
   @Test
@@ -116,8 +119,8 @@ public class TestParseFilter {
     MultipleColumnPrefixFilter multipleColumnPrefixFilter =
       doTestFilter(filterString, MultipleColumnPrefixFilter.class);
     byte [][] prefixes = multipleColumnPrefixFilter.getPrefix();
-    assertEquals(new String(prefixes[0]), "qualifier1");
-    assertEquals(new String(prefixes[1]), "qualifier2");
+    assertEquals("qualifier1", new String(prefixes[0], StandardCharsets.UTF_8));
+    assertEquals("qualifier2", new String(prefixes[1], StandardCharsets.UTF_8));
   }
 
   @Test
@@ -126,7 +129,7 @@ public class TestParseFilter {
     ColumnCountGetFilter columnCountGetFilter =
       doTestFilter(filterString, ColumnCountGetFilter.class);
     int limit = columnCountGetFilter.getLimit();
-    assertEquals(limit, 4);
+    assertEquals(4, limit);
 
     filterString = " ColumnCountGetFilter('abc')";
     try {
@@ -151,7 +154,7 @@ public class TestParseFilter {
     PageFilter pageFilter =
       doTestFilter(filterString, PageFilter.class);
     long pageSize = pageFilter.getPageSize();
-    assertEquals(pageSize, 4);
+    assertEquals(4, pageSize);
 
     filterString = " PageFilter('123')";
     try {
@@ -168,9 +171,9 @@ public class TestParseFilter {
     ColumnPaginationFilter columnPaginationFilter =
       doTestFilter(filterString, ColumnPaginationFilter.class);
     int limit = columnPaginationFilter.getLimit();
-    assertEquals(limit, 4);
+    assertEquals(4, limit);
     int offset = columnPaginationFilter.getOffset();
-    assertEquals(offset, 6);
+    assertEquals(6, offset);
 
     filterString = " ColumnPaginationFilter('124')";
     try {
@@ -203,7 +206,7 @@ public class TestParseFilter {
     InclusiveStopFilter inclusiveStopFilter =
       doTestFilter(filterString, InclusiveStopFilter.class);
     byte [] stopRowKey = inclusiveStopFilter.getStopRowKey();
-    assertEquals(new String(stopRowKey), "row 3");
+    assertEquals("row 3", new String(stopRowKey, StandardCharsets.UTF_8));
   }
 
 
@@ -213,13 +216,13 @@ public class TestParseFilter {
     TimestampsFilter timestampsFilter =
       doTestFilter(filterString, TimestampsFilter.class);
     List<Long> timestamps = timestampsFilter.getTimestamps();
-    assertEquals(timestamps.size(), 2);
-    assertEquals(timestamps.get(0), new Long(6));
+    assertEquals(2, timestamps.size());
+    assertEquals(Long.valueOf(6), timestamps.get(0));
 
     filterString = "TimestampsFilter()";
     timestampsFilter = doTestFilter(filterString, TimestampsFilter.class);
     timestamps = timestampsFilter.getTimestamps();
-    assertEquals(timestamps.size(), 0);
+    assertEquals(0, timestamps.size());
 
     filterString = "TimestampsFilter(9223372036854775808, 6)";
     try {
@@ -246,7 +249,7 @@ public class TestParseFilter {
     assertEquals(CompareOperator.EQUAL, rowFilter.getCompareOperator());
     assertTrue(rowFilter.getComparator() instanceof BinaryComparator);
     BinaryComparator binaryComparator = (BinaryComparator) rowFilter.getComparator();
-    assertEquals("regionse", new String(binaryComparator.getValue()));
+    assertEquals("regionse", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
   }
 
   @Test
@@ -258,7 +261,7 @@ public class TestParseFilter {
     assertTrue(familyFilter.getComparator() instanceof BinaryPrefixComparator);
     BinaryPrefixComparator binaryPrefixComparator =
       (BinaryPrefixComparator) familyFilter.getComparator();
-    assertEquals("pre", new String(binaryPrefixComparator.getValue()));
+    assertEquals("pre", new String(binaryPrefixComparator.getValue(), StandardCharsets.UTF_8));
   }
 
   @Test
@@ -270,7 +273,7 @@ public class TestParseFilter {
     assertTrue(qualifierFilter.getComparator() instanceof RegexStringComparator);
     RegexStringComparator regexStringComparator =
       (RegexStringComparator) qualifierFilter.getComparator();
-    assertEquals("pre*", new String(regexStringComparator.getValue()));
+    assertEquals("pre*", new String(regexStringComparator.getValue(), StandardCharsets.UTF_8));
   }
 
   @Test
@@ -282,7 +285,7 @@ public class TestParseFilter {
     assertTrue(valueFilter.getComparator() instanceof SubstringComparator);
     SubstringComparator substringComparator =
       (SubstringComparator) valueFilter.getComparator();
-    assertEquals("pre", new String(substringComparator.getValue()));
+    assertEquals("pre", new String(substringComparator.getValue(), StandardCharsets.UTF_8));
   }
 
   @Test
@@ -290,8 +293,8 @@ public class TestParseFilter {
     String filterString = "ColumnRangeFilter('abc', true, 'xyz', false)";
     ColumnRangeFilter columnRangeFilter =
       doTestFilter(filterString, ColumnRangeFilter.class);
-    assertEquals("abc", new String(columnRangeFilter.getMinColumn()));
-    assertEquals("xyz", new String(columnRangeFilter.getMaxColumn()));
+    assertEquals("abc", new String(columnRangeFilter.getMinColumn(), StandardCharsets.UTF_8));
+    assertEquals("xyz", new String(columnRangeFilter.getMaxColumn(), StandardCharsets.UTF_8));
     assertTrue(columnRangeFilter.isMinColumnInclusive());
     assertFalse(columnRangeFilter.isMaxColumnInclusive());
   }
@@ -301,13 +304,14 @@ public class TestParseFilter {
     String filterString = "DependentColumnFilter('family', 'qualifier', true, =, 'binary:abc')";
     DependentColumnFilter dependentColumnFilter =
       doTestFilter(filterString, DependentColumnFilter.class);
-    assertEquals("family", new String(dependentColumnFilter.getFamily()));
-    assertEquals("qualifier", new String(dependentColumnFilter.getQualifier()));
+    assertEquals("family", new String(dependentColumnFilter.getFamily(), StandardCharsets.UTF_8));
+    assertEquals("qualifier",
+        new String(dependentColumnFilter.getQualifier(), StandardCharsets.UTF_8));
     assertTrue(dependentColumnFilter.getDropDependentColumn());
     assertEquals(CompareOperator.EQUAL, dependentColumnFilter.getCompareOperator());
     assertTrue(dependentColumnFilter.getComparator() instanceof BinaryComparator);
     BinaryComparator binaryComparator = (BinaryComparator)dependentColumnFilter.getComparator();
-    assertEquals("abc", new String(binaryComparator.getValue()));
+    assertEquals("abc", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
   }
 
   @Test
@@ -316,25 +320,27 @@ public class TestParseFilter {
       "('family', 'qualifier', >=, 'binary:a', true, false)";
     SingleColumnValueFilter singleColumnValueFilter =
       doTestFilter(filterString, SingleColumnValueFilter.class);
-    assertEquals("family", new String(singleColumnValueFilter.getFamily()));
-    assertEquals("qualifier", new String(singleColumnValueFilter.getQualifier()));
-    assertEquals(singleColumnValueFilter.getCompareOperator(), CompareOperator.GREATER_OR_EQUAL);
+    assertEquals("family", new String(singleColumnValueFilter.getFamily(), StandardCharsets.UTF_8));
+    assertEquals("qualifier",
+        new String(singleColumnValueFilter.getQualifier(), StandardCharsets.UTF_8));
+    assertEquals(CompareOperator.GREATER_OR_EQUAL, singleColumnValueFilter.getCompareOperator());
     assertTrue(singleColumnValueFilter.getComparator() instanceof BinaryComparator);
     BinaryComparator binaryComparator = (BinaryComparator) singleColumnValueFilter.getComparator();
-    assertEquals(new String(binaryComparator.getValue()), "a");
+    assertEquals("a", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
     assertTrue(singleColumnValueFilter.getFilterIfMissing());
     assertFalse(singleColumnValueFilter.getLatestVersionOnly());
 
 
     filterString = "SingleColumnValueFilter ('family', 'qualifier', >, 'binaryprefix:a')";
     singleColumnValueFilter = doTestFilter(filterString, SingleColumnValueFilter.class);
-    assertEquals("family", new String(singleColumnValueFilter.getFamily()));
-    assertEquals("qualifier", new String(singleColumnValueFilter.getQualifier()));
-    assertEquals(singleColumnValueFilter.getCompareOperator(), CompareOperator.GREATER);
+    assertEquals("family", new String(singleColumnValueFilter.getFamily(), StandardCharsets.UTF_8));
+    assertEquals("qualifier",
+        new String(singleColumnValueFilter.getQualifier(), StandardCharsets.UTF_8));
+    assertEquals(CompareOperator.GREATER, singleColumnValueFilter.getCompareOperator());
     assertTrue(singleColumnValueFilter.getComparator() instanceof BinaryPrefixComparator);
     BinaryPrefixComparator binaryPrefixComparator =
       (BinaryPrefixComparator) singleColumnValueFilter.getComparator();
-    assertEquals(new String(binaryPrefixComparator.getValue()), "a");
+    assertEquals("a", new String(binaryPrefixComparator.getValue(), StandardCharsets.UTF_8));
     assertFalse(singleColumnValueFilter.getFilterIfMissing());
     assertTrue(singleColumnValueFilter.getLatestVersionOnly());
   }
@@ -345,10 +351,13 @@ public class TestParseFilter {
       "SingleColumnValueExcludeFilter ('family', 'qualifier', <, 'binaryprefix:a')";
     SingleColumnValueExcludeFilter singleColumnValueExcludeFilter =
       doTestFilter(filterString, SingleColumnValueExcludeFilter.class);
-    assertEquals(singleColumnValueExcludeFilter.getCompareOperator(), CompareOperator.LESS);
-    assertEquals("family", new String(singleColumnValueExcludeFilter.getFamily()));
-    assertEquals("qualifier", new String(singleColumnValueExcludeFilter.getQualifier()));
-    assertEquals(new String(singleColumnValueExcludeFilter.getComparator().getValue()), "a");
+    assertEquals(CompareOperator.LESS, singleColumnValueExcludeFilter.getCompareOperator());
+    assertEquals("family",
+        new String(singleColumnValueExcludeFilter.getFamily(), StandardCharsets.UTF_8));
+    assertEquals("qualifier",
+        new String(singleColumnValueExcludeFilter.getQualifier(), StandardCharsets.UTF_8));
+    assertEquals("a", new String(singleColumnValueExcludeFilter.getComparator().getValue(),
+        StandardCharsets.UTF_8));
     assertFalse(singleColumnValueExcludeFilter.getFilterIfMissing());
     assertTrue(singleColumnValueExcludeFilter.getLatestVersionOnly());
 
@@ -356,14 +365,16 @@ public class TestParseFilter {
       "('family', 'qualifier', <=, 'binaryprefix:a', true, false)";
     singleColumnValueExcludeFilter =
       doTestFilter(filterString, SingleColumnValueExcludeFilter.class);
-    assertEquals("family", new String(singleColumnValueExcludeFilter.getFamily()));
-    assertEquals("qualifier", new String(singleColumnValueExcludeFilter.getQualifier()));
-    assertEquals(singleColumnValueExcludeFilter.getCompareOperator(),
-      CompareOperator.LESS_OR_EQUAL);
+    assertEquals("family",
+        new String(singleColumnValueExcludeFilter.getFamily(), StandardCharsets.UTF_8));
+    assertEquals("qualifier",
+        new String(singleColumnValueExcludeFilter.getQualifier(), StandardCharsets.UTF_8));
+    assertEquals(CompareOperator.LESS_OR_EQUAL,
+        singleColumnValueExcludeFilter.getCompareOperator());
     assertTrue(singleColumnValueExcludeFilter.getComparator() instanceof BinaryPrefixComparator);
     BinaryPrefixComparator binaryPrefixComparator =
       (BinaryPrefixComparator) singleColumnValueExcludeFilter.getComparator();
-    assertEquals(new String(binaryPrefixComparator.getValue()), "a");
+    assertEquals("a", new String(binaryPrefixComparator.getValue(), StandardCharsets.UTF_8));
     assertTrue(singleColumnValueExcludeFilter.getFilterIfMissing());
     assertFalse(singleColumnValueExcludeFilter.getLatestVersionOnly());
   }
@@ -379,7 +390,7 @@ public class TestParseFilter {
     assertEquals(CompareOperator.EQUAL, valueFilter.getCompareOperator());
     assertTrue(valueFilter.getComparator() instanceof BinaryComparator);
     BinaryComparator binaryComparator = (BinaryComparator) valueFilter.getComparator();
-    assertEquals("0", new String(binaryComparator.getValue()));
+    assertEquals("0", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
   }
 
   @Test
@@ -393,7 +404,7 @@ public class TestParseFilter {
     assertEquals(CompareOperator.NOT_EQUAL, rowFilter.getCompareOperator());
     assertTrue(rowFilter.getComparator() instanceof BinaryComparator);
     BinaryComparator binaryComparator = (BinaryComparator) rowFilter.getComparator();
-    assertEquals("row1", new String(binaryComparator.getValue()));
+    assertEquals("row1", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
   }
 
   @Test
@@ -407,7 +418,7 @@ public class TestParseFilter {
     assertTrue(filters.get(1) instanceof FirstKeyOnlyFilter);
     PrefixFilter PrefixFilter = (PrefixFilter) filters.get(0);
     byte [] prefix = PrefixFilter.getPrefix();
-    assertEquals(new String(prefix), "realtime");
+    assertEquals("realtime", new String(prefix, StandardCharsets.UTF_8));
     FirstKeyOnlyFilter firstKeyOnlyFilter = (FirstKeyOnlyFilter) filters.get(1);
   }
 
@@ -420,7 +431,7 @@ public class TestParseFilter {
     ArrayList<Filter> filterListFilters = (ArrayList<Filter>) filterList.getFilters();
     assertTrue(filterListFilters.get(0) instanceof FilterList);
     assertTrue(filterListFilters.get(1) instanceof FamilyFilter);
-    assertEquals(filterList.getOperator(), FilterList.Operator.MUST_PASS_ONE);
+    assertEquals(FilterList.Operator.MUST_PASS_ONE, filterList.getOperator());
 
     filterList = (FilterList) filterListFilters.get(0);
     FamilyFilter familyFilter = (FamilyFilter) filterListFilters.get(1);
@@ -428,22 +439,22 @@ public class TestParseFilter {
     filterListFilters = (ArrayList<Filter>)filterList.getFilters();
     assertTrue(filterListFilters.get(0) instanceof PrefixFilter);
     assertTrue(filterListFilters.get(1) instanceof QualifierFilter);
-    assertEquals(filterList.getOperator(), FilterList.Operator.MUST_PASS_ALL);
+    assertEquals(FilterList.Operator.MUST_PASS_ALL, filterList.getOperator());
 
     assertEquals(CompareOperator.EQUAL, familyFilter.getCompareOperator());
     assertTrue(familyFilter.getComparator() instanceof BinaryComparator);
     BinaryComparator binaryComparator = (BinaryComparator) familyFilter.getComparator();
-    assertEquals("qualifier", new String(binaryComparator.getValue()));
+    assertEquals("qualifier", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
 
     PrefixFilter prefixFilter = (PrefixFilter) filterListFilters.get(0);
     byte [] prefix = prefixFilter.getPrefix();
-    assertEquals(new String(prefix), "realtime");
+    assertEquals("realtime", new String(prefix, StandardCharsets.UTF_8));
 
     QualifierFilter qualifierFilter = (QualifierFilter) filterListFilters.get(1);
     assertEquals(CompareOperator.GREATER_OR_EQUAL, qualifierFilter.getCompareOperator());
     assertTrue(qualifierFilter.getComparator() instanceof BinaryComparator);
     binaryComparator = (BinaryComparator) qualifierFilter.getComparator();
-    assertEquals("e", new String(binaryComparator.getValue()));
+    assertEquals("e", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
   }
 
   @Test
@@ -466,7 +477,7 @@ public class TestParseFilter {
 
     ColumnPrefixFilter columnPrefixFilter = (ColumnPrefixFilter) filters.get(0);
     byte [] columnPrefix = columnPrefixFilter.getPrefix();
-    assertEquals(new String(columnPrefix), "realtime");
+    assertEquals("realtime", new String(columnPrefix, StandardCharsets.UTF_8));
 
     FirstKeyOnlyFilter firstKeyOnlyFilter = (FirstKeyOnlyFilter) filters.get(1);
 
@@ -477,7 +488,7 @@ public class TestParseFilter {
     assertTrue(familyFilter.getComparator() instanceof SubstringComparator);
     SubstringComparator substringComparator =
       (SubstringComparator) familyFilter.getComparator();
-    assertEquals("hihi", new String(substringComparator.getValue()));
+    assertEquals("hihi", new String(substringComparator.getValue(), StandardCharsets.UTF_8));
   }
 
   @Test
@@ -497,7 +508,7 @@ public class TestParseFilter {
     SkipFilter skipFilter = (SkipFilter) filters.get(2);
 
     byte [] columnPrefix = columnPrefixFilter.getPrefix();
-    assertEquals(new String(columnPrefix), "realtime");
+    assertEquals("realtime", new String(columnPrefix, StandardCharsets.UTF_8));
 
     assertTrue(skipFilter.getFilter() instanceof FamilyFilter);
     FamilyFilter familyFilter = (FamilyFilter) skipFilter.getFilter();
@@ -506,7 +517,7 @@ public class TestParseFilter {
     assertTrue(familyFilter.getComparator() instanceof SubstringComparator);
     SubstringComparator substringComparator =
       (SubstringComparator) familyFilter.getComparator();
-    assertEquals("hihi", new String(substringComparator.getValue()));
+    assertEquals("hihi", new String(substringComparator.getValue(), StandardCharsets.UTF_8));
   }
 
   @Test
@@ -537,7 +548,7 @@ public class TestParseFilter {
   }
 
   @Test
-  public void testIncorrectComparatorType () throws IOException {
+  public void testIncorrectComparatorType() throws IOException {
     String  filterString = "RowFilter ('>=' , 'binaryoperator:region')";
     try {
       doTestFilter(filterString, RowFilter.class);
@@ -584,7 +595,7 @@ public class TestParseFilter {
 
     PrefixFilter prefixFilter = (PrefixFilter)filters.get(0);
     byte [] prefix = prefixFilter.getPrefix();
-    assertEquals(new String(prefix), "realtime");
+    assertEquals("realtime", new String(prefix, StandardCharsets.UTF_8));
   }
 
   @Test
@@ -606,41 +617,40 @@ public class TestParseFilter {
 
     PrefixFilter prefixFilter = (PrefixFilter)filters.get(0);
     byte [] prefix = prefixFilter.getPrefix();
-    assertEquals(new String(prefix), "realtime");
+    assertEquals("realtime", new String(prefix, StandardCharsets.UTF_8));
 
     SkipFilter skipFilter = (SkipFilter)filters.get(1);
     assertTrue(skipFilter.getFilter() instanceof FirstKeyOnlyFilter);
   }
 
   @Test
-  public void testUnescapedQuote1 () throws IOException {
+  public void testUnescapedQuote1() throws IOException {
     String filterString = "InclusiveStopFilter ('row''3')";
     InclusiveStopFilter inclusiveStopFilter =
       doTestFilter(filterString, InclusiveStopFilter.class);
     byte [] stopRowKey = inclusiveStopFilter.getStopRowKey();
-    assertEquals(new String(stopRowKey), "row'3");
+    assertEquals("row'3", new String(stopRowKey, StandardCharsets.UTF_8));
   }
 
   @Test
-  public void testUnescapedQuote2 () throws IOException {
+  public void testUnescapedQuote2() throws IOException {
     String filterString = "InclusiveStopFilter ('row''3''')";
     InclusiveStopFilter inclusiveStopFilter =
       doTestFilter(filterString, InclusiveStopFilter.class);
     byte [] stopRowKey = inclusiveStopFilter.getStopRowKey();
-    assertEquals(new String(stopRowKey), "row'3'");
+    assertEquals("row'3'", new String(stopRowKey, StandardCharsets.UTF_8));
   }
 
   @Test
-  public void testUnescapedQuote3 () throws IOException {
+  public void testUnescapedQuote3() throws IOException {
     String filterString = " InclusiveStopFilter ('''')";
-    InclusiveStopFilter inclusiveStopFilter =
-      doTestFilter(filterString, InclusiveStopFilter.class);
+    InclusiveStopFilter inclusiveStopFilter = doTestFilter(filterString, InclusiveStopFilter.class);
     byte [] stopRowKey = inclusiveStopFilter.getStopRowKey();
-    assertEquals(new String(stopRowKey), "'");
+    assertEquals("'", new String(stopRowKey, StandardCharsets.UTF_8));
   }
 
   @Test
-  public void testIncorrectFilterString () throws IOException {
+  public void testIncorrectFilterString() throws IOException {
     String filterString = "()";
     byte [] filterStringAsByteArray = Bytes.toBytes(filterString);
     try {
@@ -652,10 +662,9 @@ public class TestParseFilter {
   }
 
   @Test
-  public void testCorrectFilterString () throws IOException {
+  public void testCorrectFilterString() throws IOException {
     String filterString = "(FirstKeyOnlyFilter())";
-    FirstKeyOnlyFilter firstKeyOnlyFilter =
-      doTestFilter(filterString, FirstKeyOnlyFilter.class);
+    FirstKeyOnlyFilter firstKeyOnlyFilter = doTestFilter(filterString, FirstKeyOnlyFilter.class);
   }
   
   @Test
@@ -665,7 +674,8 @@ public class TestParseFilter {
     assertTrue(f.getSupportedFilters().contains("MyFilter"));
   }
 
-  private <T extends Filter> T doTestFilter(String filterString, Class<T> clazz) throws IOException {
+  private <T extends Filter> T doTestFilter(String filterString, Class<T> clazz)
+      throws IOException {
     byte [] filterStringAsByteArray = Bytes.toBytes(filterString);
     filter = f.parseFilterString(filterStringAsByteArray);
     assertEquals(clazz, filter.getClass());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
index 764d033..c6b1b5f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
@@ -68,7 +68,7 @@ public class TestSingleColumnValueExcludeFilter {
 
     filter.filterRowCells(kvs);
 
-    assertEquals("resultSize", kvs.size(), 2);
+    assertEquals("resultSize", 2, kvs.size());
     assertTrue("leftKV1", CellComparatorImpl.COMPARATOR.compare(kvs.get(0), c) == 0);
     assertTrue("leftKV2", CellComparatorImpl.COMPARATOR.compare(kvs.get(1), c) == 0);
     assertFalse("allRemainingWhenMatch", filter.filterAllRemaining());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
index b24d30b..5ba7dfa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
 import org.apache.hadoop.hdfs.DFSClient;
@@ -248,7 +249,7 @@ public class TestBlockReorder {
    */
   @Test()
   public void testHBaseCluster() throws Exception {
-    byte[] sb = "sb".getBytes();
+    byte[] sb = Bytes.toBytes("sb");
     htu.startMiniZKCluster();
 
     MiniHBaseCluster hbm = htu.startMiniHBaseCluster(1, 1);
@@ -442,7 +443,7 @@ public class TestBlockReorder {
       do {
         l = getNamenode(dfs.getClient()).getBlockLocations(fileName, 0, 1);
         Assert.assertNotNull(l.getLocatedBlocks());
-        Assert.assertEquals(l.getLocatedBlocks().size(), 1);
+        Assert.assertEquals(1, l.getLocatedBlocks().size());
         Assert.assertTrue("Expecting " + repCount + " , got " + l.get(0).getLocations().length,
             System.currentTimeMillis() < max);
       } while (l.get(0).getLocations().length != repCount);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
index 33bac39..7747bdb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
@@ -125,7 +125,7 @@ public class TestHFileLink {
             HFileLink.parseBackReferenceName(encodedRegion+"."+
                 tableName.getNameAsString().replace(TableName.NAMESPACE_DELIM, '='));
         assertEquals(parsedRef.getFirst(), tableName);
-        assertEquals(parsedRef.getSecond(), encodedRegion);
+        assertEquals(encodedRegion, parsedRef.getSecond());
 
         //verify resolving back reference
         Path storeFileDir =  new Path(refLinkDir, encodedRegion+"."+

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
index 94df090..b3148c7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
@@ -546,9 +546,9 @@ public class TestHeapSize  {
       assertTrue(ClassSize.OBJECT == 12 || ClassSize.OBJECT == 16); // depending on CompressedOops
     }
     if (ClassSize.useUnsafeLayout()) {
-      assertEquals(ClassSize.OBJECT + 4, ClassSize.ARRAY);
+      assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 4);
     } else {
-      assertEquals(ClassSize.OBJECT + 8, ClassSize.ARRAY);
+      assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 8);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
index f43f147..82a50c4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
@@ -72,16 +72,16 @@ public class TestBufferedDataBlockEncoder {
 
   @Test
   public void testCommonPrefixComparators() {
-    KeyValue kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put);
-    KeyValue kv2 = new KeyValue(row1, fam_1_2, qual1, 1l, Type.Maximum);
+    KeyValue kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put);
+    KeyValue kv2 = new KeyValue(row1, fam_1_2, qual1, 1L, Type.Maximum);
     assertTrue((BufferedDataBlockEncoder.compareCommonFamilyPrefix(kv1, kv2, 4) < 0));
 
-    kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put);
-    kv2 = new KeyValue(row_1_0, fam_1_2, qual1, 1l, Type.Maximum);
+    kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put);
+    kv2 = new KeyValue(row_1_0, fam_1_2, qual1, 1L, Type.Maximum);
     assertTrue((BufferedDataBlockEncoder.compareCommonRowPrefix(kv1, kv2, 4) < 0));
 
-    kv1 = new KeyValue(row1, fam1, qual2, 1l, Type.Put);
-    kv2 = new KeyValue(row1, fam1, qual1, 1l, Type.Maximum);
+    kv1 = new KeyValue(row1, fam1, qual2, 1L, Type.Put);
+    kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Maximum);
     assertTrue((BufferedDataBlockEncoder.compareCommonQualifierPrefix(kv1, kv2, 4) > 0));
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
index f41db93..cbbc9dc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
@@ -82,7 +82,7 @@ public class TestDataBlockEncoders {
   static final byte[] HFILEBLOCK_DUMMY_HEADER = new byte[HConstants.HFILEBLOCK_HEADER_SIZE];
 
   private RedundantKVGenerator generator = new RedundantKVGenerator();
-  private Random randomizer = new Random(42l);
+  private Random randomizer = new Random(42L);
 
   private final boolean includesMemstoreTS;
   private final boolean includesTags;
@@ -129,14 +129,14 @@ public class TestDataBlockEncoders {
     byte[] qualifier = new byte[0];
     byte[] value = new byte[0];
     if (!includesTags) {
-      kvList.add(new KeyValue(row, family, qualifier, 0l, value));
-      kvList.add(new KeyValue(row, family, qualifier, 0l, value));
+      kvList.add(new KeyValue(row, family, qualifier, 0L, value));
+      kvList.add(new KeyValue(row, family, qualifier, 0L, value));
     } else {
       byte[] metaValue1 = Bytes.toBytes("metaValue1");
       byte[] metaValue2 = Bytes.toBytes("metaValue2");
-      kvList.add(new KeyValue(row, family, qualifier, 0l, value,
+      kvList.add(new KeyValue(row, family, qualifier, 0L, value,
           new Tag[] { new ArrayBackedTag((byte) 1, metaValue1) }));
-      kvList.add(new KeyValue(row, family, qualifier, 0l, value,
+      kvList.add(new KeyValue(row, family, qualifier, 0L, value,
           new Tag[] { new ArrayBackedTag((byte) 1, metaValue2) }));
     }
     testEncodersOnDataset(kvList, includesMemstoreTS, includesTags);
@@ -158,13 +158,13 @@ public class TestDataBlockEncoders {
     if (includesTags) {
       byte[] metaValue1 = Bytes.toBytes("metaValue1");
       byte[] metaValue2 = Bytes.toBytes("metaValue2");
-      kvList.add(new KeyValue(row, family, qualifier, 0l, value,
+      kvList.add(new KeyValue(row, family, qualifier, 0L, value,
           new Tag[] { new ArrayBackedTag((byte) 1, metaValue1) }));
-      kvList.add(new KeyValue(row, family, qualifier, 0l, value,
+      kvList.add(new KeyValue(row, family, qualifier, 0L, value,
           new Tag[] { new ArrayBackedTag((byte) 1, metaValue2) }));
     } else {
-      kvList.add(new KeyValue(row, family, qualifier, -1l, Type.Put, value));
-      kvList.add(new KeyValue(row, family, qualifier, -2l, Type.Put, value));
+      kvList.add(new KeyValue(row, family, qualifier, -1L, Type.Put, value));
+      kvList.add(new KeyValue(row, family, qualifier, -2L, Type.Put, value));
     }
     testEncodersOnDataset(kvList, includesMemstoreTS, includesTags);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
index e62af9e..d46a553 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
@@ -61,10 +61,12 @@ public class TestLoadAndSwitchEncodeOnDisk extends
     conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);
   }
 
+  @Override
   protected int numKeys() {
     return 3000;
   }
 
+  @Override
   @Test(timeout=TIMEOUT_MS)
   public void loadTest() throws Exception {
     Admin admin = TEST_UTIL.getAdmin();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java
index e0d2a9b..d304e74 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java
@@ -97,12 +97,12 @@ public class TestSeekBeforeWithReverseScan {
     while (scanner.next(res)) {
       count++;
     }
-    assertEquals(Bytes.toString(res.get(0).getRowArray(), res.get(0).getRowOffset(), res.get(0)
-        .getRowLength()), "b");
-    assertEquals(Bytes.toString(res.get(1).getRowArray(), res.get(1).getRowOffset(), res.get(1)
-        .getRowLength()), "ab");
-    assertEquals(Bytes.toString(res.get(2).getRowArray(), res.get(2).getRowOffset(), res.get(2)
-        .getRowLength()), "a");
+    assertEquals("b", Bytes.toString(res.get(0).getRowArray(), res.get(0).getRowOffset(),
+        res.get(0).getRowLength()));
+    assertEquals("ab", Bytes.toString(res.get(1).getRowArray(), res.get(1).getRowOffset(),
+        res.get(1).getRowLength()));
+    assertEquals("a", Bytes.toString(res.get(2).getRowArray(), res.get(2).getRowOffset(),
+        res.get(2).getRowLength()));
     assertEquals(3, count);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
index 462f77a..4300387 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
@@ -302,7 +302,7 @@ public class CacheTestUtils {
 
     @Override
     public long heapSize() {
-      return 4 + buf.length;
+      return 4L + buf.length;
     }
 
     @Override
@@ -373,9 +373,10 @@ public class CacheTestUtils {
 
       String strKey;
       /* No conflicting keys */
-      for (strKey = new Long(rand.nextLong()).toString(); !usedStrings
-          .add(strKey); strKey = new Long(rand.nextLong()).toString())
-        ;
+      strKey = Long.toString(rand.nextLong());
+      while (!usedStrings.add(strKey)) {
+        strKey = Long.toString(rand.nextLong());
+      }
 
       returnedBlocks[i] = new HFileBlockPair();
       returnedBlocks[i].blockName = new BlockCacheKey(strKey, 0);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java
index a133cb4..aaf1711 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java
@@ -98,6 +98,7 @@ public class NanoTimer {
    *
    * Note: If timer is never started, "ERR" will be returned.
    */
+  @Override
   public String toString() {
     if (!readable()) {
       return "ERR";

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index 611c524..6d3d4ab 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -297,7 +297,7 @@ public class TestCacheOnWrite {
         // block we cached at write-time and block read from file should be identical
         assertEquals(block.getChecksumType(), fromCache.getChecksumType());
         assertEquals(block.getBlockType(), fromCache.getBlockType());
-        assertNotEquals(block.getBlockType(), BlockType.ENCODED_DATA);
+        assertNotEquals(BlockType.ENCODED_DATA, block.getBlockType());
         assertEquals(block.getOnDiskSizeWithHeader(), fromCache.getOnDiskSizeWithHeader());
         assertEquals(block.getOnDiskSizeWithoutHeader(), fromCache.getOnDiskSizeWithoutHeader());
         assertEquals(

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
index 9c36788..a8b7d1f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
@@ -124,7 +124,7 @@ public class TestHFileBlock {
   static int writeTestKeyValues(HFileBlock.Writer hbw, int seed, boolean includesMemstoreTS,
       boolean useTag) throws IOException {
     List<KeyValue> keyValues = new ArrayList<>();
-    Random randomizer = new Random(42l + seed); // just any fixed number
+    Random randomizer = new Random(42L + seed); // just any fixed number
 
     // generate keyValues
     for (int i = 0; i < NUM_KEYVALUES; ++i) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
index bec774e..a049b329 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
@@ -262,7 +262,7 @@ public class TestHFileEncryption {
           assertTrue("Initial seekTo failed", scanner.seekTo());
           for (i = 0; i < 100; i++) {
             KeyValue kv = testKvs.get(RNG.nextInt(testKvs.size()));
-            assertEquals("Unable to find KV as expected: " + kv, scanner.seekTo(kv), 0);
+            assertEquals("Unable to find KV as expected: " + kv, 0, scanner.seekTo(kv));
           }
         } finally {
           scanner.close();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
index af169f5..8429ee8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
@@ -280,10 +280,10 @@ public class TestLruBlockCache {
     }
 
     // A single eviction run should have occurred
-    assertEquals(cache.getStats().getEvictionCount(), 1);
+    assertEquals(1, cache.getStats().getEvictionCount());
 
     // We expect two entries evicted
-    assertEquals(cache.getStats().getEvictedCount(), 2);
+    assertEquals(2, cache.getStats().getEvictedCount());
 
     // Our expected size overruns acceptable limit
     assertTrue(expectedCacheSize >

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
index 9b2602f..3873a6c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
@@ -93,7 +93,7 @@ public class TestBucketCache {
   String ioEngineName = "offheap";
   String persistencePath = null;
 
-  private class MockedBucketCache extends BucketCache {
+  private static class MockedBucketCache extends BucketCache {
 
     public MockedBucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,
         int writerThreads, int writerQLen, String persistencePath) throws FileNotFoundException,
@@ -314,12 +314,18 @@ public class TestBucketCache {
     BucketCache cache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize,
         constructedBlockSizes, writeThreads, writerQLen, persistencePath, 100, conf);
 
-    assertEquals(BucketCache.ACCEPT_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getAcceptableFactor(), 0.9f, 0);
-    assertEquals(BucketCache.MIN_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getMinFactor(), 0.5f, 0);
-    assertEquals(BucketCache.EXTRA_FREE_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getExtraFreeFactor(), 0.5f, 0);
-    assertEquals(BucketCache.SINGLE_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getSingleFactor(), 0.1f, 0);
-    assertEquals(BucketCache.MULTI_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getMultiFactor(), 0.7f, 0);
-    assertEquals(BucketCache.MEMORY_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getMemoryFactor(), 0.2f, 0);
+    assertEquals(BucketCache.ACCEPT_FACTOR_CONFIG_NAME + " failed to propagate.", 0.9f,
+        cache.getAcceptableFactor(), 0);
+    assertEquals(BucketCache.MIN_FACTOR_CONFIG_NAME + " failed to propagate.", 0.5f,
+        cache.getMinFactor(), 0);
+    assertEquals(BucketCache.EXTRA_FREE_FACTOR_CONFIG_NAME + " failed to propagate.", 0.5f,
+        cache.getExtraFreeFactor(), 0);
+    assertEquals(BucketCache.SINGLE_FACTOR_CONFIG_NAME + " failed to propagate.", 0.1f,
+        cache.getSingleFactor(), 0);
+    assertEquals(BucketCache.MULTI_FACTOR_CONFIG_NAME + " failed to propagate.", 0.7f,
+        cache.getMultiFactor(), 0);
+    assertEquals(BucketCache.MEMORY_FACTOR_CONFIG_NAME + " failed to propagate.", 0.2f,
+        cache.getMemoryFactor(), 0);
   }
 
   @Test
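
Making MockedBucketCache a static nested class (and FailingConnection further down) addresses the findbugs warning about inner classes that never read their enclosing instance: a non-static member class carries a hidden reference to the outer object, which wastes memory and can keep the enclosing test alive longer than intended. A small sketch of the distinction, using made-up class names:

    public class Outer {
      private int field;

      // Non-static member class: every instance captures a hidden reference to Outer,
      // which is only justified if the outer state is actually used.
      private class Inner {
        int read() {
          return field;
        }
      }

      // Static nested class: no hidden reference, so it is the better default
      // whenever the enclosing instance is never touched.
      private static class Nested {
        int read(int value) {
          return value;
        }
      }
    }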

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
index 09429dd..0a168ba 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
@@ -148,7 +148,7 @@ public class TestNettyIPC extends AbstractTestIPC {
       super(server, name, services, bindAddress, conf, scheduler, true);
     }
 
-    final class FailingConnection extends NettyServerRpcConnection {
+    static final class FailingConnection extends NettyServerRpcConnection {
       private FailingConnection(TestFailingRpcServer rpcServer, Channel channel) {
         super(rpcServer, channel);
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
index 83a7acc..f6f6fc5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
@@ -112,7 +112,7 @@ public class TestProtoBufRpc {
       // Test echo method
       EchoRequestProto echoRequest = EchoRequestProto.newBuilder().setMessage("hello").build();
       EchoResponseProto echoResponse = stub.echo(null, echoRequest);
-      assertEquals(echoResponse.getMessage(), "hello");
+      assertEquals("hello", echoResponse.getMessage());
 
       stub.error(null, emptyRequest);
       fail("Expected exception is not thrown");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
index 9d2fd91..9e70c93 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.ipc;
 import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-
 import java.io.IOException;
 import java.net.Socket;
 import java.net.SocketAddress;
@@ -39,6 +37,7 @@ import org.apache.hadoop.hbase.client.MetricsConnection;
 import org.apache.hadoop.hbase.client.RetriesExhaustedException;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Rule;
@@ -50,6 +49,8 @@ import org.junit.rules.TestRule;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+
 @Category(MediumTests.class)
 public class TestRpcClientLeaks {
   @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
@@ -112,7 +113,7 @@ public class TestRpcClientLeaks {
     conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2);
     Connection connection = ConnectionFactory.createConnection(conf);
     Table table = connection.getTable(TableName.valueOf(name.getMethodName()));
-    table.get(new Get("asd".getBytes()));
+    table.get(new Get(Bytes.toBytes("asd")));
     connection.close();
     for (Socket socket : MyRpcClientImpl.savedSockets) {
       assertTrue("Socket + " +  socket + " is not closed", socket.isClosed());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
index e4a8767..e646c14 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
@@ -166,7 +166,8 @@ public class TestSimpleRpcScheduler {
     for (String callQueueName:callQueueInfo.getCallQueueNames()) {
 
       for (String calledMethod: callQueueInfo.getCalledMethodNames(callQueueName)) {
-        assertEquals(callQueueInfo.getCallMethodCount(callQueueName, calledMethod), totalCallMethods);
+        assertEquals(totalCallMethods,
+            callQueueInfo.getCallMethodCount(callQueueName, calledMethod));
       }
 
     }
@@ -327,7 +328,7 @@ public class TestSimpleRpcScheduler {
 
     RpcScheduler scheduler = new SimpleRpcScheduler(schedConf, 2, 1, 1, priority,
                                                     HConstants.QOS_THRESHOLD);
-    assertNotEquals(scheduler, null);
+    assertNotEquals(null, scheduler);
   }
 
   @Test
@@ -571,6 +572,7 @@ public class TestSimpleRpcScheduler {
     };
 
     CallRunner cr = new CallRunner(null, putCall) {
+      @Override
       public void run() {
         if (sleepTime <= 0) return;
         try {
@@ -581,10 +583,12 @@ public class TestSimpleRpcScheduler {
         }
       }
 
+      @Override
       public RpcCall getRpcCall() {
         return putCall;
       }
 
+      @Override
       public void drop() {
       }
     };
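
The @Override annotations added throughout this patch let the compiler confirm that each method really overrides something in the supertype; if the name or signature drifts, compilation fails instead of silently introducing a new, never-called method. Illustrated with a made-up listener interface:

    interface Listener {
      void onEvent(String name);
    }

    class LoggingListener implements Listener {
      @Override // compile error here if this no longer matches Listener.onEvent
      public void onEvent(String name) {
        System.out.println("event: " + name);
      }
    }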

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java
index b080d7f..6b6f0de 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java
@@ -77,6 +77,7 @@ abstract public class MapreduceTestingShim {
   }
   
   private static class MapreduceV1Shim extends MapreduceTestingShim {
+    @Override
     public JobContext newJobContext(Configuration jobConf) throws IOException {
       // Implementing:
       // return new JobContext(jobConf, new JobID());
@@ -105,6 +106,7 @@ abstract public class MapreduceTestingShim {
       }
     }
     
+    @Override
     public JobConf obtainJobConf(MiniMRCluster cluster) {
       if (cluster == null) return null;
       try {
@@ -129,6 +131,7 @@ abstract public class MapreduceTestingShim {
   };
 
   private static class MapreduceV2Shim extends MapreduceTestingShim {
+    @Override
     public JobContext newJobContext(Configuration jobConf) {
       return newJob(jobConf);
     }
@@ -147,6 +150,7 @@ abstract public class MapreduceTestingShim {
       }
     }
     
+    @Override
     public JobConf obtainJobConf(MiniMRCluster cluster) {
       try {
         Method meth = MiniMRCluster.class.getMethod("getJobTrackerConf", emptyParam);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
index 16f98a0..53e80f3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
@@ -365,10 +365,8 @@ public class TestAssignmentListener {
     drainingServerTracker.start();
 
     // Confirm our ServerManager lists are empty.
-    Assert.assertEquals(serverManager.getOnlineServers(),
-        new HashMap<ServerName, ServerLoad>());
-    Assert.assertEquals(serverManager.getDrainingServersList(),
-        new ArrayList<ServerName>());
+    Assert.assertEquals(new HashMap<ServerName, ServerLoad>(), serverManager.getOnlineServers());
+    Assert.assertEquals(new ArrayList<ServerName>(), serverManager.getDrainingServersList());
 
     // checkAndRecordNewServer() is how servers are added to the ServerManager.
     ArrayList<ServerName> onlineDrainingServers = new ArrayList<>();
@@ -381,8 +379,7 @@ public class TestAssignmentListener {
     }
 
     // Verify the ServerManager lists are correctly updated.
-    Assert.assertEquals(serverManager.getOnlineServers(), onlineServers);
-    Assert.assertEquals(serverManager.getDrainingServersList(),
-        onlineDrainingServers);
+    Assert.assertEquals(onlineServers, serverManager.getOnlineServers());
+    Assert.assertEquals(onlineDrainingServers, serverManager.getDrainingServersList());
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
index e8aa755..aa5ad1e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
@@ -190,7 +190,7 @@ public class TestMasterFailover {
       RegionState metaState = MetaTableLocator.getMetaRegionState(hrs.getZooKeeper());
       assertEquals("hbase:meta should be online on RS",
           metaState.getServerName(), metaServerName);
-      assertEquals("hbase:meta should be online on RS", metaState.getState(), State.OPEN);
+      assertEquals("hbase:meta should be online on RS", State.OPEN, metaState.getState());
 
       // Start up a new master
       LOG.info("Starting up a new master");
@@ -203,7 +203,7 @@ public class TestMasterFailover {
       metaState = MetaTableLocator.getMetaRegionState(activeMaster.getZooKeeper());
       assertEquals("hbase:meta should be online on RS",
           metaState.getServerName(), metaServerName);
-      assertEquals("hbase:meta should be online on RS", metaState.getState(), State.OPEN);
+      assertEquals("hbase:meta should be online on RS", State.OPEN, metaState.getState());
 
       // Done, shutdown the cluster
     } finally {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
index fd44c89..15c8b6a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
@@ -189,6 +189,7 @@ public class TestMasterNoCluster {
         TESTUTIL.getConfiguration(), rs0, rs0, rs0.getServerName(),
         HRegionInfo.FIRST_META_REGIONINFO);
     HMaster master = new HMaster(conf) {
+      @Override
       InetAddress getRemoteInetAddress(final int port, final long serverStartCode)
       throws UnknownHostException {
         // Return different address dependent on port passed.

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index cd7af50..e399f2e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.EnumSet;
@@ -242,7 +243,7 @@ public class TestMasterOperationsForRegionReplicas {
       }
       assert(defaultReplicas.size() == numRegions);
       Collection<Integer> counts = new HashSet<>(defaultReplicas.values());
-      assert(counts.size() == 1 && counts.contains(new Integer(numReplica)));
+      assert(counts.size() == 1 && counts.contains(numReplica));
     } finally {
       ADMIN.disableTable(tableName);
       ADMIN.deleteTable(tableName);
@@ -336,7 +337,7 @@ public class TestMasterOperationsForRegionReplicas {
         byte[] startKey = region.getStartKey();
         if (region.getTable().equals(table)) {
           setOfStartKeys.add(startKey); //ignore other tables
-          LOG.info("--STARTKEY " + new String(startKey)+"--");
+          LOG.info("--STARTKEY {}--", new String(startKey, StandardCharsets.UTF_8));
         }
       }
       // the number of startkeys will be equal to the number of regions hosted in each server
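
Two of the recurring fixes meet in this hunk: the deprecated boxed-primitive constructor new Integer(numReplica) gives way to autoboxing (equivalently Integer.valueOf), and new String(startKey) gains an explicit charset so decoding no longer depends on the platform default. A compact sketch of both, with illustrative values only:

    import java.nio.charset.StandardCharsets;
    import java.util.HashSet;
    import java.util.Set;

    public class BoxingAndCharsetExample {
      public static void main(String[] args) {
        Set<Integer> counts = new HashSet<>();
        counts.add(3);                        // autoboxes via Integer.valueOf(3)
        boolean single = counts.contains(3);  // no need for new Integer(3)

        byte[] startKey = {0x61, 0x62};
        // Explicit charset keeps the logged key stable across JVM locales.
        String printable = new String(startKey, StandardCharsets.UTF_8);
        System.out.println(single + " --STARTKEY " + printable + "--");
      }
    }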

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
index 29c24f7..648ea64 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
@@ -121,6 +121,7 @@ public class TestMasterShutdown {
     master.start();
     LOG.info("Called master start on " + master.getName());
     Thread shutdownThread = new Thread("Shutdown-Thread") {
+      @Override
       public void run() {
         LOG.info("Before call to shutdown master");
         try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java
index ebfec22..2e6c699 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java
@@ -88,8 +88,7 @@ public class TestMetaShutdownHandler {
     }
     RegionState metaState =
         MetaTableLocator.getMetaRegionState(master.getZooKeeper());
-    assertEquals("Meta should be not in transition",
-      metaState.getState(), RegionState.State.OPEN);
+    assertEquals("Meta should be not in transition", RegionState.State.OPEN, metaState.getState());
     assertNotEquals("Meta should be moved off master",
       metaServerName, master.getServerName());
 
@@ -115,8 +114,7 @@ public class TestMetaShutdownHandler {
       regionStates.isRegionOnline(HRegionInfo.FIRST_META_REGIONINFO));
     // Now, make sure meta is registered in zk
     metaState = MetaTableLocator.getMetaRegionState(master.getZooKeeper());
-    assertEquals("Meta should be not in transition",
-      metaState.getState(), RegionState.State.OPEN);
+    assertEquals("Meta should be not in transition", RegionState.State.OPEN, metaState.getState());
     assertEquals("Meta should be assigned", metaState.getServerName(),
       regionStates.getRegionServerOfRegion(HRegionInfo.FIRST_META_REGIONINFO));
     assertNotEquals("Meta should be assigned on a different server",

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
index e99d533..2794952 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
@@ -389,8 +389,7 @@ public class TestRegionPlacement {
     lastRegionOpenedCount = currentRegionOpened;
 
     assertEquals("There are only " + regionMovement + " instead of "
-          + expected + " region movement for " + attempt + " attempts",
-          regionMovement, expected);
+          + expected + " region movement for " + attempt + " attempts", expected, regionMovement);
   }
 
   /**
@@ -469,6 +468,7 @@ public class TestRegionPlacement {
     final AtomicInteger totalRegionNum = new AtomicInteger(0);
     LOG.info("The start of region placement verification");
     MetaTableAccessor.Visitor visitor = new MetaTableAccessor.Visitor() {
+      @Override
       public boolean visit(Result result) throws IOException {
         try {
           @SuppressWarnings("deprecation")

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
index cd5239e..128d7ee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
@@ -384,23 +384,23 @@ public class TestSplitLogManager {
   @Test (timeout=180000)
   public void testTaskResigned() throws Exception {
     LOG.info("TestTaskResigned - resubmit task node once in RESIGNED state");
-    assertEquals(tot_mgr_resubmit.sum(), 0);
+    assertEquals(0, tot_mgr_resubmit.sum());
     slm = new SplitLogManager(master, conf);
-    assertEquals(tot_mgr_resubmit.sum(), 0);
+    assertEquals(0, tot_mgr_resubmit.sum());
     TaskBatch batch = new TaskBatch();
     String tasknode = submitTaskAndWait(batch, "foo/1");
-    assertEquals(tot_mgr_resubmit.sum(), 0);
+    assertEquals(0, tot_mgr_resubmit.sum());
     final ServerName worker1 = ServerName.valueOf("worker1,1,1");
-    assertEquals(tot_mgr_resubmit.sum(), 0);
+    assertEquals(0, tot_mgr_resubmit.sum());
     SplitLogTask slt = new SplitLogTask.Resigned(worker1);
-    assertEquals(tot_mgr_resubmit.sum(), 0);
+    assertEquals(0, tot_mgr_resubmit.sum());
     ZKUtil.setData(zkw, tasknode, slt.toByteArray());
     ZKUtil.checkExists(zkw, tasknode);
     // Could be small race here.
     if (tot_mgr_resubmit.sum() == 0) {
       waitForCounter(tot_mgr_resubmit, 0, 1, to/2);
     }
-    assertEquals(tot_mgr_resubmit.sum(), 1);
+    assertEquals(1, tot_mgr_resubmit.sum());
 
     byte[] taskstate = ZKUtil.getData(zkw, tasknode);
     slt = SplitLogTask.parseFrom(taskstate);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
index 1f61ee7..5a75297 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
@@ -65,9 +65,8 @@ public class TestTableStateManager {
     TEST_UTIL.restartHBaseCluster(1);
 
     HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
-    Assert.assertEquals(
-        master.getTableStateManager().getTableState(tableName),
-        TableState.State.DISABLED);
+    Assert.assertEquals(TableState.State.DISABLED,
+        master.getTableStateManager().getTableState(tableName));
   }
 
   private void setTableStateInZK(ZKWatcher watcher, final TableName tableName,

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
index 83fafff..9bd4443 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
@@ -107,10 +107,12 @@ public class MockMasterServices extends MockNoopMasterServices {
     this.walManager = new MasterWalManager(this);
     // Mock an AM.
     this.assignmentManager = new AssignmentManager(this, new MockRegionStateStore(this)) {
+      @Override
       public boolean isTableEnabled(final TableName tableName) {
         return true;
       }
 
+      @Override
       public boolean isTableDisabled(final TableName tableName) {
         return false;
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
index 37d9820..d2a4020 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
@@ -501,8 +501,8 @@ public class TestSplitTableRegionProcedure {
         daughters.get(i),
         startRow,
         numRows,
-        ColumnFamilyName1.getBytes(),
-        ColumnFamilyName2.getBytes());
+        Bytes.toBytes(ColumnFamilyName1),
+        Bytes.toBytes(ColumnFamilyName2));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
index adf56b8..e180fb5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
@@ -159,6 +159,7 @@ public class BalancerTestBase {
     public MockMapping(Configuration conf) {
     }
 
+    @Override
     public List<String> resolve(List<String> names) {
       List<String> ret = new ArrayList<>(names.size());
       for (String name : names) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
index 08b27ec..644de6a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
@@ -392,6 +392,7 @@ public class TestLogsCleaner {
           .when(zk).getData("/hbase/replication/rs", null, new Stat());
     }
 
+    @Override
     public RecoverableZooKeeper getRecoverableZooKeeper() {
       return zk;
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
index 2948701..85f0d1f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
@@ -351,6 +351,7 @@ public class TestReplicationHFileCleaner {
           .when(zk).getData("/hbase/replication/hfile-refs", null, new Stat());
     }
 
+    @Override
     public RecoverableZooKeeper getRecoverableZooKeeper() {
       return zk;
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
index 6069041..6a71df3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
@@ -183,7 +183,7 @@ public class TestSnapshotFromMaster {
     DisabledTableSnapshotHandler mockHandler = Mockito.mock(DisabledTableSnapshotHandler.class);
     Mockito.when(mockHandler.getException()).thenReturn(null);
     Mockito.when(mockHandler.getSnapshot()).thenReturn(desc);
-    Mockito.when(mockHandler.isFinished()).thenReturn(new Boolean(true));
+    Mockito.when(mockHandler.isFinished()).thenReturn(Boolean.TRUE);
     Mockito.when(mockHandler.getCompletionTimestamp())
       .thenReturn(EnvironmentEdgeManager.currentTime());
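
The stubbed isFinished() only needs a true value, and new Boolean(true) allocates a fresh wrapper on every call (the constructor is deprecated in recent JDKs); Boolean.TRUE, or Boolean.valueOf(true), reuses the cached instance. A minimal sketch outside of Mockito:

    public class BooleanCacheExample {
      public static void main(String[] args) {
        Boolean viaValueOf = Boolean.valueOf(true); // returns the shared Boolean.TRUE
        Boolean constant = Boolean.TRUE;
        System.out.println(viaValueOf == constant); // true: same cached instance
      }
    }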
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
index 7f031cc..1f743db 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.hamcrest.core.IsInstanceOf;
 import org.hamcrest.core.StringStartsWith;
 import org.junit.After;
@@ -60,6 +61,7 @@ import org.junit.rules.TestName;
 import org.junit.rules.TestRule;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;
@@ -105,8 +107,10 @@ public class TestLockProcedure {
     setupConf(UTIL.getConfiguration());
     UTIL.startMiniCluster(1);
     UTIL.getAdmin().createNamespace(NamespaceDescriptor.create(namespace).build());
-    UTIL.createTable(tableName1, new byte[][]{"fam".getBytes()}, new byte[][] {"1".getBytes()});
-    UTIL.createTable(tableName2, new byte[][]{"fam".getBytes()}, new byte[][] {"1".getBytes()});
+    UTIL.createTable(tableName1,
+        new byte[][]{ Bytes.toBytes("fam")}, new byte[][] {Bytes.toBytes("1")});
+    UTIL.createTable(tableName2,
+        new byte[][]{Bytes.toBytes("fam")}, new byte[][] {Bytes.toBytes("1")});
     masterRpcService = UTIL.getHBaseCluster().getMaster().getMasterRpcServices();
     procExec = UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor();
     tableRegions1 = UTIL.getAdmin().getRegions(tableName1);
@@ -195,7 +199,7 @@ public class TestLockProcedure {
       LockHeartbeatResponse response = masterRpcService.lockHeartbeat(null,
           LockHeartbeatRequest.newBuilder().setProcId(procId).build());
       if (response.getLockStatus() == LockHeartbeatResponse.LockStatus.LOCKED) {
-        assertEquals(response.getTimeoutMs(), HEARTBEAT_TIMEOUT);
+        assertEquals(HEARTBEAT_TIMEOUT, response.getTimeoutMs());
         LOG.debug(String.format("Proc id %s acquired lock.", procId));
         return true;
       }
@@ -349,7 +353,8 @@ public class TestLockProcedure {
     CountDownLatch latch = new CountDownLatch(1);
     // MasterRpcServices don't set latch with LockProcedure, so create one and submit it directly.
     LockProcedure lockProc = new LockProcedure(UTIL.getConfiguration(),
-        TableName.valueOf("table"), org.apache.hadoop.hbase.procedure2.LockType.EXCLUSIVE, "desc", latch);
+        TableName.valueOf("table"),
+        org.apache.hadoop.hbase.procedure2.LockType.EXCLUSIVE, "desc", latch);
     procExec.submitProcedure(lockProc);
     assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
     releaseLock(lockProc.getProcId());