Posted to commits@hbase.apache.org by md...@apache.org on 2017/11/05 02:31:11 UTC

[2/2] hbase git commit: HBASE-18983 fixes from update error-prone to 2.1.1

HBASE-18983 fixes from update error-prone to 2.1.1


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/33ae6dce
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/33ae6dce
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/33ae6dce

Branch: refs/heads/branch-2
Commit: 33ae6dce426c9d13d76a6b7860e8b46b238eb166
Parents: d592b29
Author: Mike Drob <md...@apache.org>
Authored: Wed Oct 11 11:36:41 2017 -0500
Committer: Mike Drob <md...@apache.org>
Committed: Sat Nov 4 21:29:48 2017 -0500

----------------------------------------------------------------------
 .../hadoop/hbase/client/TestAsyncProcess.java   | 10 ++--
 .../hadoop/hbase/zookeeper/TestZKUtil.java      |  9 +--
 .../procedure2/RemoteProcedureDispatcher.java   |  2 +-
 .../hbase/regionserver/SplitLogWorker.java      |  3 +-
 .../org/apache/hadoop/hbase/TestCompare.java    | 61 --------------------
 .../hadoop/hbase/TestHRegionLocation.java       |  5 +-
 .../hbase/TestIPv6NIOServerSocketChannel.java   |  2 +-
 .../client/TestScannersFromClientSide2.java     |  1 +
 .../hbase/regionserver/TestHRegionInfo.java     | 29 +++++++++-
 .../hbase/regionserver/TestJoinedScanners.java  |  2 +-
 .../hbase/regionserver/TestStoreScanner.java    | 10 ++--
 .../hadoop/hbase/util/MultiThreadedUpdater.java |  6 +-
 .../hadoop/hbase/util/MultiThreadedWriter.java  |  2 +-
 13 files changed, 52 insertions(+), 90 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
index f5166e0..a0f18f4 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
@@ -27,6 +27,7 @@ import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.LinkedList;
@@ -746,12 +747,12 @@ public class TestAsyncProcess {
     puts.add(createPut(1, true));
 
     for (int i = 0; i != controller.maxConcurrentTasksPerRegion; ++i) {
-      ap.incTaskCounters(Arrays.asList(hri1.getRegionName()), sn);
+      ap.incTaskCounters(Collections.singleton(hri1.getRegionName()), sn);
     }
     ap.submit(null, DUMMY_TABLE, puts, false, null, false);
     Assert.assertEquals(puts.size(), 1);
 
-    ap.decTaskCounters(Arrays.asList(hri1.getRegionName()), sn);
+    ap.decTaskCounters(Collections.singleton(hri1.getRegionName()), sn);
     ap.submit(null, DUMMY_TABLE, puts, false, null, false);
     Assert.assertEquals(0, puts.size());
     if (defaultClazz != null) {
@@ -966,7 +967,7 @@ public class TestAsyncProcess {
 
 
     for (int i = 0; i < 1000; i++) {
-      ap.incTaskCounters(Arrays.asList("dummy".getBytes()), sn);
+      ap.incTaskCounters(Collections.singleton("dummy".getBytes()), sn);
     }
 
     final Thread myThread = Thread.currentThread();
@@ -997,7 +998,7 @@ public class TestAsyncProcess {
       public void run() {
         Threads.sleep(sleepTime);
         while (controller.tasksInProgress.get() > 0) {
-          ap.decTaskCounters(Arrays.asList("dummy".getBytes()), sn);
+          ap.decTaskCounters(Collections.singleton("dummy".getBytes()), sn);
         }
       }
     };
@@ -1119,6 +1120,7 @@ public class TestAsyncProcess {
     Assert.assertEquals("the put should not been inserted.", 0, mutator.size());
   }
 
+  @SuppressWarnings("SelfComparison")
   @Test
   public void testAction() {
     Action action_0 = new Action(new Put(Bytes.toBytes("abc")), 10);
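
Note on the Arrays.asList -> Collections.singleton changes above: error-prone warns when a primitive array is passed to Arrays.asList (presumably its ArraysAsListPrimitiveArray check; the commit does not name it), because the result is a one-element List of the array, not a list of its elements. Collections.singleton keeps the intended single-element semantics and makes it explicit. A minimal standalone sketch, using a made-up byte[] rather than a real region name:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import java.util.Set;

    public class SingletonVsAsList {
      public static void main(String[] args) {
        byte[] regionName = {1, 2, 3};

        // Arrays.asList does not unpack a primitive array; it produces a
        // one-element List<byte[]>, which reads as if it might be a List<Byte>.
        List<byte[]> asList = Arrays.asList(regionName);

        // Collections.singleton states the one-element intent explicitly.
        Set<byte[]> singleton = Collections.singleton(regionName);

        System.out.println(asList.size() + " " + singleton.size()); // prints "1 1"
      }
    }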

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
index 53dcdbc..6b1e1f0 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
@@ -96,6 +96,7 @@ public class TestZKUtil {
     Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user6"))));
   }
 
+  @Test(expected = KeeperException.SystemErrorException.class)
   public void testInterruptedDuringAction()
       throws ZooKeeperConnectionException, IOException, KeeperException, InterruptedException {
     final RecoverableZooKeeper recoverableZk = Mockito.mock(RecoverableZooKeeper.class);
@@ -107,12 +108,6 @@ public class TestZKUtil {
     };
     Mockito.doThrow(new InterruptedException()).when(recoverableZk)
         .getChildren(zkw.znodePaths.baseZNode, null);
-    try {
-      ZKUtil.listChildrenNoWatch(zkw, zkw.znodePaths.baseZNode);
-    } catch (KeeperException.SystemErrorException e) {
-      // expected
-      return;
-    }
-    Assert.fail("Should have thrown KeeperException but not");
+    ZKUtil.listChildrenNoWatch(zkw, zkw.znodePaths.baseZNode);
   }
 }
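
The TestZKUtil change swaps a hand-rolled try/catch/fail block for JUnit 4's expected-exception attribute. A minimal sketch of the idiom, using a generic exception rather than the ZooKeeper types:

    import org.junit.Test;

    public class ExpectedExceptionSketch {

      // The test passes only if the named exception escapes the method,
      // replacing the try { ... } catch (...) { return; } Assert.fail() pattern.
      @Test(expected = IllegalStateException.class)
      public void throwsAsExpected() {
        throw new IllegalStateException("boom");
      }
    }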

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
index 5855d95..cff7e8c 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
@@ -174,7 +174,7 @@ public abstract class RemoteProcedureDispatcher<TEnv, TRemote extends Comparable
     }
     node.add(rp);
     // Check our node still in the map; could have been removed by #removeNode.
-    return nodeMap.contains(node);
+    return nodeMap.containsValue(node);
   }
 
   /**
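
The contains -> containsValue change is a behavioural clarification: assuming nodeMap is a ConcurrentHashMap (its declaration is outside this hunk), contains(Object) is the legacy Hashtable-style method that tests values, which is easy to misread as a key lookup. A small sketch with a stand-in map:

    import java.util.concurrent.ConcurrentHashMap;

    public class ContainsValueSketch {
      public static void main(String[] args) {
        ConcurrentHashMap<String, String> nodeMap = new ConcurrentHashMap<>();
        nodeMap.put("remote-1", "node");

        // Legacy ConcurrentHashMap.contains(Object) tests values, not keys.
        System.out.println(nodeMap.contains("node"));      // true, but ambiguous to readers
        System.out.println(nodeMap.containsValue("node")); // true, and says what it means
        System.out.println(nodeMap.containsKey("node"));   // false
      }
    }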

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
index 9d00f1a..1f7c610 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
@@ -75,14 +75,13 @@ public class SplitLogWorker implements Runnable {
     this.server = server;
     this.conf = conf;
     this.coordination = hserver.getCoordinatedStateManager().getSplitLogWorkerCoordination();
-    this.server = server;
     coordination.init(server, conf, splitTaskExecutor, this);
   }
 
   public SplitLogWorker(final Server hserver, final Configuration conf,
       final RegionServerServices server, final LastSequenceId sequenceIdChecker,
       final WALFactory factory) {
-    this(server, conf, server, new TaskExecutor() {
+    this(hserver, conf, server, new TaskExecutor() {
       @Override
       public Status exec(String filename, RecoveryMode mode, CancelableProgressable p) {
         Path walDir;
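
The SplitLogWorker hunk removes a duplicated this.server assignment and, more importantly, fixes the delegating constructor to pass hserver rather than server. Because RegionServerServices extends Server, the wrong argument compiled silently. A hypothetical sketch of that bug class (the types below are stand-ins, not the real HBase interfaces):

    public class DelegationSketch {
      interface Server { default String name() { return "hserver"; } }
      interface RegionServerServices extends Server { }

      static void startWorker(Server hserver, RegionServerServices services) {
        System.out.println("coordinating with: " + hserver.name());
      }

      // Bug pattern: "services" also satisfies the Server parameter, so passing it
      // in both positions compiles -- the intended hserver is silently ignored.
      static void buggyStart(Server hserver, RegionServerServices services) {
        startWorker(services, services);   // compiled, but binds the wrong object
        // startWorker(hserver, services); // the fixed form
      }

      public static void main(String[] args) {
        Server hserver = new Server() { };
        RegionServerServices services = new RegionServerServices() {
          @Override public String name() { return "regionserver-services"; }
        };
        buggyStart(hserver, services); // prints "coordinating with: regionserver-services"
      }
    }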

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java
deleted file mode 100644
index ed61350..0000000
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCompare.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.experimental.categories.Category;
-
-/**
- * Test comparing HBase objects.
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestCompare extends TestCase {
-
-  /**
-   * Sort of HRegionInfo.
-   */
-  public void testHRegionInfo() {
-    HRegionInfo a = new HRegionInfo(TableName.valueOf("a"), null, null);
-    HRegionInfo b = new HRegionInfo(TableName.valueOf("b"), null, null);
-    assertTrue(a.compareTo(b) != 0);
-    HTableDescriptor t = new HTableDescriptor(TableName.valueOf("t"));
-    byte [] midway = Bytes.toBytes("midway");
-    a = new HRegionInfo(t.getTableName(), null, midway);
-    b = new HRegionInfo(t.getTableName(), midway, null);
-    assertTrue(a.compareTo(b) < 0);
-    assertTrue(b.compareTo(a) > 0);
-    assertEquals(a, a);
-    assertTrue(a.compareTo(a) == 0);
-    a = new HRegionInfo(t.getTableName(), Bytes.toBytes("a"), Bytes.toBytes("d"));
-    b = new HRegionInfo(t.getTableName(), Bytes.toBytes("e"), Bytes.toBytes("g"));
-    assertTrue(a.compareTo(b) < 0);
-    a = new HRegionInfo(t.getTableName(), Bytes.toBytes("aaaa"), Bytes.toBytes("dddd"));
-    b = new HRegionInfo(t.getTableName(), Bytes.toBytes("e"), Bytes.toBytes("g"));
-    assertTrue(a.compareTo(b) < 0);
-    a = new HRegionInfo(t.getTableName(), Bytes.toBytes("aaaa"), Bytes.toBytes("dddd"));
-    b = new HRegionInfo(t.getTableName(), Bytes.toBytes("aaaa"), Bytes.toBytes("eeee"));
-    assertTrue(a.compareTo(b) < 0);
-  }
-
-}
-

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
index 14063df..6bbde23 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
@@ -63,6 +63,7 @@ public class TestHRegionLocation {
     System.out.println(hrl1.toString());
   }
 
+  @SuppressWarnings("SelfComparison")
   @Test
   public void testCompareTo() {
     ServerName hsa1 = ServerName.valueOf("localhost", 1234, -1L);
@@ -71,8 +72,8 @@ public class TestHRegionLocation {
     ServerName hsa2 = ServerName.valueOf("localhost", 1235, -1L);
     HRegionLocation hsl2 =
       new HRegionLocation(HRegionInfo.FIRST_META_REGIONINFO, hsa2);
-    assertTrue(hsl1.compareTo(hsl1) == 0);
-    assertTrue(hsl2.compareTo(hsl2) == 0);
+    assertEquals(0, hsl1.compareTo(hsl1));
+    assertEquals(0, hsl2.compareTo(hsl2));
     int compare1 = hsl1.compareTo(hsl2);
     int compare2 = hsl2.compareTo(hsl1);
     assertTrue((compare1 > 0)? compare2 < 0: compare2 > 0);
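
The @SuppressWarnings("SelfComparison") annotations here and in TestAsyncProcess are needed because error-prone's SelfComparison check flags any x.compareTo(x), which is normally a bug but is exactly what a reflexivity test wants. A tiny sketch of the pattern:

    public class SelfComparisonSketch {

      @SuppressWarnings("SelfComparison") // intentional: verifying compareTo reflexivity
      public static void main(String[] args) {
        String s = "abc";
        // Comparing a value to itself is usually a copy/paste mistake, so the
        // check flags it; in a reflexivity test the comparison is deliberate.
        System.out.println(s.compareTo(s) == 0); // true
      }
    }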

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
index d9df3e8..d4f4ada 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
@@ -124,7 +124,7 @@ public class TestIPv6NIOServerSocketChannel {
       //On Windows JDK6, we will get expected exception:
       //java.net.SocketException: Address family not supported by protocol family
       //or java.net.SocketException: Protocol family not supported
-      Assert.assertFalse(ex.getClass().isInstance(BindException.class));
+      Assert.assertFalse(ex instanceof BindException);
       Assert.assertTrue(ex.getMessage().toLowerCase(Locale.ROOT).contains("protocol family"));
       LOG.info("Received expected exception:");
       LOG.info(ex);
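
The isInstance -> instanceof fix repairs an assertion that could never fail: ex.getClass().isInstance(BindException.class) asks whether the Class object BindException.class is an instance of ex's class, which is always false (the kind of misuse error-prone's IsInstanceOfClass check reports). A standalone sketch:

    import java.net.BindException;
    import java.net.SocketException;

    public class IsInstanceSketch {
      public static void main(String[] args) {
        Exception ex = new SocketException("Protocol family not supported");

        // Asks whether the Class object BindException.class is a SocketException:
        // always false, so assertFalse(...) on it could never catch a regression.
        System.out.println(ex.getClass().isInstance(BindException.class)); // false

        // The intended question: is the thrown exception itself a BindException?
        System.out.println(ex instanceof BindException); // false for this ex, but meaningful
      }
    }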

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
index 4da94f2..ff57ca0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
@@ -217,6 +217,7 @@ public class TestScannersFromClientSide2 {
     testScan(456, false, 678, false, 200);
   }
 
+  @Test
   public void testReversedScanWithLimit() throws Exception {
     testReversedScan(998, true, 1, false, 900); // from last region to first region
     testReversedScan(543, true, 321, true, 100);
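
The added @Test here (and on two TestStoreScanner methods below) means these public void test* methods were previously never executed under JUnit 4 -- the situation error-prone's JUnit4TestNotRun check reports. A minimal sketch:

    import org.junit.Test;

    public class NotRunSketch {

      // Looks like a test, but without @Test a JUnit 4 runner never invokes it,
      // so even a guaranteed failure goes unnoticed.
      public void testSilentlySkipped() {
        throw new AssertionError("never executed");
      }

      @Test
      public void testActuallyRuns() {
        // executed by the runner
      }
    }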

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
index 3dd7535..0257cc0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -169,8 +170,30 @@ public class TestHRegionInfo {
     HRegionInfo newer = new HRegionInfo(tableName, empty, empty, false, 1L);
     assertTrue(older.compareTo(newer) < 0);
     assertTrue(newer.compareTo(older) > 0);
-    assertTrue(older.compareTo(older) == 0);
-    assertTrue(newer.compareTo(newer) == 0);
+    assertEquals(0, older.compareTo(older));
+    assertEquals(0, newer.compareTo(newer));
+
+    HRegionInfo a = new HRegionInfo(TableName.valueOf("a"), null, null);
+    HRegionInfo b = new HRegionInfo(TableName.valueOf("b"), null, null);
+    assertNotEquals(0, a.compareTo(b));
+    HTableDescriptor t = new HTableDescriptor(TableName.valueOf("t"));
+    byte [] midway = Bytes.toBytes("midway");
+    a = new HRegionInfo(t.getTableName(), null, midway);
+    b = new HRegionInfo(t.getTableName(), midway, null);
+    assertTrue(a.compareTo(b) < 0);
+    assertTrue(b.compareTo(a) > 0);
+    assertEquals(a, a);
+    assertEquals(0, a.compareTo(a));
+    a = new HRegionInfo(t.getTableName(), Bytes.toBytes("a"), Bytes.toBytes("d"));
+    b = new HRegionInfo(t.getTableName(), Bytes.toBytes("e"), Bytes.toBytes("g"));
+    assertTrue(a.compareTo(b) < 0);
+    a = new HRegionInfo(t.getTableName(), Bytes.toBytes("aaaa"), Bytes.toBytes("dddd"));
+    b = new HRegionInfo(t.getTableName(), Bytes.toBytes("e"), Bytes.toBytes("g"));
+    assertTrue(a.compareTo(b) < 0);
+    a = new HRegionInfo(t.getTableName(), Bytes.toBytes("aaaa"), Bytes.toBytes("dddd"));
+    b = new HRegionInfo(t.getTableName(), Bytes.toBytes("aaaa"), Bytes.toBytes("eeee"));
+    assertTrue(a.compareTo(b) < 0);
+
   }
 
   @Test
@@ -325,7 +348,7 @@ public class TestHRegionInfo {
       if (i != 1) {
         Assert.assertArrayEquals(regionNameParts[i], modifiedRegionNameParts[i]);
       } else {
-        Assert.assertNotEquals(regionNameParts[i][0], modifiedRegionNameParts[i][0]);
+        assertNotEquals(regionNameParts[i][0], modifiedRegionNameParts[i][0]);
         Assert.assertArrayEquals(modifiedRegionNameParts[1],
             HRegionInfo.getStartKeyForDisplay(h, conf));
       }
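
The TestHRegionInfo hunk absorbs the comparisons from the deleted TestCompare and rewrites assertTrue(x.compareTo(y) == 0) as assertEquals(0, x.compareTo(y)), which reports the actual comparison result on failure instead of just "expected true". A small illustration (assumes JUnit 4 on the classpath):

    import static org.junit.Assert.assertEquals;

    public class AssertEqualsMessageSketch {
      public static void main(String[] args) {
        int cmp = "a".compareTo("b"); // -1

        try {
          assertEquals(0, cmp);
        } catch (AssertionError e) {
          // assertTrue(cmp == 0) would only say the condition was false;
          // assertEquals includes the offending value in its message.
          System.out.println(e.getMessage()); // expected:<0> but was:<-1>
        }
      }
    }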

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
index 83810f2..60ed50b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
@@ -129,7 +129,7 @@ public class TestJoinedScanners {
           puts.clear();
         }
       }
-      if (puts.size() >= 0) {
+      if (!puts.isEmpty()) {
         ht.put(puts);
         puts.clear();
       }
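
The puts.size() >= 0 condition was always true (a collection size is never negative), so the guard did nothing; error-prone's SizeGreaterThanOrEqualsZero check flags this, and !isEmpty() expresses the intended "anything left to flush?" test. Sketch:

    import java.util.ArrayList;
    import java.util.List;

    public class NonEmptyGuardSketch {
      public static void main(String[] args) {
        List<String> puts = new ArrayList<>();

        System.out.println(puts.size() >= 0); // true -- always, even when empty
        System.out.println(!puts.isEmpty());  // false -- nothing to flush yet

        puts.add("row");
        System.out.println(!puts.isEmpty());  // true -- flush and clear
      }
    }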

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
index b275e93..f9e4ea9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
@@ -58,6 +58,7 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdge;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
+import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -599,6 +600,7 @@ public class TestStoreScanner {
     }
   }
 
+  @Test
   public void testDeleteVersionMaskingMultiplePuts() throws IOException {
     long now = System.currentTimeMillis();
     KeyValue [] kvs1 = new KeyValue[] {
@@ -623,6 +625,8 @@ public class TestStoreScanner {
       assertEquals(kvs2[1], results.get(0));
     }
   }
+
+  @Test
   public void testDeleteVersionsMixedAndMultipleVersionReturn() throws IOException {
     long now = System.currentTimeMillis();
     KeyValue [] kvs1 = new KeyValue[] {
@@ -834,10 +838,8 @@ public class TestStoreScanner {
   }
 
 
-  /**
-   * TODO this fails, since we don't handle deletions, etc, in peek
-   */
-  public void SKIP_testPeek() throws Exception {
+  @Test @Ignore("this fails, since we don't handle deletions, etc, in peek")
+  public void testPeek() throws Exception {
     KeyValue[] kvs = new KeyValue [] {
         create("R1", "cf", "a", 1, KeyValue.Type.Put, "dont-care"),
         create("R1", "cf", "a", 1, KeyValue.Type.Delete, "dont-care"),

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
index 0cc4143..3f8b449 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
@@ -167,7 +167,7 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase {
                 get = dataGenerator.beforeGet(rowKeyBase, get);
               } catch (Exception e) {
                 // Ideally wont happen
-                LOG.warn("Failed to modify the get from the load generator  = [" + get.getRow()
+                LOG.warn("Failed to modify the get from the load generator  = [" + Bytes.toString(get.getRow())
                     + "], column family = [" + Bytes.toString(cf) + "]", e);
               }
               Result result = getRow(get, rowKeyBase, cf);
@@ -234,7 +234,7 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase {
             if (isBatchUpdate) {
               if (verbose) {
                 LOG.debug("Preparing increment and append for key = ["
-                  + rowKey + "], " + columnCount + " columns");
+                  + Bytes.toString(rowKey) + "], " + columnCount + " columns");
               }
               mutate(table, inc, rowKeyBase);
               mutate(table, app, rowKeyBase);
@@ -267,7 +267,7 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase {
         result = table.get(get);
       } catch (IOException ie) {
         LOG.warn(
-            "Failed to get the row for key = [" + get.getRow() + "], column family = ["
+            "Failed to get the row for key = [" + Bytes.toString(get.getRow()) + "], column family = ["
                 + Bytes.toString(cf) + "]", ie);
       }
       return result;
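
The logging fixes in MultiThreadedUpdater (and the matching one in MultiThreadedWriter below) wrap byte[] keys with Bytes.toString before concatenation; otherwise the array's default toString (something like [B@1b6d3586) ends up in the log. A standalone sketch using java.lang.String in place of HBase's Bytes helper:

    import java.nio.charset.StandardCharsets;

    public class ByteArrayLoggingSketch {
      public static void main(String[] args) {
        byte[] rowKey = "row-0001".getBytes(StandardCharsets.UTF_8);

        // Implicit toString on an array prints its identity, not its contents.
        System.out.println("key = [" + rowKey + "]");          // key = [[B@...]

        // Decoding first (HBase uses Bytes.toString) logs the actual key.
        System.out.println("key = [" + new String(rowKey, StandardCharsets.UTF_8) + "]");
      }
    }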

http://git-wip-us.apache.org/repos/asf/hbase/blob/33ae6dce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
index d62f72d..07e9cc8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
@@ -123,7 +123,7 @@ public class MultiThreadedWriter extends MultiThreadedWriterBase {
           }
           if (isMultiPut) {
             if (verbose) {
-              LOG.debug("Preparing put for key = [" + rowKey + "], " + columnCount + " columns");
+              LOG.debug("Preparing put for key = [" + Bytes.toString(rowKey) + "], " + columnCount + " columns");
             }
             insert(table, put, rowKeyBase);
             numCols.addAndGet(columnCount);