Posted to commits@hbase.apache.org by ra...@apache.org on 2015/07/13 18:37:22 UTC

[2/3] hbase git commit: HBASE-14047 - Cleanup deprecated APIs from Cell class (Ashish Singhi)
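
The changes below follow one pattern: the deprecated copy-returning accessors on Cell/KeyValue (getRow(), getFamily(), getQualifier(), getValue()) are replaced either with the explicit copying helpers in CellUtil or with the zero-copy array/offset/length accessors. A minimal sketch of the CellUtil form follows; the class and method names are illustrative only, not part of the patch.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class CellApiMigrationSketch {
  // The deprecated accessors hid an allocation on every call; the CellUtil
  // helpers make the copy explicit at the call site.
  static String describe(Cell cell) {
    byte[] row = CellUtil.cloneRow(cell);             // was: cell.getRow()
    byte[] family = CellUtil.cloneFamily(cell);       // was: cell.getFamily()
    byte[] qualifier = CellUtil.cloneQualifier(cell); // was: cell.getQualifier()
    byte[] value = CellUtil.cloneValue(cell);         // was: cell.getValue()
    return Bytes.toString(row) + "/" + Bytes.toString(family) + ":"
        + Bytes.toString(qualifier) + " => " + Bytes.toStringBinary(value);
  }
}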

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java
index b892512..0cbbcef 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java
@@ -25,9 +25,10 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.wal.WAL.Entry;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.wal.WAL.Entry;
 
 public class TableCfWALEntryFilter implements WALEntryFilter {
 
@@ -62,7 +63,7 @@ public class TableCfWALEntryFilter implements WALEntryFilter {
         Cell cell = cells.get(i);
         // ignore(remove) kv if its cf isn't in the replicable cf list
         // (empty cfs means all cfs of this table are replicable)
-        if ((cfs != null && !cfs.contains(Bytes.toString(cell.getFamily())))) {
+        if ((cfs != null && !cfs.contains(Bytes.toString(CellUtil.cloneFamily(cell))))) {
           cells.remove(i);
         }
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
index c75f81f..b3db0f6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HConstants;
@@ -354,7 +355,7 @@ public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint {
       }
 
       sinkWriter.append(buffer.getTableName(), buffer.getEncodedRegionName(),
-        entries.get(0).getEdit().getCells().get(0).getRow(), entries);
+        CellUtil.cloneRow(entries.get(0).getEdit().getCells().get(0)), entries);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
index 5b0f469..b396dfc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
@@ -244,7 +244,7 @@ public class Replication extends WALActionsListener.Base implements
         new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
     byte[] family;
     for (Cell cell : logEdit.getCells()) {
-      family = cell.getFamily();
+      family = CellUtil.cloneFamily(cell);
       // This is expected and the KV should not be replicated
       if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) continue;
       // Unexpected, has a tendency to happen in unit tests

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 7e9299a..8bd69a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -329,9 +329,9 @@ public class AccessController extends BaseMasterAndRegionObserver
             List<KeyValue> kvList = (List<KeyValue>)family.getValue();
             for (KeyValue kv : kvList) {
               if (!authManager.authorize(user, tableName, family.getKey(),
-                      kv.getQualifier(), permRequest)) {
-                return AuthResult.deny(request, "Failed qualifier check", user,
-                    permRequest, tableName, makeFamilyMap(family.getKey(), kv.getQualifier()));
+                CellUtil.cloneQualifier(kv), permRequest)) {
+                return AuthResult.deny(request, "Failed qualifier check", user, permRequest,
+                  tableName, makeFamilyMap(family.getKey(), CellUtil.cloneQualifier(kv)));
               }
             }
           }
@@ -749,7 +749,7 @@ public class AccessController extends BaseMasterAndRegionObserver
           }
         }
       } else if (entry.getValue() == null) {
-        get.addFamily(col);        
+        get.addFamily(col);
       } else {
         throw new RuntimeException("Unhandled collection type " +
           entry.getValue().getClass().getName());
@@ -1308,7 +1308,7 @@ public class AccessController extends BaseMasterAndRegionObserver
   @Override
   public void preModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx,
       NamespaceDescriptor ns) throws IOException {
-    // We require only global permission so that 
+    // We require only global permission so that
     // a user with NS admin cannot altering namespace configurations. i.e. namespace quota
     requireGlobalPermission("modifyNamespace", Action.ADMIN, ns.getName());
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 0e20903..0c3ff83 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -393,7 +393,7 @@ public class HBaseFsck extends Configured implements Closeable {
           LOG.info("Failed to create lock file " + hbckLockFilePath.getName()
               + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "
               + retryCounter.getMaxAttempts());
-          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(), 
+          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),
               ioe);
           try {
             exception = ioe;
@@ -880,7 +880,7 @@ public class HBaseFsck extends Configured implements Closeable {
           hf = HFile.createReader(fs, hfile.getPath(), cacheConf, getConf());
           hf.loadFileInfo();
           Cell startKv = hf.getFirstKey();
-          start = startKv.getRow();
+          start = CellUtil.cloneRow(startKv);
           Cell endKv = hf.getLastKey();
           end = CellUtil.cloneRow(endKv);
         } catch (IOException ioe) {
@@ -2685,10 +2685,10 @@ public class HBaseFsck extends Configured implements Closeable {
         }
         regionsFromMeta = Ordering.natural().immutableSortedCopy(regions);
       }
-      
+
       return regionsFromMeta;
     }
-    
+
     private class IntegrityFixSuggester extends TableIntegrityErrorHandlerImpl {
       ErrorReporter errors;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
index e579164..4c55cb3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
@@ -33,8 +33,6 @@ import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -43,14 +41,14 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.codehaus.jackson.map.ObjectMapper;
 
-import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
-// imports for things that haven't moved yet.
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-
 /**
  * WALPrettyPrinter prints the contents of a given WAL with a variety of
  * options affecting formatting and extent of content.
@@ -245,7 +243,7 @@ public class WALPrettyPrinter {
     }
 
     WAL.Reader log = WALFactory.createReader(fs, p, conf);
-    
+
     if (log instanceof ProtobufLogReader) {
       List<String> writerClsNames = ((ProtobufLogReader) log).getWriterClsNames();
       if (writerClsNames != null && writerClsNames.size() > 0) {
@@ -258,18 +256,18 @@ public class WALPrettyPrinter {
         }
         out.println();
       }
-      
+
       String cellCodecClsName = ((ProtobufLogReader) log).getCodecClsName();
       if (cellCodecClsName != null) {
         out.println("Cell Codec Class: " + cellCodecClsName);
       }
     }
-    
+
     if (outputJSON && !persistentOutput) {
       out.print("[");
       firstTxn = true;
     }
-    
+
     try {
       WAL.Entry entry;
       while ((entry = log.next()) != null) {
@@ -288,7 +286,7 @@ public class WALPrettyPrinter {
         for (Cell cell : edit.getCells()) {
           // add atomic operation to txn
           Map<String, Object> op = new HashMap<String, Object>(toStringMap(cell));
-          if (outputValues) op.put("value", Bytes.toStringBinary(cell.getValue()));
+          if (outputValues) op.put("value", Bytes.toStringBinary(CellUtil.cloneValue(cell)));
           // check row output filter
           if (row == null || ((String) op.get("row")).equals(row)) {
             actions.add(op);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 1b3fcf2..0dd8bea 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -3744,11 +3744,11 @@ public class TestFromClientSide {
     // KeyValue v1 expectation.  Cast for now until we go all Cell all the time. TODO
     KeyValue kv = (KeyValue)put.getFamilyCellMap().get(CONTENTS_FAMILY).get(0);
 
-    assertTrue(Bytes.equals(kv.getFamily(), CONTENTS_FAMILY));
+    assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), CONTENTS_FAMILY));
     // will it return null or an empty byte array?
-    assertTrue(Bytes.equals(kv.getQualifier(), new byte[0]));
+    assertTrue(Bytes.equals(CellUtil.cloneQualifier(kv), new byte[0]));
 
-    assertTrue(Bytes.equals(kv.getValue(), value));
+    assertTrue(Bytes.equals(CellUtil.cloneValue(kv), value));
 
     table.put(put);
 
@@ -5335,7 +5335,7 @@ public class TestFromClientSide {
       assertEquals(1, regionsList.size());
     }
   }
-  
+
   private List<HRegionLocation> getRegionsInRange(TableName tableName, byte[] startKey,
       byte[] endKey) throws IOException {
     List<HRegionLocation> regionsInRange = new ArrayList<HRegionLocation>();
@@ -5778,8 +5778,11 @@ public class TestFromClientSide {
     int expectedIndex = 5;
     for (Result result : scanner) {
       assertEquals(result.size(), 1);
-      assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[expectedIndex]));
-      assertTrue(Bytes.equals(result.rawCells()[0].getQualifier(), QUALIFIERS[expectedIndex]));
+      Cell c = result.rawCells()[0];
+      assertTrue(Bytes.equals(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
+        ROWS[expectedIndex], 0, ROWS[expectedIndex].length));
+      assertTrue(Bytes.equals(c.getQualifierArray(), c.getQualifierOffset(),
+        c.getQualifierLength(), QUALIFIERS[expectedIndex], 0, QUALIFIERS[expectedIndex].length));
       expectedIndex--;
     }
     assertEquals(expectedIndex, 0);
@@ -5817,7 +5820,7 @@ public class TestFromClientSide {
     for (Result result : ht.getScanner(scan)) {
       assertEquals(result.size(), 1);
       assertEquals(result.rawCells()[0].getValueLength(), Bytes.SIZEOF_INT);
-      assertEquals(Bytes.toInt(result.rawCells()[0].getValue()), VALUE.length);
+      assertEquals(Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])), VALUE.length);
       count++;
     }
     assertEquals(count, 10);
@@ -6099,15 +6102,15 @@ public class TestFromClientSide {
     result = scanner.next();
     assertTrue("Expected 2 keys but received " + result.size(),
         result.size() == 2);
-    assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[4]));
-    assertTrue(Bytes.equals(result.rawCells()[1].getRow(), ROWS[4]));
-    assertTrue(Bytes.equals(result.rawCells()[0].getValue(), VALUES[1]));
-    assertTrue(Bytes.equals(result.rawCells()[1].getValue(), VALUES[2]));
+    assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[4]));
+    assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[1]), ROWS[4]));
+    assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[0]), VALUES[1]));
+    assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[1]), VALUES[2]));
     result = scanner.next();
     assertTrue("Expected 1 key but received " + result.size(),
         result.size() == 1);
-    assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[3]));
-    assertTrue(Bytes.equals(result.rawCells()[0].getValue(), VALUES[0]));
+    assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[3]));
+    assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[0]), VALUES[0]));
     scanner.close();
     ht.close();
   }
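
Several of the test assertions above take the other route: rather than cloning, they compare the expected bytes directly against the cell's backing array through the array/offset/length accessors, so no copy is made at all. A minimal sketch of that zero-copy comparison, with illustrative names:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.util.Bytes;

public class ZeroCopyCompareSketch {
  // Bytes.equals(byte[], int, int, byte[], int, int) compares a slice of the
  // cell's backing array in place, avoiding the allocation that
  // CellUtil.cloneRow()/cloneQualifier() would perform.
  static boolean rowMatches(Cell c, byte[] expectedRow) {
    return Bytes.equals(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
        expectedRow, 0, expectedRow.length);
  }

  static boolean qualifierMatches(Cell c, byte[] expectedQualifier) {
    return Bytes.equals(c.getQualifierArray(), c.getQualifierOffset(),
        c.getQualifierLength(), expectedQualifier, 0, expectedQualifier.length);
  }
}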

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index c46056d..9be2f64 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -27,6 +27,7 @@ import java.util.ConcurrentModificationException;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -106,8 +107,8 @@ public class TestPutDeleteEtcCellIteration {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
       KeyValue kv = (KeyValue)cell;
-      assertTrue(Bytes.equals(kv.getFamily(), bytes));
-      assertTrue(Bytes.equals(kv.getValue(), bytes));
+      assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), bytes));
+      assertTrue(Bytes.equals(CellUtil.cloneValue(kv), bytes));
     }
     assertEquals(COUNT, index);
   }
@@ -125,8 +126,8 @@ public class TestPutDeleteEtcCellIteration {
       int value = index;
       byte [] bytes = Bytes.toBytes(index++);
       KeyValue kv = (KeyValue)cell;
-      assertTrue(Bytes.equals(kv.getFamily(), bytes));
-      long a = Bytes.toLong(kv.getValue());
+      assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), bytes));
+      long a = Bytes.toLong(CellUtil.cloneValue(kv));
       assertEquals(value, a);
     }
     assertEquals(COUNT, index);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
index dd8c7b4..6e1a038 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
@@ -20,19 +20,19 @@
 package org.apache.hadoop.hbase.coprocessor;
 
 import java.io.IOException;
-import java.util.List;
 import java.util.Arrays;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.wal.WALKey;
 
 /**
  * Class for testing WALObserver coprocessor.
@@ -119,8 +119,8 @@ implements WALObserver {
     Cell deletedCell = null;
     for (Cell cell : cells) {
       // assume only one kv from the WALEdit matches.
-      byte[] family = cell.getFamily();
-      byte[] qulifier = cell.getQualifier();
+      byte[] family = CellUtil.cloneFamily(cell);
+      byte[] qulifier = CellUtil.cloneQualifier(cell);
 
       if (Arrays.equals(family, ignoredFamily) &&
           Arrays.equals(qulifier, ignoredQualifier)) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
index 601db76..6707354 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
@@ -66,10 +66,10 @@ import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFile.Reader;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.wal.WALKey;
 
 import com.google.common.collect.ImmutableList;
 
@@ -221,13 +221,13 @@ public class SimpleRegionObserver extends BaseRegionObserver {
       List<Mutation> metaEntries) throws IOException {
     ctPreSplitBeforePONR.incrementAndGet();
   }
-  
+
   @Override
   public void preSplitAfterPONR(
       ObserverContext<RegionCoprocessorEnvironment> ctx) throws IOException {
     ctPreSplitAfterPONR.incrementAndGet();
   }
-  
+
   @Override
   public void postSplit(ObserverContext<RegionCoprocessorEnvironment> c, Region l, Region r) {
     ctPostSplit.incrementAndGet();
@@ -370,7 +370,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
   }
 
   @Override
-  public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c, 
+  public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c,
       final Put put, final WALEdit edit,
       final Durability durability) throws IOException {
     Map<byte[], List<Cell>> familyMap  = put.getFamilyCellMap();
@@ -384,20 +384,23 @@ public class SimpleRegionObserver extends BaseRegionObserver {
       assertNotNull(cells);
       assertNotNull(cells.get(0));
       KeyValue kv = (KeyValue)cells.get(0);
-      assertTrue(Bytes.equals(kv.getQualifier(),
-          TestRegionObserverInterface.A));
+      assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+        kv.getQualifierLength(), TestRegionObserverInterface.A, 0,
+        TestRegionObserverInterface.A.length));
       cells = familyMap.get(TestRegionObserverInterface.B);
       assertNotNull(cells);
       assertNotNull(cells.get(0));
       kv = (KeyValue)cells.get(0);
-      assertTrue(Bytes.equals(kv.getQualifier(),
-          TestRegionObserverInterface.B));
+      assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+        kv.getQualifierLength(), TestRegionObserverInterface.B, 0,
+        TestRegionObserverInterface.B.length));
       cells = familyMap.get(TestRegionObserverInterface.C);
       assertNotNull(cells);
       assertNotNull(cells.get(0));
       kv = (KeyValue)cells.get(0);
-      assertTrue(Bytes.equals(kv.getQualifier(),
-          TestRegionObserverInterface.C));
+      assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+        kv.getQualifierLength(), TestRegionObserverInterface.C, 0,
+        TestRegionObserverInterface.C.length));
     }
     ctPrePut.incrementAndGet();
   }
@@ -418,25 +421,31 @@ public class SimpleRegionObserver extends BaseRegionObserver {
       assertNotNull(cells.get(0));
       // KeyValue v1 expectation.  Cast for now until we go all Cell all the time. TODO
       KeyValue kv = (KeyValue)cells.get(0);
-      assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.A));
+      assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+        kv.getQualifierLength(), TestRegionObserverInterface.A, 0,
+        TestRegionObserverInterface.A.length));
       cells = familyMap.get(TestRegionObserverInterface.B);
       assertNotNull(cells);
       assertNotNull(cells.get(0));
       // KeyValue v1 expectation.  Cast for now until we go all Cell all the time. TODO
       kv = (KeyValue)cells.get(0);
-      assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.B));
+      assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+        kv.getQualifierLength(), TestRegionObserverInterface.B, 0,
+        TestRegionObserverInterface.B.length));
       cells = familyMap.get(TestRegionObserverInterface.C);
       assertNotNull(cells);
       assertNotNull(cells.get(0));
       // KeyValue v1 expectation.  Cast for now until we go all Cell all the time. TODO
       kv = (KeyValue)cells.get(0);
-      assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.C));
+      assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+        kv.getQualifierLength(), TestRegionObserverInterface.C, 0,
+        TestRegionObserverInterface.C.length));
     }
     ctPostPut.incrementAndGet();
   }
 
   @Override
-  public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> c, 
+  public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
       final Delete delete, final WALEdit edit,
       final Durability durability) throws IOException {
     Map<byte[], List<Cell>> familyMap  = delete.getFamilyCellMap();
@@ -456,7 +465,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
   }
 
   @Override
-  public void postDelete(final ObserverContext<RegionCoprocessorEnvironment> c, 
+  public void postDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
       final Delete delete, final WALEdit edit,
       final Durability durability) throws IOException {
     Map<byte[], List<Cell>> familyMap  = delete.getFamilyCellMap();
@@ -467,7 +476,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
     ctBeforeDelete.set(0);
     ctPostDeleted.incrementAndGet();
   }
-  
+
   @Override
   public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
       MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
@@ -604,7 +613,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
   }
 
   @Override
-  public Result preAppendAfterRowLock(ObserverContext<RegionCoprocessorEnvironment> e, 
+  public Result preAppendAfterRowLock(ObserverContext<RegionCoprocessorEnvironment> e,
       Append append) throws IOException {
     ctPreAppendAfterRowLock.incrementAndGet();
     return null;
@@ -724,7 +733,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
   public boolean hadPostPut() {
     return ctPostPut.get() > 0;
   }
-  
+
   public boolean hadPreBatchMutate() {
     return ctPreBatchMutate.get() > 0;
   }
@@ -784,7 +793,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
   public boolean hadPreIncrement() {
     return ctPreIncrement.get() > 0;
   }
-  
+
   public boolean hadPreIncrementAfterRowLock() {
     return ctPreIncrementAfterRowLock.get() > 0;
   }
@@ -808,7 +817,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
   public boolean hadPrePreparedDeleteTS() {
     return ctPrePrepareDeleteTS.get() > 0;
   }
-  
+
   public boolean hadPreWALRestore() {
     return ctPreWALRestore.get() > 0;
   }
@@ -874,7 +883,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
   public int getCtPreSplit() {
     return ctPreSplit.get();
   }
-  
+
   public int getCtPreSplitBeforePONR() {
     return ctPreSplitBeforePONR.get();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
index cdcdeed..a3c106d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -51,11 +52,6 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.wal.DefaultWALProvider;
-import org.apache.hadoop.hbase.wal.WAL;
-import org.apache.hadoop.hbase.wal.WALFactory;
-import org.apache.hadoop.hbase.wal.WALKey;
-import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -63,14 +59,19 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdge;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.wal.DefaultWALProvider;
+import org.apache.hadoop.hbase.wal.WAL;
+import org.apache.hadoop.hbase.wal.WALFactory;
+import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.TestName;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
 
 /**
  * Tests invocation of the
@@ -216,14 +217,14 @@ public class TestWALObserver {
     List<Cell> cells = edit.getCells();
 
     for (Cell cell : cells) {
-      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) {
+      if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) {
         foundFamily0 = true;
       }
-      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) {
+      if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) {
         foundFamily2 = true;
       }
-      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) {
-        if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) {
+      if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) {
+        if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) {
           modifiedFamily1 = true;
         }
       }
@@ -244,14 +245,14 @@ public class TestWALObserver {
     foundFamily2 = false;
     modifiedFamily1 = false;
     for (Cell cell : cells) {
-      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) {
+      if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) {
         foundFamily0 = true;
       }
-      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) {
+      if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) {
         foundFamily2 = true;
       }
-      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) {
-        if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) {
+      if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) {
+        if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) {
           modifiedFamily1 = true;
         }
       }
@@ -462,7 +463,7 @@ public class TestWALObserver {
   /*
    * Creates an HRI around an HTD that has <code>tableName</code> and three
    * column families named.
-   * 
+   *
    * @param tableName Name of table to use when we create HTableDescriptor.
    */
   private HRegionInfo createBasic3FamilyHRegionInfo(final String tableName) {
@@ -496,7 +497,7 @@ public class TestWALObserver {
 
   /**
    * Copied from HRegion.
-   * 
+   *
    * @param familyMap
    *          map of family->edits
    * @param walEdit

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index 3601b01..92be81a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -534,7 +534,7 @@ public class TestFilter {
       ArrayList<Cell> values = new ArrayList<Cell>();
       boolean isMoreResults = scanner.next(values);
       if (!isMoreResults
-          || !Bytes.toString(values.get(0).getRow()).startsWith(prefix)) {
+          || !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) {
         Assert.assertTrue(
             "The WhileMatchFilter should now filter all remaining",
             filter.filterAllRemaining());
@@ -581,7 +581,7 @@ public class TestFilter {
 
 
   /**
-   * The following filter simulates a pre-0.96 filter where filterRow() is defined while 
+   * The following filter simulates a pre-0.96 filter where filterRow() is defined while
    * hasFilterRow() returns false
    */
   static class OldTestFilter extends FilterBase {
@@ -592,25 +592,25 @@ public class TestFilter {
     public boolean hasFilterRow() {
       return false;
     }
-    
+
     @Override
     public boolean filterRow() {
       // always filter out rows
       return true;
     }
-    
+
     @Override
     public ReturnCode filterKeyValue(Cell ignored) throws IOException {
       return ReturnCode.INCLUDE;
     }
   }
-  
+
   /**
-   * The following test is to ensure old(such as hbase0.94) filterRow() can be correctly fired in 
-   * 0.96+ code base.  
-   * 
+   * The following test is to ensure old(such as hbase0.94) filterRow() can be correctly fired in
+   * 0.96+ code base.
+   *
    * See HBASE-10366
-   * 
+   *
    * @throws Exception
    */
   @Test
@@ -1558,7 +1558,7 @@ public class TestFilter {
     };
 
     for(KeyValue kv : srcKVs) {
-      Put put = new Put(kv.getRow()).add(kv);
+      Put put = new Put(CellUtil.cloneRow(kv)).add(kv);
       put.setDurability(Durability.SKIP_WAL);
       this.region.put(put);
     }
@@ -1597,7 +1597,7 @@ public class TestFilter {
 
     // Add QUALIFIERS_ONE[1] to ROWS_THREE[0] with VALUES[0]
     KeyValue kvA = new KeyValue(ROWS_THREE[0], FAMILIES[0], QUALIFIERS_ONE[1], VALUES[0]);
-    this.region.put(new Put(kvA.getRow()).add(kvA));
+    this.region.put(new Put(CellUtil.cloneRow(kvA)).add(kvA));
 
     // Match VALUES[1] against QUALIFIERS_ONE[1] with filterIfMissing = true
     // Expect 1 row (3)
@@ -1971,7 +1971,7 @@ public class TestFilter {
       verifyScanFullNoValues(s, expectedKVs, useLen);
     }
   }
-  
+
   /**
    * Filter which makes sleeps for a second between each row of a scan.
    * This can be useful for manual testing of bugs like HBASE-5973. For example:
@@ -1984,7 +1984,7 @@ public class TestFilter {
    */
   public static class SlowScanFilter extends FilterBase {
     private static Thread ipcHandlerThread = null;
-    
+
     @Override
     public byte [] toByteArray() {return null;}
 
@@ -2099,5 +2099,5 @@ public class TestFilter {
     WAL wal = ((HRegion)testRegion).getWAL();
     ((HRegion)testRegion).close();
     wal.close();
-  }      
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 8854efe..440c9f5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -266,7 +266,7 @@ public class TestFilterList {
     byte[] r1 = Bytes.toBytes("Row1");
     byte[] r11 = Bytes.toBytes("Row11");
     byte[] r2 = Bytes.toBytes("Row2");
-  
+
     FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ONE);
     flist.addFilter(new PrefixFilter(r1));
     flist.filterRowKey(KeyValueUtil.createFirstOnRow(r1));
@@ -276,7 +276,7 @@ public class TestFilterList {
     flist.reset();
     flist.filterRowKey(KeyValueUtil.createFirstOnRow(r2));
     assertEquals(flist.filterKeyValue(new KeyValue(r2,r2,r2)), ReturnCode.SKIP);
-  
+
     flist = new FilterList(FilterList.Operator.MUST_PASS_ONE);
     flist.addFilter(new AlwaysNextColFilter());
     flist.addFilter(new PrefixFilter(r1));
@@ -298,7 +298,7 @@ public class TestFilterList {
     byte[] r1 = Bytes.toBytes("Row1");
     byte[] r11 = Bytes.toBytes("Row11");
     byte[] r2 = Bytes.toBytes("Row2");
-  
+
     FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ONE);
     flist.addFilter(new AlwaysNextColFilter());
     flist.addFilter(new InclusiveStopFilter(r1));
@@ -390,7 +390,7 @@ public class TestFilterList {
         Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter }));
     // INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL.
     assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpOnefilterList.filterKeyValue(null));
-    // INCLUDE, SKIP, INCLUDE. 
+    // INCLUDE, SKIP, INCLUDE.
     assertEquals(Filter.ReturnCode.INCLUDE, mpOnefilterList.filterKeyValue(null));
 
     // Check must pass all filter.
@@ -398,7 +398,7 @@ public class TestFilterList {
         Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter }));
     // INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL.
     assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpAllfilterList.filterKeyValue(null));
-    // INCLUDE, SKIP, INCLUDE. 
+    // INCLUDE, SKIP, INCLUDE.
     assertEquals(Filter.ReturnCode.SKIP, mpAllfilterList.filterKeyValue(null));
   }
 
@@ -417,7 +417,7 @@ public class TestFilterList {
       public byte [] toByteArray() {
         return null;
       }
-      
+
       @Override
       public ReturnCode filterKeyValue(Cell ignored) throws IOException {
         return ReturnCode.INCLUDE;
@@ -541,12 +541,13 @@ public class TestFilterList {
     // Value for fam:qual1 should be stripped:
     assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual1));
     final KeyValue transformedQual1 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual1));
-    assertEquals(0, transformedQual1.getValue().length);
+    assertEquals(0, transformedQual1.getValueLength());
 
     // Value for fam:qual2 should not be stripped:
     assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual2));
     final KeyValue transformedQual2 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual2));
-    assertEquals("value", Bytes.toString(transformedQual2.getValue()));
+    assertEquals("value", Bytes.toString(transformedQual2.getValueArray(),
+      transformedQual2.getValueOffset(), transformedQual2.getValueLength()));
 
     // Other keys should be skipped:
     assertEquals(Filter.ReturnCode.SKIP, flist.filterKeyValue(kvQual3));
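
The two assertions above also show the cheapest replacements: when only the value's length matters, Cell.getValueLength() answers without any copy, and Bytes.toString(byte[], int, int) decodes the value in place. A small sketch, assuming illustrative method names:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.util.Bytes;

public class ValueAccessSketch {
  // was: cell.getValue().length == 0 -- allocated a copy just to test its length
  static boolean valueIsEmpty(Cell cell) {
    return cell.getValueLength() == 0;
  }

  // was: Bytes.toString(cell.getValue()) -- copy first, then decode
  static String valueAsString(Cell cell) {
    return Bytes.toString(cell.getValueArray(), cell.getValueOffset(),
        cell.getValueLength());
  }
}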

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
index 5d7fa3d..0e5f08e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
@@ -102,7 +103,7 @@ public class TestHalfStoreFileReader {
     HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
     r.loadFileInfo();
     Cell midKV = r.midkey();
-    byte[] midkey = ((KeyValue.KeyOnlyKeyValue)midKV).getRow();
+    byte[] midkey = CellUtil.cloneRow(midKV);
 
     //System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
 
@@ -167,7 +168,7 @@ public class TestHalfStoreFileReader {
       HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
       r.loadFileInfo();
       Cell midKV = r.midkey();
-      byte[] midkey = ((KeyValue.KeyOnlyKeyValue)midKV).getRow();
+      byte[] midkey = CellUtil.cloneRow(midKV);
 
       Reference bottom = new Reference(midkey, Reference.Range.bottom);
       Reference top = new Reference(midkey, Reference.Range.top);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
index 2d478a4..91115c1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
@@ -37,6 +37,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.Tag;
@@ -74,7 +75,7 @@ public class TestPrefixTreeEncoding {
       CellComparator.COMPARATOR);
 
   private static boolean formatRowNum = false;
-  
+
   @Parameters
   public static Collection<Object[]> parameters() {
     List<Object[]> paramList = new ArrayList<Object[]>();
@@ -88,7 +89,7 @@ public class TestPrefixTreeEncoding {
   public TestPrefixTreeEncoding(boolean includesTag) {
     this.includesTag = includesTag;
   }
- 
+
   @Before
   public void setUp() throws Exception {
     kvset.clear();
@@ -132,7 +133,8 @@ public class TestPrefixTreeEncoding {
         new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(), seekKey.getKeyOffset(), seekKey
             .getKeyLength()), true);
     assertNotNull(seeker.getKeyValue());
-    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1), seeker.getKeyValue().getRow());
+    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1),
+      CellUtil.cloneRow(seeker.getKeyValue()));
 
     // Seek before the last keyvalue;
     seekKey = KeyValueUtil.createFirstDeleteFamilyOnRow(Bytes.toBytes("zzzz"), CF_BYTES);
@@ -140,7 +142,8 @@ public class TestPrefixTreeEncoding {
         new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(), seekKey.getKeyOffset(), seekKey
             .getKeyLength()), true);
     assertNotNull(seeker.getKeyValue());
-    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1), seeker.getKeyValue().getRow());
+    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1),
+      CellUtil.cloneRow(seeker.getKeyValue()));
   }
 
   @Test
@@ -226,7 +229,7 @@ public class TestPrefixTreeEncoding {
         onDiskBytes.length - DataBlockEncoding.ID_SIZE);
     verifySeeking(seeker, readBuffer, batchId);
   }
-  
+
   private void verifySeeking(EncodedSeeker encodeSeeker,
       ByteBuffer encodedData, int batchId) {
     List<KeyValue> kvList = new ArrayList<KeyValue>();

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
index fa0cfec..af8a6cc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
@@ -18,9 +18,6 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
@@ -242,7 +239,7 @@ public class TestHFile extends HBaseTestCase {
 
   /**
    * test none codecs
-   * @param useTags 
+   * @param useTags
    */
   void basicWithSomeCodec(String codec, boolean useTags) throws IOException {
     if (useTags) {
@@ -311,12 +308,12 @@ public class TestHFile extends HBaseTestCase {
       writer.appendMetaBlock("HFileMeta" + i, new Writable() {
         private int val;
         public Writable setVal(int val) { this.val = val; return this; }
-        
+
         @Override
         public void write(DataOutput out) throws IOException {
           out.write(("something to test" + val).getBytes());
         }
-        
+
         @Override
         public void readFields(DataInput in) throws IOException { }
       }.setVal(i));
@@ -330,7 +327,7 @@ public class TestHFile extends HBaseTestCase {
   private void readNumMetablocks(Reader reader, int n) throws IOException {
     for (int i = 0; i < n; i++) {
       ByteBuffer actual = reader.getMetaBlock("HFileMeta" + i, false);
-      ByteBuffer expected = 
+      ByteBuffer expected =
         ByteBuffer.wrap(("something to test" + i).getBytes());
       assertEquals("failed to match metadata",
         Bytes.toStringBinary(expected), Bytes.toStringBinary(actual));
@@ -377,7 +374,7 @@ public class TestHFile extends HBaseTestCase {
   @Test
   public void testNullMetaBlocks() throws Exception {
     if (cacheConf == null) cacheConf = new CacheConfig(conf);
-    for (Compression.Algorithm compressAlgo : 
+    for (Compression.Algorithm compressAlgo :
         HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
       Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
       FSDataOutputStream fout = createFSOutput(mFile);
@@ -512,8 +509,8 @@ public class TestHFile extends HBaseTestCase {
     newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2);
     assertTrue(keyComparator.compare(kv1, newKey) < 0);
     assertTrue((keyComparator.compare(kv2, newKey)) > 0);
-    assertTrue(Arrays.equals(newKey.getFamily(), family));
-    assertTrue(Arrays.equals(newKey.getQualifier(), qualB));
+    assertTrue(Arrays.equals(CellUtil.cloneFamily(newKey), family));
+    assertTrue(Arrays.equals(CellUtil.cloneQualifier(newKey), qualB));
     assertTrue(newKey.getTimestamp() == HConstants.LATEST_TIMESTAMP);
     assertTrue(newKey.getTypeByte() == Type.Maximum.getCode());
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
index 1bfd18c..dfc5569 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
@@ -131,7 +132,7 @@ public class TestHFileBlock {
 
       // generate it or repeat, it should compress well
       if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
-        row = keyValues.get(randomizer.nextInt(keyValues.size())).getRow();
+        row = CellUtil.cloneRow(keyValues.get(randomizer.nextInt(keyValues.size())));
       } else {
         row = new byte[FIELD_LENGTH];
         randomizer.nextBytes(row);
@@ -140,17 +141,16 @@ public class TestHFileBlock {
         family = new byte[FIELD_LENGTH];
         randomizer.nextBytes(family);
       } else {
-        family = keyValues.get(0).getFamily();
+        family = CellUtil.cloneFamily(keyValues.get(0));
       }
       if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
-        qualifier = keyValues.get(
-            randomizer.nextInt(keyValues.size())).getQualifier();
+        qualifier = CellUtil.cloneQualifier(keyValues.get(randomizer.nextInt(keyValues.size())));
       } else {
         qualifier = new byte[FIELD_LENGTH];
         randomizer.nextBytes(qualifier);
       }
       if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
-        value = keyValues.get(randomizer.nextInt(keyValues.size())).getValue();
+        value = CellUtil.cloneValue(keyValues.get(randomizer.nextInt(keyValues.size())));
       } else {
         value = new byte[FIELD_LENGTH];
         randomizer.nextBytes(value);
@@ -837,7 +837,7 @@ public class TestHFileBlock {
                           .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
                           .withChecksumType(ChecksumType.NULL).build();
       HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf,
-          HFileBlock.FILL_HEADER, -1, 
+          HFileBlock.FILL_HEADER, -1,
           0, meta);
       long byteBufferExpectedSize =
           ClassSize.align(ClassSize.estimateBase(buf.getClass(), true)

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
index fc44f3c..ebe35b3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
@@ -480,7 +480,7 @@ public class TestHFileBlockCompatibility {
       this.dataBlockEncoder.encode(kv, dataBlockEncodingCtx, this.userDataStream);
       this.unencodedDataSizeWritten += kv.getLength();
       if (dataBlockEncodingCtx.getHFileContext().isIncludesMvcc()) {
-        this.unencodedDataSizeWritten += WritableUtils.getVIntSize(kv.getMvccVersion());
+        this.unencodedDataSizeWritten += WritableUtils.getVIntSize(kv.getSequenceId());
       }
     }
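
The one rename above that is not about byte copying: getMvccVersion() was a deprecated alias for the cell's internal sequence id, so the size accounting switches to getSequenceId(), which returns the same long. A one-method sketch, with an illustrative class name:

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.io.WritableUtils;

public class SequenceIdSketch {
  // The MVCC version and the sequence id are the same internal number; only
  // the non-deprecated accessor survives the cleanup.
  static int vintSizeOfSequenceId(KeyValue kv) {
    return WritableUtils.getVIntSize(kv.getSequenceId()); // was: kv.getMvccVersion()
  }
}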
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
index 883f60e..253dff8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
@@ -125,7 +125,7 @@ public class TestHFileWriterV2 {
     writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris"));
 
     writer.close();
-    
+
 
     FSDataInputStream fsdis = fs.open(hfilePath);
 
@@ -144,7 +144,7 @@ public class TestHFileWriterV2 {
                         .withIncludesTags(false)
                         .withCompression(compressAlgo)
                         .build();
-    
+
     HFileBlock.FSReader blockReader = new HFileBlock.FSReaderImpl(fsdis, fileSize, meta);
     // Comparator class name is stored in the trailer in version 2.
     CellComparator comparator = trailer.createComparator();
@@ -162,12 +162,12 @@ public class TestHFileWriterV2 {
     dataBlockIndexReader.readMultiLevelIndexRoot(
         blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX),
         trailer.getDataIndexCount());
-    
+
     if (findMidKey) {
       Cell midkey = dataBlockIndexReader.midkey();
       assertNotNull("Midkey should not be null", midkey);
     }
-    
+
     // Meta index.
     metaBlockIndexReader.readRootIndex(
         blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX)
@@ -215,8 +215,10 @@ public class TestHFileWriterV2 {
         }
 
         // A brute-force check to see that all keys and values are correct.
-        assertTrue(Bytes.compareTo(key, keyValues.get(entriesRead).getKey()) == 0);
-        assertTrue(Bytes.compareTo(value, keyValues.get(entriesRead).getValue()) == 0);
+        KeyValue kv = keyValues.get(entriesRead);
+        assertTrue(Bytes.compareTo(key, kv.getKey()) == 0);
+        assertTrue(Bytes.compareTo(value, 0, value.length, kv.getValueArray(), kv.getValueOffset(),
+          kv.getValueLength()) == 0);
 
         ++entriesRead;
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
index e9ba089..9adeaca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
@@ -44,7 +44,6 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
@@ -160,7 +159,7 @@ public class TestHFileWriterV3 {
     writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris"));
 
     writer.close();
-    
+
 
     FSDataInputStream fsdis = fs.open(hfilePath);
 
@@ -192,12 +191,12 @@ public class TestHFileWriterV3 {
     // the root level.
     dataBlockIndexReader.readMultiLevelIndexRoot(
         blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX), trailer.getDataIndexCount());
-    
+
     if (findMidKey) {
       Cell midkey = dataBlockIndexReader.midkey();
       assertNotNull("Midkey should not be null", midkey);
     }
-    
+
     // Meta index.
     metaBlockIndexReader.readRootIndex(
         blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX)
@@ -240,7 +239,7 @@ public class TestHFileWriterV3 {
           tagValue = new byte[tagLen];
           buf.get(tagValue);
         }
-      
+
         if (includeMemstoreTS) {
           ByteArrayInputStream byte_input = new ByteArrayInputStream(buf.array(), buf.arrayOffset()
               + buf.position(), buf.remaining());
@@ -251,11 +250,13 @@ public class TestHFileWriterV3 {
         }
 
         // A brute-force check to see that all keys and values are correct.
-        assertTrue(Bytes.compareTo(key, keyValues.get(entriesRead).getKey()) == 0);
-        assertTrue(Bytes.compareTo(value, keyValues.get(entriesRead).getValue()) == 0);
+        KeyValue kv = keyValues.get(entriesRead);
+        assertTrue(Bytes.compareTo(key, kv.getKey()) == 0);
+        assertTrue(Bytes.compareTo(value, 0, value.length, kv.getValueArray(), kv.getValueOffset(),
+          kv.getValueLength()) == 0);
         if (useTags) {
           assertNotNull(tagValue);
-          KeyValue tkv =  keyValues.get(entriesRead);
+          KeyValue tkv =  kv;
           assertEquals(tagValue.length, tkv.getTagsLength());
           assertTrue(Bytes.compareTo(tagValue, 0, tagValue.length, tkv.getTagsArray(),
               tkv.getTagsOffset(), tkv.getTagsLength()) == 0);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
index 1de21f3..bd5b098 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
@@ -70,11 +70,11 @@ public class TestSeekTo {
     return paramList;
   }
   static boolean switchKVs = false;
-  
+
   public TestSeekTo(DataBlockEncoding encoding) {
     this.encoding = encoding;
   }
-  
+
   @Before
   public void setUp() {
     //reset
@@ -107,7 +107,8 @@ public class TestSeekTo {
     }
   }
   static String toRowStr(Cell kv) {
-    return Bytes.toString(KeyValueUtil.ensureKeyValue(kv).getRow());
+    KeyValue c = KeyValueUtil.ensureKeyValue(kv);
+    return Bytes.toString(c.getRowArray(), c.getRowOffset(), c.getRowLength());
   }
 
   Path makeNewFile(TagUsage tagUsage) throws IOException {
@@ -338,7 +339,7 @@ public class TestSeekTo {
     Configuration conf = TEST_UTIL.getConfiguration();
     HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
     reader.loadFileInfo();
-    HFileBlockIndex.BlockIndexReader blockIndexReader = 
+    HFileBlockIndex.BlockIndexReader blockIndexReader =
       reader.getDataBlockIndexReader();
     System.out.println(blockIndexReader.toString());
     // falls before the start of the file.

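For reference, a standalone sketch of the rewritten toRowStr pattern:
Bytes.toString over the row region rather than over a getRow() copy. The class
name is ours and hbase-common is assumed on the classpath.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class RowStrSketch {
  // Stringify a cell's row straight from the backing array, replacing
  // the Bytes.toString(kv.getRow()) form that forced a copy.
  static String toRowStr(Cell c) {
    return Bytes.toString(c.getRowArray(), c.getRowOffset(), c.getRowLength());
  }

  public static void main(String[] args) {
    Cell kv = new KeyValue(Bytes.toBytes("row-1"), Bytes.toBytes("f"), Bytes.toBytes("q"));
    System.out.println(toRowStr(kv)); // row-1
  }
}
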
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index b0b35fa..90b92bb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -34,8 +34,6 @@ import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.Callable;
 
-import junit.framework.Assert;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -91,6 +89,8 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
 
+import junit.framework.Assert;
+
 /**
  * Simple test for {@link KeyValueSortReducer} and {@link HFileOutputFormat}.
  * Sets up and runs a mapreduce job that writes hfile output.
@@ -201,8 +201,11 @@ public class TestHFileOutputFormat  {
       KeyValue original = kv.clone();
       writer.write(new ImmutableBytesWritable(), kv);
       assertFalse(original.equals(kv));
-      assertTrue(Bytes.equals(original.getRow(), kv.getRow()));
-      assertTrue(CellUtil.matchingColumn(original, kv.getFamily(), kv.getQualifier()));
+      assertTrue(Bytes.equals(original.getRowArray(), original.getRowOffset(),
+        original.getRowLength(), kv.getRowArray(), kv.getRowOffset(), kv.getRowLength()));
+      assertTrue(CellUtil.matchingColumn(original, kv.getFamilyArray(), kv.getFamilyOffset(),
+        kv.getFamilyLength(), kv.getQualifierArray(), kv.getQualifierOffset(),
+        kv.getQualifierLength()));
       assertNotSame(original.getTimestamp(), kv.getTimestamp());
       assertNotSame(HConstants.LATEST_TIMESTAMP, kv.getTimestamp());
 

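A sketch of the two comparisons this hunk switches to, the six-argument
Bytes.equals for rows and the offset-based CellUtil.matchingColumn overload for
columns; class and variable names below are illustrative only.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class MatchingColumnSketch {
  public static void main(String[] args) {
    Cell original = new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("f"), Bytes.toBytes("q"));
    Cell rewritten = new KeyValue(Bytes.toBytes("r2"), Bytes.toBytes("f"), Bytes.toBytes("q"));
    // Row equality over both backing arrays, no getRow() copies.
    boolean sameRow = Bytes.equals(original.getRowArray(), original.getRowOffset(),
        original.getRowLength(), rewritten.getRowArray(), rewritten.getRowOffset(),
        rewritten.getRowLength());
    // Column (family + qualifier) match via the offset-based overload.
    boolean sameColumn = CellUtil.matchingColumn(original, rewritten.getFamilyArray(),
        rewritten.getFamilyOffset(), rewritten.getFamilyLength(),
        rewritten.getQualifierArray(), rewritten.getQualifierOffset(),
        rewritten.getQualifierLength());
    System.out.println(sameRow + " " + sameColumn); // false true
  }
}
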
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 1109ae2..a999624 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -344,8 +344,8 @@ public class TestImportExport {
     assertEquals(now, res[6].getTimestamp());
     t.close();
   }
-  
-  
+
+
   @Test
   public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception {
     TableName EXPORT_TABLE =
@@ -376,8 +376,8 @@ public class TestImportExport {
     //Add second Delete family marker
     d = new Delete(ROW1, now+7);
     exportT.delete(d);
-    
-    
+
+
     String[] args = new String[] {
         "-D" + Export.RAW_SCAN + "=true", EXPORT_TABLE.getNameAsString(),
         FQ_OUTPUT_DIR,
@@ -403,10 +403,10 @@ public class TestImportExport {
     Scan s = new Scan();
     s.setMaxVersions();
     s.setRaw(true);
-    
+
     ResultScanner importedTScanner = importT.getScanner(s);
     Result importedTResult = importedTScanner.next();
-    
+
     ResultScanner exportedTScanner = exportT.getScanner(s);
     Result  exportedTResult =  exportedTScanner.next();
     try
@@ -504,7 +504,7 @@ public class TestImportExport {
     results.close();
     return count;
   }
-  
+
   /**
    * test main method. Import should print help and call System.exit
    */
@@ -586,7 +586,7 @@ public class TestImportExport {
         ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
         KeyValue key = (KeyValue) invocation.getArguments()[1];
         assertEquals("Key", Bytes.toString(writer.get()));
-        assertEquals("row", Bytes.toString(key.getRow()));
+        assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
         return null;
       }
     }).when(ctx).write(any(ImmutableBytesWritable.class), any(KeyValue.class));
@@ -616,7 +616,7 @@ public class TestImportExport {
     args.add("param2");
 
     Import.addFilterAndArguments(configuration, FilterBase.class, args);
-    assertEquals("org.apache.hadoop.hbase.filter.FilterBase", 
+    assertEquals("org.apache.hadoop.hbase.filter.FilterBase",
         configuration.get(Import.FILTER_CLASS_CONF_KEY));
     assertEquals("param1,param2", configuration.get(Import.FILTER_ARGS_CONF_KEY));
   }
@@ -700,5 +700,5 @@ public class TestImportExport {
     public boolean isWALVisited() {
       return isVisited;
     }
-  }  
+  }
 }

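A short illustrative sketch of the CellUtil.cloneRow substitution used here,
for the case where a standalone byte[] is genuinely wanted (class name is ours,
hbase-common assumed):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class CloneRowSketch {
  public static void main(String[] args) {
    Cell key = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), Bytes.toBytes("q"));
    // cloneRow copies the row bytes out of the cell; it is the drop-in
    // replacement for the removed Cell.getRow().
    System.out.println(Bytes.toString(CellUtil.cloneRow(key))); // row
  }
}
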
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
index 6dc1d9f..a12887e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
@@ -33,11 +33,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -45,13 +45,13 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.WALPlayer.WALKeyValueMapper;
-import org.apache.hadoop.hbase.wal.WAL;
-import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MapReduceTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
+import org.apache.hadoop.hbase.wal.WAL;
+import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Mapper.Context;
 import org.apache.hadoop.util.ToolRunner;
@@ -123,7 +123,7 @@ public class TestWALPlayer {
         new String[] {walInputDir, TABLENAME1.getNameAsString(),
         TABLENAME2.getNameAsString() }));
 
-    
+
     // verify the WAL was played into table 2
     Get g = new Get(ROW);
     Result r = t2.get(g);
@@ -151,15 +151,13 @@ public class TestWALPlayer {
     WALKey key = mock(WALKey.class);
     when(key.getTablename()).thenReturn(TableName.valueOf("table"));
     @SuppressWarnings("unchecked")
-    Mapper<WALKey, WALEdit, ImmutableBytesWritable, KeyValue>.Context context =
-        mock(Context.class);
+    Mapper<WALKey, WALEdit, ImmutableBytesWritable, KeyValue>.Context context = mock(Context.class);
     when(context.getConfiguration()).thenReturn(configuration);
 
     WALEdit value = mock(WALEdit.class);
     ArrayList<Cell> values = new ArrayList<Cell>();
-    KeyValue kv1 = mock(KeyValue.class);
-    when(kv1.getFamily()).thenReturn(Bytes.toBytes("family"));
-    when(kv1.getRow()).thenReturn(Bytes.toBytes("row"));
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), null);
+
     values.add(kv1);
     when(value.getCells()).thenReturn(values);
     mapper.setup(context);
@@ -171,7 +169,7 @@ public class TestWALPlayer {
         ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
         KeyValue key = (KeyValue) invocation.getArguments()[1];
         assertEquals("row", Bytes.toString(writer.get()));
-        assertEquals("row", Bytes.toString(key.getRow()));
+        assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
         return null;
       }
     }).when(context).write(any(ImmutableBytesWritable.class), any(KeyValue.class));

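A sketch of why the mock went away: with the deprecated getters removed there
is nothing left to stub, so the test builds a real KeyValue instead. Names
below are illustrative, assuming hbase-common on the classpath.

import java.util.ArrayList;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class RealCellSketch {
  public static void main(String[] args) {
    // A real cell replaces the stubbed getRow()/getFamily() mock; a null
    // qualifier simply becomes an empty one.
    KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), null);
    ArrayList<Cell> values = new ArrayList<Cell>();
    values.add(kv1);
    System.out.println(Bytes.toString(
        kv1.getRowArray(), kv1.getRowOffset(), kv1.getRowLength())); // row
  }
}
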
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
index 013053e..1fcb366 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
@@ -100,7 +100,7 @@ public class TestWALRecordReader {
     fs = TEST_UTIL.getDFSCluster().getFileSystem();
 
     hbaseDir = TEST_UTIL.createRootDir();
-    
+
     logDir = new Path(hbaseDir, HConstants.HREGION_LOGDIR_NAME);
 
     htd = new HTableDescriptor(tableName);
@@ -152,7 +152,7 @@ public class TestWALRecordReader {
     walfactory.shutdown();
     LOG.info("Closed WAL " + log.toString());
 
- 
+
     WALInputFormat input = new WALInputFormat();
     Configuration jobConf = new Configuration(conf);
     jobConf.set("mapreduce.input.fileinputformat.inputdir", logDir.toString());
@@ -257,9 +257,14 @@ public class TestWALRecordReader {
     for (byte[] column : columns) {
       assertTrue(reader.nextKeyValue());
       Cell cell = reader.getCurrentValue().getCells().get(0);
-      if (!Bytes.equals(column, cell.getQualifier())) {
-        assertTrue("expected [" + Bytes.toString(column) + "], actual ["
-            + Bytes.toString(cell.getQualifier()) + "]", false);
+      if (!Bytes.equals(column, 0, column.length, cell.getQualifierArray(),
+        cell.getQualifierOffset(), cell.getQualifierLength())) {
+        assertTrue(
+          "expected ["
+              + Bytes.toString(column)
+              + "], actual ["
+              + Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(),
+                cell.getQualifierLength()) + "]", false);
       }
     }
     assertFalse(reader.nextKeyValue());

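The same region-based equality as above, sketched standalone (illustrative
class name, hbase-common assumed):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class QualifierRegionSketch {
  // Compare an expected column name against the qualifier region of a
  // cell, using the overload the rewritten assertion relies on.
  static boolean qualifierIs(byte[] column, Cell cell) {
    return Bytes.equals(column, 0, column.length, cell.getQualifierArray(),
        cell.getQualifierOffset(), cell.getQualifierLength());
  }

  public static void main(String[] args) {
    Cell c = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q1"));
    System.out.println(qualifierIs(Bytes.toBytes("q1"), c)); // true
  }
}
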
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
index aa57e22..4ce228f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
@@ -285,7 +285,7 @@ public class TestBulkLoad {
 
     @Override
     protected boolean matchesSafely(WALEdit item) {
-      assertTrue(Arrays.equals(item.getCells().get(0).getQualifier(), typeBytes));
+      assertTrue(Arrays.equals(CellUtil.cloneQualifier(item.getCells().get(0)), typeBytes));
       BulkLoadDescriptor desc;
       try {
         desc = WALEdit.getBulkLoadDescriptor(item.getCells().get(0));

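An illustrative sketch of the cloneQualifier call the matcher now relies on
(class name is ours):

import java.util.Arrays;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class CloneQualifierSketch {
  public static void main(String[] args) {
    byte[] typeBytes = Bytes.toBytes("type");
    Cell cell = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), typeBytes);
    // cloneQualifier hands back a fresh byte[], which is what the
    // Arrays.equals check in the matcher needs.
    System.out.println(Arrays.equals(CellUtil.cloneQualifier(cell), typeBytes)); // true
  }
}
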
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java
index c4e0a42..684839d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java
@@ -25,6 +25,7 @@ import junit.framework.TestCase;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -49,15 +50,18 @@ public class TestCellSkipListSet extends TestCase {
     assertEquals(1, this.csls.size());
     Cell first = this.csls.first();
     assertTrue(kv.equals(first));
-    assertTrue(Bytes.equals(kv.getValue(), first.getValue()));
+    assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(),
+      first.getValueArray(), first.getValueOffset(), first.getValueLength()));
     // Now try overwriting
     byte [] overwriteValue = Bytes.toBytes("overwrite");
     KeyValue overwrite = new KeyValue(bytes, bytes, bytes, overwriteValue);
     this.csls.add(overwrite);
     assertEquals(1, this.csls.size());
     first = this.csls.first();
-    assertTrue(Bytes.equals(overwrite.getValue(), first.getValue()));
-    assertFalse(Bytes.equals(overwrite.getValue(), kv.getValue()));
+    assertTrue(Bytes.equals(overwrite.getValueArray(), overwrite.getValueOffset(),
+      overwrite.getValueLength(), first.getValueArray(), first.getValueOffset(),
+      first.getValueLength()));
+    assertFalse(Bytes.equals(CellUtil.cloneValue(overwrite), CellUtil.cloneValue(kv)));
   }
 
   public void testIterator() throws Exception {
@@ -71,8 +75,10 @@ public class TestCellSkipListSet extends TestCase {
     // Assert that we added 'total' values and that they are in order
     int count = 0;
     for (Cell kv: this.csls) {
-      assertEquals("" + count, Bytes.toString(kv.getQualifier()));
-      assertTrue(Bytes.equals(kv.getValue(), value1));
+      assertEquals("" + count,
+        Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
+      assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value1,
+        0, value1.length));
       count++;
     }
     assertEquals(total, count);
@@ -83,9 +89,11 @@ public class TestCellSkipListSet extends TestCase {
     // Assert that we added 'total' values and that they are in order and that
     // we are getting back value2
     count = 0;
-    for (Cell kv: this.csls) {
-      assertEquals("" + count, Bytes.toString(kv.getQualifier()));
-      assertTrue(Bytes.equals(kv.getValue(), value2));
+    for (Cell kv : this.csls) {
+      assertEquals("" + count,
+        Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
+      assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value2,
+        0, value2.length));
       count++;
     }
     assertEquals(total, count);
@@ -103,8 +111,10 @@ public class TestCellSkipListSet extends TestCase {
     int count = 0;
     for (Iterator<Cell> i = this.csls.descendingIterator(); i.hasNext();) {
       Cell kv = i.next();
-      assertEquals("" + (total - (count + 1)), Bytes.toString(kv.getQualifier()));
-      assertTrue(Bytes.equals(kv.getValue(), value1));
+      assertEquals("" + (total - (count + 1)),
+        Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
+      assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value1,
+        0, value1.length));
       count++;
     }
     assertEquals(total, count);
@@ -117,8 +127,10 @@ public class TestCellSkipListSet extends TestCase {
     count = 0;
     for (Iterator<Cell> i = this.csls.descendingIterator(); i.hasNext();) {
       Cell kv = i.next();
-      assertEquals("" + (total - (count + 1)), Bytes.toString(kv.getQualifier()));
-      assertTrue(Bytes.equals(kv.getValue(), value2));
+      assertEquals("" + (total - (count + 1)),
+        Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
+      assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value2,
+        0, value2.length));
       count++;
     }
     assertEquals(total, count);
@@ -145,8 +157,10 @@ public class TestCellSkipListSet extends TestCase {
       this.csls.add(new KeyValue(bytes, bytes, Bytes.toBytes("" + i), value2));
     }
     tail = this.csls.tailSet(splitter);
-    assertTrue(Bytes.equals(tail.first().getValue(), value2));
+    assertTrue(Bytes.equals(tail.first().getValueArray(), tail.first().getValueOffset(),
+      tail.first().getValueLength(), value2, 0, value2.length));
     head = this.csls.headSet(splitter);
-    assertTrue(Bytes.equals(head.first().getValue(), value2));
+    assertTrue(Bytes.equals(head.first().getValueArray(), head.first().getValueOffset(),
+      head.first().getValueLength(), value2, 0, value2.length));
   }
 }
\ No newline at end of file

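This file mixes both idioms, in-place region comparison and CellUtil clones; a
small sketch of the trade-off, with illustrative names and hbase-common
assumed:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ValueEqualsSketch {
  public static void main(String[] args) {
    Cell a = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v1"));
    Cell b = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v2"));
    // Hot-path form: no copies, both value regions compared in place.
    boolean inPlace = Bytes.equals(a.getValueArray(), a.getValueOffset(), a.getValueLength(),
        b.getValueArray(), b.getValueOffset(), b.getValueLength());
    // One-off form: cloning reads more simply where copies are cheap.
    boolean viaClones = Bytes.equals(CellUtil.cloneValue(a), CellUtil.cloneValue(b));
    System.out.println(inPlace + " " + viaClones); // false false
  }
}
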
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index fce81fc..5b86169 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
@@ -207,8 +207,8 @@ public class TestCompoundBloomFilter {
       // Test for false negatives (not allowed).
       int numChecked = 0;
       for (KeyValue kv : kvs) {
-        byte[] row = kv.getRow();
-        boolean present = isInBloom(scanner, row, kv.getQualifier());
+        byte[] row = CellUtil.cloneRow(kv);
+        boolean present = isInBloom(scanner, row, CellUtil.cloneQualifier(kv));
         assertTrue(testIdMsg + " Bloom filter false negative on row "
             + Bytes.toStringBinary(row) + " after " + numChecked
             + " successful checks", present);
@@ -358,9 +358,10 @@ public class TestCompoundBloomFilter {
     KeyValue rowKV = KeyValueUtil.createKeyValueFromKey(rowKey);
     KeyValue rowColKV = KeyValueUtil.createKeyValueFromKey(rowColKey);
     assertEquals(rowKV.getTimestamp(), rowColKV.getTimestamp());
-    assertEquals(Bytes.toStringBinary(rowKV.getRow()),
-        Bytes.toStringBinary(rowColKV.getRow()));
-    assertEquals(0, rowKV.getQualifier().length);
+    assertEquals(Bytes.toStringBinary(rowKV.getRowArray(), rowKV.getRowOffset(),
+      rowKV.getRowLength()), Bytes.toStringBinary(rowColKV.getRowArray(), rowColKV.getRowOffset(),
+      rowColKV.getRowLength()));
+    assertEquals(0, rowKV.getQualifierLength());
   }
 
 

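A last illustrative sketch: row-only keys and the getQualifierLength() accessor
this hunk prefers over measuring a getQualifier() copy (class name is ours,
hbase-common assumed).

import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class RowOnlyKeySketch {
  public static void main(String[] args) {
    // A row-only key carries an empty qualifier; the length accessor
    // answers that directly, without materialising a byte[] first.
    KeyValue rowKV = new KeyValue(Bytes.toBytes("row-7"), Bytes.toBytes("f"), null);
    System.out.println(Bytes.toStringBinary(CellUtil.cloneRow(rowKV))); // row-7
    System.out.println(rowKV.getQualifierLength() == 0);                // true
  }
}
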
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3d30892/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
index 7857590..4848d66 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
@@ -70,7 +70,7 @@ public class TestDefaultMemStore extends TestCase {
   private static final int QUALIFIER_COUNT = ROW_COUNT;
   private static final byte [] FAMILY = Bytes.toBytes("column");
   private MultiVersionConsistencyControl mvcc;
-  private AtomicLong startSeqNum = new AtomicLong(0); 
+  private AtomicLong startSeqNum = new AtomicLong(0);
 
   @Override
   public void setUp() throws Exception {
@@ -88,7 +88,9 @@ public class TestDefaultMemStore extends TestCase {
     this.memstore.add(samekey);
     Cell found = this.memstore.cellSet.first();
     assertEquals(1, this.memstore.cellSet.size());
-    assertTrue(Bytes.toString(found.getValue()), CellUtil.matchingValue(samekey, found));
+    assertTrue(
+      Bytes.toString(found.getValueArray(), found.getValueOffset(), found.getValueLength()),
+      CellUtil.matchingValue(samekey, found));
   }
 
   /**
@@ -179,7 +181,7 @@ public class TestDefaultMemStore extends TestCase {
   /**
    * A simple test which verifies the 3 possible states when scanning across snapshot.
    * @throws IOException
-   * @throws CloneNotSupportedException 
+   * @throws CloneNotSupportedException
    */
   public void testScanAcrossSnapshot2() throws IOException, CloneNotSupportedException {
     // we are going to the scanning across snapshot with two kvs
@@ -843,7 +845,7 @@ public class TestDefaultMemStore extends TestCase {
     assert(newSize > oldSize);
     //The kv1 should be removed.
     assert(memstore.cellSet.size() == 2);
-    
+
     KeyValue kv4 = KeyValueTestUtil.create("r", "f", "q", 104, "v");
     kv4.setSequenceId(1);
     l.clear(); l.add(kv4);
@@ -855,12 +857,12 @@ public class TestDefaultMemStore extends TestCase {
   }
 
   ////////////////////////////////////
-  // Test for periodic memstore flushes 
+  // Test for periodic memstore flushes
   // based on time of oldest edit
   ////////////////////////////////////
 
   /**
-   * Tests that the timeOfOldestEdit is updated correctly for the 
+   * Tests that the timeOfOldestEdit is updated correctly for the
    * various edit operations in memstore.
    * @throws Exception
    */
@@ -876,7 +878,7 @@ public class TestDefaultMemStore extends TestCase {
       memstore.add(KeyValueTestUtil.create("r", "f", "q", 100, "v"));
       t = memstore.timeOfOldestEdit();
       assertTrue(t == 1234);
-      // snapshot() will reset timeOfOldestEdit. The method will also assert the 
+      // snapshot() will reset timeOfOldestEdit. The method will also assert the
       // value is reset to Long.MAX_VALUE
       t = runSnapshot(memstore);
 
@@ -903,7 +905,7 @@ public class TestDefaultMemStore extends TestCase {
    * Tests the HRegion.shouldFlush method - adds an edit in the memstore
    * and checks that shouldFlush returns true, and another where it disables
    * the periodic flush functionality and tests whether shouldFlush returns
-   * false. 
+   * false.
    * @throws Exception
    */
   public void testShouldFlush() throws Exception {
@@ -973,7 +975,7 @@ public class TestDefaultMemStore extends TestCase {
     long t = 1234;
     @Override
     public long currentTime() {
-      return t; 
+      return t;
     }
     public void setCurrentTimeMillis(long t) {
       this.t = t;