Posted to commits@hbase.apache.org by ch...@apache.org on 2017/08/30 02:46:49 UTC

hbase git commit: HBASE-15947 Classes used only for tests included in main code base

Repository: hbase
Updated Branches:
  refs/heads/branch-2 ca161288f -> a722c7ecd


HBASE-15947 Classes used only for tests included in main code base

Signed-off-by: Chia-Ping Tsai <ch...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a722c7ec
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a722c7ec
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a722c7ec

Branch: refs/heads/branch-2
Commit: a722c7ecdf4c4ae8ac7968939d57f1523a08371e
Parents: ca16128
Author: Sean Mackrory <ma...@apache.org>
Authored: Sun Aug 27 00:30:04 2017 +0800
Committer: Chia-Ping Tsai <ch...@gmail.com>
Committed: Wed Aug 30 10:40:45 2017 +0800

----------------------------------------------------------------------
 .../hbase/util/test/LoadTestKVGenerator.java    | 121 ----
 .../hbase/util/test/RedundantKVGenerator.java   | 567 -------------------
 .../hadoop/hbase/util/LoadTestKVGenerator.java  | 119 ++++
 .../hadoop/hbase/util/RedundantKVGenerator.java | 563 ++++++++++++++++++
 .../hbase/util/TestLoadTestKVGenerator.java     |   1 -
 .../hbase/IntegrationTestLazyCfLoading.java     |   2 +-
 .../StripeCompactionsPerformanceEvaluation.java |   2 +-
 .../column/data/TestColumnDataRandom.java       |   2 +-
 .../row/data/TestRowDataRandomKeyValues.java    |   2 +-
 .../TestRowDataRandomKeyValuesWithTags.java     |   2 +-
 .../hadoop/hbase/client/TestFastFail.java       |   2 +-
 .../io/encoding/TestDataBlockEncoders.java      |   2 +-
 .../hbase/io/encoding/TestEncodedSeekers.java   |   2 +-
 .../io/hfile/TestHFileDataBlockEncoder.java     |   2 +-
 .../hbase/io/hfile/TestHFileEncryption.java     |   2 +-
 .../TestSimpleRegionNormalizerOnCluster.java    |   3 +-
 .../util/LoadTestDataGeneratorWithMOB.java      |   2 -
 .../hadoop/hbase/util/MultiThreadedAction.java  |   1 -
 .../hbase/util/test/LoadTestDataGenerator.java  |   1 +
 19 files changed, 694 insertions(+), 704 deletions(-)
----------------------------------------------------------------------
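For downstream test code, the practical effect of this move is an import change; the class bodies are carried over essentially unchanged apart from the package declaration and a few now-unneeded imports. A minimal before/after sketch, mirroring the import hunks further below:

// before: the generators lived in the main source tree
import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;

// after: the generators live in the test source tree under org.apache.hadoop.hbase.util
import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
import org.apache.hadoop.hbase.util.RedundantKVGenerator;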


http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
deleted file mode 100644
index 4edd270..0000000
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.hbase.util.test;
-
-import java.util.Random;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.MD5Hash;
-
-/**
- * A generator of random keys and values for load testing. Keys are generated
- * by converting numeric indexes to strings and prefixing them with an MD5
- * hash. Values are generated by selecting value size in the configured range
- * and generating a pseudo-random sequence of bytes seeded by key, column
- * qualifier, and value size.
- */
-@InterfaceAudience.Private
-public class LoadTestKVGenerator {
-
-  private static final Log LOG = LogFactory.getLog(LoadTestKVGenerator.class);
-  private static int logLimit = 10;
-
-  /** A random number generator for determining value size */
-  private Random randomForValueSize = new Random();
-
-  private final int minValueSize;
-  private final int maxValueSize;
-
-  public LoadTestKVGenerator(int minValueSize, int maxValueSize) {
-    if (minValueSize <= 0 || maxValueSize <= 0) {
-      throw new IllegalArgumentException("Invalid min/max value sizes: " +
-          minValueSize + ", " + maxValueSize);
-    }
-    this.minValueSize = minValueSize;
-    this.maxValueSize = maxValueSize;
-  }
-
-  /**
-   * Verifies that the given byte array is the same as what would be generated
-   * for the given seed strings (row/cf/column/...). We are assuming that the
-   * value size is correct, and only verify the actual bytes. However, if the
-   * min/max value sizes are set sufficiently high, an accidental match should be
-   * extremely improbable.
-   */
-  public static boolean verify(byte[] value, byte[]... seedStrings) {
-    byte[] expectedData = getValueForRowColumn(value.length, seedStrings);
-    boolean equals = Bytes.equals(expectedData, value);
-    if (!equals && LOG.isDebugEnabled() && logLimit > 0) {
-      LOG.debug("verify failed, expected value: " + Bytes.toStringBinary(expectedData)
-        + " actual value: "+ Bytes.toStringBinary(value));
-      logLimit--; // this is not thread safe, but at worst we will have more logging
-    }
-    return equals;
-  }
-
-  /**
-   * Converts the given key to string, and prefixes it with the MD5 hash of
-   * the index's string representation.
-   */
-  public static String md5PrefixedKey(long key) {
-    String stringKey = Long.toString(key);
-    String md5hash = MD5Hash.getMD5AsHex(Bytes.toBytes(stringKey));
-
-    // flip the key to randomize
-    return md5hash + "-" + stringKey;
-  }
-
-  /**
-   * Generates a value for the given key index and column qualifier. Size is
-   * selected randomly in the configured range. The generated value depends
-   * only on the combination of the strings passed (key/cf/column/...) and the selected
-   * value size. This allows to verify the actual value bytes when reading, as done
-   * in {#verify(byte[], byte[]...)}
-   * This method is as thread-safe as Random class. It appears that the worst bug ever
-   * found with the latter is that multiple threads will get some duplicate values, which
-   * we don't care about.
-   */
-  public byte[] generateRandomSizeValue(byte[]... seedStrings) {
-    int dataSize = minValueSize;
-    if(minValueSize != maxValueSize) {
-      dataSize = minValueSize + randomForValueSize.nextInt(Math.abs(maxValueSize - minValueSize));
-    }
-    return getValueForRowColumn(dataSize, seedStrings);
-  }
-
-  /**
-   * Generates random bytes of the given size for the given row and column
-   * qualifier. The random seed is fully determined by these parameters.
-   */
-  private static byte[] getValueForRowColumn(int dataSize, byte[]... seedStrings) {
-    long seed = dataSize;
-    for (byte[] str : seedStrings) {
-      final String bytesString = Bytes.toString(str);
-      if (bytesString != null) {
-        seed += bytesString.hashCode();
-      }
-    }
-    Random seededRandom = new Random(seed);
-    byte[] randomBytes = new byte[dataSize];
-    seededRandom.nextBytes(randomBytes);
-    return randomBytes;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
deleted file mode 100644
index 2975711..0000000
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
+++ /dev/null
@@ -1,567 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.hbase.util.test;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-import org.apache.hadoop.hbase.ArrayBackedTag;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.ByteBufferKeyValue;
-import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.util.ByteBufferUtils;
-import org.apache.hadoop.io.WritableUtils;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes;
-
-/**
- * Generate list of key values which are very useful to test data block encoding
- * and compression.
- */
-@edu.umd.cs.findbugs.annotations.SuppressWarnings(
-    value="RV_ABSOLUTE_VALUE_OF_RANDOM_INT",
-    justification="Should probably fix")
-@InterfaceAudience.Private
-public class RedundantKVGenerator {
-  // row settings
-  static byte[] DEFAULT_COMMON_PREFIX = new byte[0];
-  static int DEFAULT_NUMBER_OF_ROW_PREFIXES = 10;
-  static int DEFAULT_AVERAGE_PREFIX_LENGTH = 6;
-  static int DEFAULT_PREFIX_LENGTH_VARIANCE = 3;
-  static int DEFAULT_AVERAGE_SUFFIX_LENGTH = 3;
-  static int DEFAULT_SUFFIX_LENGTH_VARIANCE = 3;
-  static int DEFAULT_NUMBER_OF_ROW = 500;
-
-  // qualifier
-  static float DEFAULT_CHANCE_FOR_SAME_QUALIFIER = 0.5f;
-  static float DEFAULT_CHANCE_FOR_SIMILIAR_QUALIFIER = 0.4f;
-  static int DEFAULT_AVERAGE_QUALIFIER_LENGTH = 9;
-  static int DEFAULT_QUALIFIER_LENGTH_VARIANCE = 3;
-
-  static int DEFAULT_COLUMN_FAMILY_LENGTH = 9;
-  static int DEFAULT_VALUE_LENGTH = 8;
-  static float DEFAULT_CHANCE_FOR_ZERO_VALUE = 0.5f;
-
-  static int DEFAULT_BASE_TIMESTAMP_DIVIDE = 1000000;
-  static int DEFAULT_TIMESTAMP_DIFF_SIZE = 100000000;
-
-  /**
-   * Default constructor, assumes all parameters from class constants.
-   */
-  public RedundantKVGenerator() {
-    this(new Random(42L),
-        DEFAULT_NUMBER_OF_ROW_PREFIXES,
-        DEFAULT_AVERAGE_PREFIX_LENGTH,
-        DEFAULT_PREFIX_LENGTH_VARIANCE,
-        DEFAULT_AVERAGE_SUFFIX_LENGTH,
-        DEFAULT_SUFFIX_LENGTH_VARIANCE,
-        DEFAULT_NUMBER_OF_ROW,
-
-        DEFAULT_CHANCE_FOR_SAME_QUALIFIER,
-        DEFAULT_CHANCE_FOR_SIMILIAR_QUALIFIER,
-        DEFAULT_AVERAGE_QUALIFIER_LENGTH,
-        DEFAULT_QUALIFIER_LENGTH_VARIANCE,
-
-        DEFAULT_COLUMN_FAMILY_LENGTH,
-        DEFAULT_VALUE_LENGTH,
-        DEFAULT_CHANCE_FOR_ZERO_VALUE,
-
-        DEFAULT_BASE_TIMESTAMP_DIVIDE,
-        DEFAULT_TIMESTAMP_DIFF_SIZE
-    );
-  }
-
-
-  /**
-   * Various configuration options for generating key values
-   * @param randomizer pick things by random
-   */
-  public RedundantKVGenerator(Random randomizer,
-      int numberOfRowPrefixes,
-      int averagePrefixLength,
-      int prefixLengthVariance,
-      int averageSuffixLength,
-      int suffixLengthVariance,
-      int numberOfRows,
-
-      float chanceForSameQualifier,
-      float chanceForSimiliarQualifier,
-      int averageQualifierLength,
-      int qualifierLengthVariance,
-
-      int columnFamilyLength,
-      int valueLength,
-      float chanceForZeroValue,
-
-      int baseTimestampDivide,
-      int timestampDiffSize
-      ) {
-    this.randomizer = randomizer;
-
-    this.commonPrefix = DEFAULT_COMMON_PREFIX;
-    this.numberOfRowPrefixes = numberOfRowPrefixes;
-    this.averagePrefixLength = averagePrefixLength;
-    this.prefixLengthVariance = prefixLengthVariance;
-    this.averageSuffixLength = averageSuffixLength;
-    this.suffixLengthVariance = suffixLengthVariance;
-    this.numberOfRows = numberOfRows;
-
-    this.chanceForSameQualifier = chanceForSameQualifier;
-    this.chanceForSimilarQualifier = chanceForSimiliarQualifier;
-    this.averageQualifierLength = averageQualifierLength;
-    this.qualifierLengthVariance = qualifierLengthVariance;
-
-    this.columnFamilyLength = columnFamilyLength;
-    this.valueLength = valueLength;
-    this.chanceForZeroValue = chanceForZeroValue;
-
-    this.baseTimestampDivide = baseTimestampDivide;
-    this.timestampDiffSize = timestampDiffSize;
-  }
-
-  /** Used to generate dataset */
-  private Random randomizer;
-
-  // row settings
-  private byte[] commonPrefix;//global prefix before rowPrefixes
-  private int numberOfRowPrefixes;
-  private int averagePrefixLength = 6;
-  private int prefixLengthVariance = 3;
-  private int averageSuffixLength = 3;
-  private int suffixLengthVariance = 3;
-  private int numberOfRows = 500;
-
-  //family
-  private byte[] family;
-
-  // qualifier
-  private float chanceForSameQualifier = 0.5f;
-  private float chanceForSimilarQualifier = 0.4f;
-  private int averageQualifierLength = 9;
-  private int qualifierLengthVariance = 3;
-
-  private int columnFamilyLength = 9;
-  private int valueLength = 8;
-  private float chanceForZeroValue = 0.5f;
-
-  private int baseTimestampDivide = 1000000;
-  private int timestampDiffSize = 100000000;
-
-  private List<byte[]> generateRows() {
-    // generate prefixes
-    List<byte[]> prefixes = new ArrayList<>();
-    prefixes.add(new byte[0]);
-    for (int i = 1; i < numberOfRowPrefixes; ++i) {
-      int prefixLength = averagePrefixLength;
-      prefixLength += randomizer.nextInt(2 * prefixLengthVariance + 1) -
-          prefixLengthVariance;
-      byte[] newPrefix = new byte[prefixLength];
-      randomizer.nextBytes(newPrefix);
-      byte[] newPrefixWithCommon = newPrefix;
-      prefixes.add(newPrefixWithCommon);
-    }
-
-    // generate rest of the row
-    List<byte[]> rows = new ArrayList<>();
-    for (int i = 0; i < numberOfRows; ++i) {
-      int suffixLength = averageSuffixLength;
-      suffixLength += randomizer.nextInt(2 * suffixLengthVariance + 1) -
-          suffixLengthVariance;
-      int randomPrefix = randomizer.nextInt(prefixes.size());
-      byte[] row = new byte[prefixes.get(randomPrefix).length +
-                            suffixLength];
-      byte[] rowWithCommonPrefix = Bytes.concat(commonPrefix, row);
-      rows.add(rowWithCommonPrefix);
-    }
-
-    return rows;
-  }
-
-  /**
-   * Generate test data useful to test encoders.
-   * @param howMany How many Key values should be generated.
-   * @return sorted list of key values
-   */
-  public List<KeyValue> generateTestKeyValues(int howMany) {
-    return generateTestKeyValues(howMany, false);
-  }
-  /**
-   * Generate test data useful to test encoders.
-   * @param howMany How many Key values should be generated.
-   * @return sorted list of key values
-   */
-  public List<KeyValue> generateTestKeyValues(int howMany, boolean useTags) {
-    List<KeyValue> result = new ArrayList<>();
-
-    List<byte[]> rows = generateRows();
-    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
-
-    if(family==null){
-      family = new byte[columnFamilyLength];
-      randomizer.nextBytes(family);
-    }
-
-    long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide;
-
-    byte[] value = new byte[valueLength];
-
-    for (int i = 0; i < howMany; ++i) {
-      long timestamp = baseTimestamp;
-      if(timestampDiffSize > 0){
-        timestamp += randomizer.nextInt(timestampDiffSize);
-      }
-      Integer rowId = randomizer.nextInt(rows.size());
-      byte[] row = rows.get(rowId);
-
-      // generate qualifier, sometimes it is same, sometimes similar,
-      // occasionally completely different
-      byte[] qualifier;
-      float qualifierChance = randomizer.nextFloat();
-      if (!rowsToQualifier.containsKey(rowId)
-          || qualifierChance > chanceForSameQualifier + chanceForSimilarQualifier) {
-        int qualifierLength = averageQualifierLength;
-        qualifierLength += randomizer.nextInt(2 * qualifierLengthVariance + 1)
-            - qualifierLengthVariance;
-        qualifier = new byte[qualifierLength];
-        randomizer.nextBytes(qualifier);
-
-        // add it to map
-        if (!rowsToQualifier.containsKey(rowId)) {
-          rowsToQualifier.put(rowId, new ArrayList<>());
-        }
-        rowsToQualifier.get(rowId).add(qualifier);
-      } else if (qualifierChance > chanceForSameQualifier) {
-        // similar qualifier
-        List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
-        byte[] originalQualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers
-            .size()));
-
-        qualifier = new byte[originalQualifier.length];
-        int commonPrefix = randomizer.nextInt(qualifier.length);
-        System.arraycopy(originalQualifier, 0, qualifier, 0, commonPrefix);
-        for (int j = commonPrefix; j < qualifier.length; ++j) {
-          qualifier[j] = (byte) (randomizer.nextInt() & 0xff);
-        }
-
-        rowsToQualifier.get(rowId).add(qualifier);
-      } else {
-        // same qualifier
-        List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
-        qualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
-      }
-
-      if (randomizer.nextFloat() < chanceForZeroValue) {
-        for (int j = 0; j < value.length; ++j) {
-          value[j] = (byte) 0;
-        }
-      } else {
-        randomizer.nextBytes(value);
-      }
-
-      if (useTags) {
-        result.add(new KeyValue(row, family, qualifier, timestamp, value,
-            new Tag[] { new ArrayBackedTag((byte) 1, "value1") }));
-      } else {
-        result.add(new KeyValue(row, family, qualifier, timestamp, value));
-      }
-    }
-
-    Collections.sort(result, CellComparator.COMPARATOR);
-
-    return result;
-  }
-
-  /**
-   * Generate test data useful to test encoders.
-   * @param howMany How many Key values should be generated.
-   * @return sorted list of key values
-   */
-  public List<Cell> generateTestExtendedOffheapKeyValues(int howMany, boolean useTags) {
-    List<Cell> result = new ArrayList<>();
-    List<byte[]> rows = generateRows();
-    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
-
-    if (family == null) {
-      family = new byte[columnFamilyLength];
-      randomizer.nextBytes(family);
-    }
-
-    long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide;
-
-    byte[] value = new byte[valueLength];
-
-    for (int i = 0; i < howMany; ++i) {
-      long timestamp = baseTimestamp;
-      if(timestampDiffSize > 0){
-        timestamp += randomizer.nextInt(timestampDiffSize);
-      }
-      Integer rowId = randomizer.nextInt(rows.size());
-      byte[] row = rows.get(rowId);
-
-      // generate qualifier, sometimes it is same, sometimes similar,
-      // occasionally completely different
-      byte[] qualifier;
-      float qualifierChance = randomizer.nextFloat();
-      if (!rowsToQualifier.containsKey(rowId)
-          || qualifierChance > chanceForSameQualifier + chanceForSimilarQualifier) {
-        int qualifierLength = averageQualifierLength;
-        qualifierLength += randomizer.nextInt(2 * qualifierLengthVariance + 1)
-            - qualifierLengthVariance;
-        qualifier = new byte[qualifierLength];
-        randomizer.nextBytes(qualifier);
-
-        // add it to map
-        if (!rowsToQualifier.containsKey(rowId)) {
-          rowsToQualifier.put(rowId, new ArrayList<>());
-        }
-        rowsToQualifier.get(rowId).add(qualifier);
-      } else if (qualifierChance > chanceForSameQualifier) {
-        // similar qualifier
-        List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
-        byte[] originalQualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers
-            .size()));
-
-        qualifier = new byte[originalQualifier.length];
-        int commonPrefix = randomizer.nextInt(qualifier.length);
-        System.arraycopy(originalQualifier, 0, qualifier, 0, commonPrefix);
-        for (int j = commonPrefix; j < qualifier.length; ++j) {
-          qualifier[j] = (byte) (randomizer.nextInt() & 0xff);
-        }
-
-        rowsToQualifier.get(rowId).add(qualifier);
-      } else {
-        // same qualifier
-        List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
-        qualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
-      }
-
-      if (randomizer.nextFloat() < chanceForZeroValue) {
-        for (int j = 0; j < value.length; ++j) {
-          value[j] = (byte) 0;
-        }
-      } else {
-        randomizer.nextBytes(value);
-      }
-      if (useTags) {
-        KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, value,
-            new Tag[] { new ArrayBackedTag((byte) 1, "value1") });
-        ByteBuffer offheapKVBB = ByteBuffer.allocateDirect(keyValue.getLength());
-        ByteBufferUtils.copyFromArrayToBuffer(offheapKVBB, keyValue.getBuffer(),
-          keyValue.getOffset(), keyValue.getLength());
-        ByteBufferKeyValue offheapKV =
-            new ExtendedOffheapKeyValue(offheapKVBB, 0, keyValue.getLength(), 0);
-        result.add(offheapKV);
-      } else {
-        KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, value);
-        ByteBuffer offheapKVBB = ByteBuffer.allocateDirect(keyValue.getLength());
-        ByteBufferUtils.copyFromArrayToBuffer(offheapKVBB, keyValue.getBuffer(),
-          keyValue.getOffset(), keyValue.getLength());
-        ByteBufferKeyValue offheapKV =
-            new ExtendedOffheapKeyValue(offheapKVBB, 0, keyValue.getLength(), 0);
-        result.add(offheapKV);
-      }
-    }
-
-    Collections.sort(result, CellComparator.COMPARATOR);
-
-    return result;
-  }
-
-  static class ExtendedOffheapKeyValue extends ByteBufferKeyValue {
-    public ExtendedOffheapKeyValue(ByteBuffer buf, int offset, int length, long seqId) {
-      super(buf, offset, length, seqId);
-    }
-
-    @Override
-    public byte[] getRowArray() {
-      throw new IllegalArgumentException("getRowArray operation is not allowed");
-    }
-
-    @Override
-    public int getRowOffset() {
-      throw new IllegalArgumentException("getRowOffset operation is not allowed");
-    }
-
-    @Override
-    public byte[] getFamilyArray() {
-      throw new IllegalArgumentException("getFamilyArray operation is not allowed");
-    }
-
-    @Override
-    public int getFamilyOffset() {
-      throw new IllegalArgumentException("getFamilyOffset operation is not allowed");
-    }
-
-    @Override
-    public byte[] getQualifierArray() {
-      throw new IllegalArgumentException("getQualifierArray operation is not allowed");
-    }
-
-    @Override
-    public int getQualifierOffset() {
-      throw new IllegalArgumentException("getQualifierOffset operation is not allowed");
-    }
-
-    @Override
-    public byte[] getValueArray() {
-      throw new IllegalArgumentException("getValueArray operation is not allowed");
-    }
-
-    @Override
-    public int getValueOffset() {
-      throw new IllegalArgumentException("getValueOffset operation is not allowed");
-    }
-
-    @Override
-    public byte[] getTagsArray() {
-      throw new IllegalArgumentException("getTagsArray operation is not allowed");
-    }
-
-    @Override
-    public int getTagsOffset() {
-      throw new IllegalArgumentException("getTagsOffset operation is not allowed");
-    }
-  }
-
-  /**
-   * Convert list of KeyValues to byte buffer.
-   * @param keyValues list of KeyValues to be converted.
-   * @return buffer with content from key values
-   */
-  public static ByteBuffer convertKvToByteBuffer(List<KeyValue> keyValues,
-      boolean includesMemstoreTS) {
-    int totalSize = 0;
-    for (KeyValue kv : keyValues) {
-      totalSize += kv.getLength();
-      if (includesMemstoreTS) {
-        totalSize += WritableUtils.getVIntSize(kv.getSequenceId());
-      }
-    }
-
-    ByteBuffer result = ByteBuffer.allocate(totalSize);
-    for (KeyValue kv : keyValues) {
-      result.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
-      if (includesMemstoreTS) {
-        ByteBufferUtils.writeVLong(result, kv.getSequenceId());
-      }
-    }
-    return result;
-  }
-  
-  
-  /************************ get/set ***********************************/
-  
-  public RedundantKVGenerator setCommonPrefix(byte[] prefix){
-    this.commonPrefix = prefix;
-    return this;
-  }
-
-  public RedundantKVGenerator setRandomizer(Random randomizer) {
-    this.randomizer = randomizer;
-    return this;
-  }
-
-  public RedundantKVGenerator setNumberOfRowPrefixes(int numberOfRowPrefixes) {
-    this.numberOfRowPrefixes = numberOfRowPrefixes;
-    return this;
-  }
-
-  public RedundantKVGenerator setAveragePrefixLength(int averagePrefixLength) {
-    this.averagePrefixLength = averagePrefixLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setPrefixLengthVariance(int prefixLengthVariance) {
-    this.prefixLengthVariance = prefixLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setAverageSuffixLength(int averageSuffixLength) {
-    this.averageSuffixLength = averageSuffixLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setSuffixLengthVariance(int suffixLengthVariance) {
-    this.suffixLengthVariance = suffixLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setNumberOfRows(int numberOfRows) {
-    this.numberOfRows = numberOfRows;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForSameQualifier(float chanceForSameQualifier) {
-    this.chanceForSameQualifier = chanceForSameQualifier;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForSimilarQualifier(float chanceForSimiliarQualifier) {
-    this.chanceForSimilarQualifier = chanceForSimiliarQualifier;
-    return this;
-  }
-
-  public RedundantKVGenerator setAverageQualifierLength(int averageQualifierLength) {
-    this.averageQualifierLength = averageQualifierLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setQualifierLengthVariance(int qualifierLengthVariance) {
-    this.qualifierLengthVariance = qualifierLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setColumnFamilyLength(int columnFamilyLength) {
-    this.columnFamilyLength = columnFamilyLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setFamily(byte[] family) {
-    this.family = family;
-    this.columnFamilyLength = family.length;
-    return this;
-  }
-
-  public RedundantKVGenerator setValueLength(int valueLength) {
-    this.valueLength = valueLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForZeroValue(float chanceForZeroValue) {
-    this.chanceForZeroValue = chanceForZeroValue;
-    return this;
-  }
-
-  public RedundantKVGenerator setBaseTimestampDivide(int baseTimestampDivide) {
-    this.baseTimestampDivide = baseTimestampDivide;
-    return this;
-  }
-
-  public RedundantKVGenerator setTimestampDiffSize(int timestampDiffSize) {
-    this.timestampDiffSize = timestampDiffSize;
-    return this;
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
new file mode 100644
index 0000000..2c4d897
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.util.Random;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+/**
+ * A generator of random keys and values for load testing. Keys are generated
+ * by converting numeric indexes to strings and prefixing them with an MD5
+ * hash. Values are generated by selecting value size in the configured range
+ * and generating a pseudo-random sequence of bytes seeded by key, column
+ * qualifier, and value size.
+ */
+@InterfaceAudience.Private
+public class LoadTestKVGenerator {
+
+  private static final Log LOG = LogFactory.getLog(LoadTestKVGenerator.class);
+  private static int logLimit = 10;
+
+  /** A random number generator for determining value size */
+  private Random randomForValueSize = new Random();
+
+  private final int minValueSize;
+  private final int maxValueSize;
+
+  public LoadTestKVGenerator(int minValueSize, int maxValueSize) {
+    if (minValueSize <= 0 || maxValueSize <= 0) {
+      throw new IllegalArgumentException("Invalid min/max value sizes: " +
+          minValueSize + ", " + maxValueSize);
+    }
+    this.minValueSize = minValueSize;
+    this.maxValueSize = maxValueSize;
+  }
+
+  /**
+   * Verifies that the given byte array is the same as what would be generated
+   * for the given seed strings (row/cf/column/...). We are assuming that the
+   * value size is correct, and only verify the actual bytes. However, if the
+   * min/max value sizes are set sufficiently high, an accidental match should be
+   * extremely improbable.
+   */
+  public static boolean verify(byte[] value, byte[]... seedStrings) {
+    byte[] expectedData = getValueForRowColumn(value.length, seedStrings);
+    boolean equals = Bytes.equals(expectedData, value);
+    if (!equals && LOG.isDebugEnabled() && logLimit > 0) {
+      LOG.debug("verify failed, expected value: " + Bytes.toStringBinary(expectedData)
+        + " actual value: "+ Bytes.toStringBinary(value));
+      logLimit--; // this is not thread safe, but at worst we will have more logging
+    }
+    return equals;
+  }
+
+  /**
+   * Converts the given key to string, and prefixes it with the MD5 hash of
+   * the index's string representation.
+   */
+  public static String md5PrefixedKey(long key) {
+    String stringKey = Long.toString(key);
+    String md5hash = MD5Hash.getMD5AsHex(Bytes.toBytes(stringKey));
+
+    // flip the key to randomize
+    return md5hash + "-" + stringKey;
+  }
+
+  /**
+   * Generates a value for the given key index and column qualifier. Size is
+   * selected randomly in the configured range. The generated value depends
+   * only on the combination of the strings passed (key/cf/column/...) and the selected
+   * value size. This allows to verify the actual value bytes when reading, as done
+   * in {#verify(byte[], byte[]...)}
+   * This method is as thread-safe as Random class. It appears that the worst bug ever
+   * found with the latter is that multiple threads will get some duplicate values, which
+   * we don't care about.
+   */
+  public byte[] generateRandomSizeValue(byte[]... seedStrings) {
+    int dataSize = minValueSize;
+    if(minValueSize != maxValueSize) {
+      dataSize = minValueSize + randomForValueSize.nextInt(Math.abs(maxValueSize - minValueSize));
+    }
+    return getValueForRowColumn(dataSize, seedStrings);
+  }
+
+  /**
+   * Generates random bytes of the given size for the given row and column
+   * qualifier. The random seed is fully determined by these parameters.
+   */
+  private static byte[] getValueForRowColumn(int dataSize, byte[]... seedStrings) {
+    long seed = dataSize;
+    for (byte[] str : seedStrings) {
+      final String bytesString = Bytes.toString(str);
+      if (bytesString != null) {
+        seed += bytesString.hashCode();
+      }
+    }
+    Random seededRandom = new Random(seed);
+    byte[] randomBytes = new byte[dataSize];
+    seededRandom.nextBytes(randomBytes);
+    return randomBytes;
+  }
+
+}

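As context for the file added above, a minimal usage sketch of the generate-then-verify round trip its javadoc describes. The generator, Bytes, md5PrefixedKey and verify come from the code above; the wrapper class and the concrete sizes are illustrative only.

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LoadTestKVGenerator;

public class LoadTestKVGeneratorExample {
  public static void main(String[] args) {
    // value sizes are picked uniformly in [10, 20)
    LoadTestKVGenerator gen = new LoadTestKVGenerator(10, 20);

    // keys are the numeric index prefixed with the MD5 hash of its string form
    byte[] row = Bytes.toBytes(LoadTestKVGenerator.md5PrefixedKey(42L));
    byte[] qualifier = Bytes.toBytes("q1");

    // the value depends only on the seed strings and the chosen size...
    byte[] value = gen.generateRandomSizeValue(row, qualifier);

    // ...so a reader can re-derive the expected bytes and compare
    System.out.println("verified: " + LoadTestKVGenerator.verify(value, row, qualifier));
  }
}
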
http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
new file mode 100644
index 0000000..4f167a4
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
@@ -0,0 +1,563 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.hadoop.hbase.ArrayBackedTag;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ByteBufferKeyValue;
+import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.io.WritableUtils;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes;
+
+/**
+ * Generate list of key values which are very useful to test data block encoding
+ * and compression.
+ */
+@edu.umd.cs.findbugs.annotations.SuppressWarnings(
+    value="RV_ABSOLUTE_VALUE_OF_RANDOM_INT",
+    justification="Should probably fix")
+@InterfaceAudience.Private
+public class RedundantKVGenerator {
+  // row settings
+  static byte[] DEFAULT_COMMON_PREFIX = new byte[0];
+  static int DEFAULT_NUMBER_OF_ROW_PREFIXES = 10;
+  static int DEFAULT_AVERAGE_PREFIX_LENGTH = 6;
+  static int DEFAULT_PREFIX_LENGTH_VARIANCE = 3;
+  static int DEFAULT_AVERAGE_SUFFIX_LENGTH = 3;
+  static int DEFAULT_SUFFIX_LENGTH_VARIANCE = 3;
+  static int DEFAULT_NUMBER_OF_ROW = 500;
+
+  // qualifier
+  static float DEFAULT_CHANCE_FOR_SAME_QUALIFIER = 0.5f;
+  static float DEFAULT_CHANCE_FOR_SIMILIAR_QUALIFIER = 0.4f;
+  static int DEFAULT_AVERAGE_QUALIFIER_LENGTH = 9;
+  static int DEFAULT_QUALIFIER_LENGTH_VARIANCE = 3;
+
+  static int DEFAULT_COLUMN_FAMILY_LENGTH = 9;
+  static int DEFAULT_VALUE_LENGTH = 8;
+  static float DEFAULT_CHANCE_FOR_ZERO_VALUE = 0.5f;
+
+  static int DEFAULT_BASE_TIMESTAMP_DIVIDE = 1000000;
+  static int DEFAULT_TIMESTAMP_DIFF_SIZE = 100000000;
+
+  /**
+   * Default constructor, assumes all parameters from class constants.
+   */
+  public RedundantKVGenerator() {
+    this(new Random(42L),
+        DEFAULT_NUMBER_OF_ROW_PREFIXES,
+        DEFAULT_AVERAGE_PREFIX_LENGTH,
+        DEFAULT_PREFIX_LENGTH_VARIANCE,
+        DEFAULT_AVERAGE_SUFFIX_LENGTH,
+        DEFAULT_SUFFIX_LENGTH_VARIANCE,
+        DEFAULT_NUMBER_OF_ROW,
+
+        DEFAULT_CHANCE_FOR_SAME_QUALIFIER,
+        DEFAULT_CHANCE_FOR_SIMILIAR_QUALIFIER,
+        DEFAULT_AVERAGE_QUALIFIER_LENGTH,
+        DEFAULT_QUALIFIER_LENGTH_VARIANCE,
+
+        DEFAULT_COLUMN_FAMILY_LENGTH,
+        DEFAULT_VALUE_LENGTH,
+        DEFAULT_CHANCE_FOR_ZERO_VALUE,
+
+        DEFAULT_BASE_TIMESTAMP_DIVIDE,
+        DEFAULT_TIMESTAMP_DIFF_SIZE
+    );
+  }
+
+
+  /**
+   * Various configuration options for generating key values
+   * @param randomizer pick things by random
+   */
+  public RedundantKVGenerator(Random randomizer,
+      int numberOfRowPrefixes,
+      int averagePrefixLength,
+      int prefixLengthVariance,
+      int averageSuffixLength,
+      int suffixLengthVariance,
+      int numberOfRows,
+
+      float chanceForSameQualifier,
+      float chanceForSimiliarQualifier,
+      int averageQualifierLength,
+      int qualifierLengthVariance,
+
+      int columnFamilyLength,
+      int valueLength,
+      float chanceForZeroValue,
+
+      int baseTimestampDivide,
+      int timestampDiffSize
+      ) {
+    this.randomizer = randomizer;
+
+    this.commonPrefix = DEFAULT_COMMON_PREFIX;
+    this.numberOfRowPrefixes = numberOfRowPrefixes;
+    this.averagePrefixLength = averagePrefixLength;
+    this.prefixLengthVariance = prefixLengthVariance;
+    this.averageSuffixLength = averageSuffixLength;
+    this.suffixLengthVariance = suffixLengthVariance;
+    this.numberOfRows = numberOfRows;
+
+    this.chanceForSameQualifier = chanceForSameQualifier;
+    this.chanceForSimilarQualifier = chanceForSimiliarQualifier;
+    this.averageQualifierLength = averageQualifierLength;
+    this.qualifierLengthVariance = qualifierLengthVariance;
+
+    this.columnFamilyLength = columnFamilyLength;
+    this.valueLength = valueLength;
+    this.chanceForZeroValue = chanceForZeroValue;
+
+    this.baseTimestampDivide = baseTimestampDivide;
+    this.timestampDiffSize = timestampDiffSize;
+  }
+
+  /** Used to generate dataset */
+  private Random randomizer;
+
+  // row settings
+  private byte[] commonPrefix;//global prefix before rowPrefixes
+  private int numberOfRowPrefixes;
+  private int averagePrefixLength = 6;
+  private int prefixLengthVariance = 3;
+  private int averageSuffixLength = 3;
+  private int suffixLengthVariance = 3;
+  private int numberOfRows = 500;
+
+  //family
+  private byte[] family;
+
+  // qualifier
+  private float chanceForSameQualifier = 0.5f;
+  private float chanceForSimilarQualifier = 0.4f;
+  private int averageQualifierLength = 9;
+  private int qualifierLengthVariance = 3;
+
+  private int columnFamilyLength = 9;
+  private int valueLength = 8;
+  private float chanceForZeroValue = 0.5f;
+
+  private int baseTimestampDivide = 1000000;
+  private int timestampDiffSize = 100000000;
+
+  private List<byte[]> generateRows() {
+    // generate prefixes
+    List<byte[]> prefixes = new ArrayList<>();
+    prefixes.add(new byte[0]);
+    for (int i = 1; i < numberOfRowPrefixes; ++i) {
+      int prefixLength = averagePrefixLength;
+      prefixLength += randomizer.nextInt(2 * prefixLengthVariance + 1) -
+          prefixLengthVariance;
+      byte[] newPrefix = new byte[prefixLength];
+      randomizer.nextBytes(newPrefix);
+      byte[] newPrefixWithCommon = newPrefix;
+      prefixes.add(newPrefixWithCommon);
+    }
+
+    // generate rest of the row
+    List<byte[]> rows = new ArrayList<>();
+    for (int i = 0; i < numberOfRows; ++i) {
+      int suffixLength = averageSuffixLength;
+      suffixLength += randomizer.nextInt(2 * suffixLengthVariance + 1) -
+          suffixLengthVariance;
+      int randomPrefix = randomizer.nextInt(prefixes.size());
+      byte[] row = new byte[prefixes.get(randomPrefix).length +
+                            suffixLength];
+      byte[] rowWithCommonPrefix = Bytes.concat(commonPrefix, row);
+      rows.add(rowWithCommonPrefix);
+    }
+
+    return rows;
+  }
+
+  /**
+   * Generate test data useful to test encoders.
+   * @param howMany How many Key values should be generated.
+   * @return sorted list of key values
+   */
+  public List<KeyValue> generateTestKeyValues(int howMany) {
+    return generateTestKeyValues(howMany, false);
+  }
+  /**
+   * Generate test data useful to test encoders.
+   * @param howMany How many Key values should be generated.
+   * @return sorted list of key values
+   */
+  public List<KeyValue> generateTestKeyValues(int howMany, boolean useTags) {
+    List<KeyValue> result = new ArrayList<>();
+
+    List<byte[]> rows = generateRows();
+    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
+
+    if(family==null){
+      family = new byte[columnFamilyLength];
+      randomizer.nextBytes(family);
+    }
+
+    long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide;
+
+    byte[] value = new byte[valueLength];
+
+    for (int i = 0; i < howMany; ++i) {
+      long timestamp = baseTimestamp;
+      if(timestampDiffSize > 0){
+        timestamp += randomizer.nextInt(timestampDiffSize);
+      }
+      Integer rowId = randomizer.nextInt(rows.size());
+      byte[] row = rows.get(rowId);
+
+      // generate qualifier, sometimes it is same, sometimes similar,
+      // occasionally completely different
+      byte[] qualifier;
+      float qualifierChance = randomizer.nextFloat();
+      if (!rowsToQualifier.containsKey(rowId)
+          || qualifierChance > chanceForSameQualifier + chanceForSimilarQualifier) {
+        int qualifierLength = averageQualifierLength;
+        qualifierLength += randomizer.nextInt(2 * qualifierLengthVariance + 1)
+            - qualifierLengthVariance;
+        qualifier = new byte[qualifierLength];
+        randomizer.nextBytes(qualifier);
+
+        // add it to map
+        if (!rowsToQualifier.containsKey(rowId)) {
+          rowsToQualifier.put(rowId, new ArrayList<>());
+        }
+        rowsToQualifier.get(rowId).add(qualifier);
+      } else if (qualifierChance > chanceForSameQualifier) {
+        // similar qualifier
+        List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
+        byte[] originalQualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers
+            .size()));
+
+        qualifier = new byte[originalQualifier.length];
+        int commonPrefix = randomizer.nextInt(qualifier.length);
+        System.arraycopy(originalQualifier, 0, qualifier, 0, commonPrefix);
+        for (int j = commonPrefix; j < qualifier.length; ++j) {
+          qualifier[j] = (byte) (randomizer.nextInt() & 0xff);
+        }
+
+        rowsToQualifier.get(rowId).add(qualifier);
+      } else {
+        // same qualifier
+        List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
+        qualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
+      }
+
+      if (randomizer.nextFloat() < chanceForZeroValue) {
+        for (int j = 0; j < value.length; ++j) {
+          value[j] = (byte) 0;
+        }
+      } else {
+        randomizer.nextBytes(value);
+      }
+
+      if (useTags) {
+        result.add(new KeyValue(row, family, qualifier, timestamp, value,
+            new Tag[] { new ArrayBackedTag((byte) 1, "value1") }));
+      } else {
+        result.add(new KeyValue(row, family, qualifier, timestamp, value));
+      }
+    }
+
+    Collections.sort(result, CellComparator.COMPARATOR);
+
+    return result;
+  }
+
+  /**
+   * Generate test data useful to test encoders.
+   * @param howMany How many Key values should be generated.
+   * @return sorted list of key values
+   */
+  public List<Cell> generateTestExtendedOffheapKeyValues(int howMany, boolean useTags) {
+    List<Cell> result = new ArrayList<>();
+    List<byte[]> rows = generateRows();
+    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
+
+    if (family == null) {
+      family = new byte[columnFamilyLength];
+      randomizer.nextBytes(family);
+    }
+
+    long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide;
+
+    byte[] value = new byte[valueLength];
+
+    for (int i = 0; i < howMany; ++i) {
+      long timestamp = baseTimestamp;
+      if(timestampDiffSize > 0){
+        timestamp += randomizer.nextInt(timestampDiffSize);
+      }
+      Integer rowId = randomizer.nextInt(rows.size());
+      byte[] row = rows.get(rowId);
+
+      // generate qualifier, sometimes it is same, sometimes similar,
+      // occasionally completely different
+      byte[] qualifier;
+      float qualifierChance = randomizer.nextFloat();
+      if (!rowsToQualifier.containsKey(rowId)
+          || qualifierChance > chanceForSameQualifier + chanceForSimilarQualifier) {
+        int qualifierLength = averageQualifierLength;
+        qualifierLength += randomizer.nextInt(2 * qualifierLengthVariance + 1)
+            - qualifierLengthVariance;
+        qualifier = new byte[qualifierLength];
+        randomizer.nextBytes(qualifier);
+
+        // add it to map
+        if (!rowsToQualifier.containsKey(rowId)) {
+          rowsToQualifier.put(rowId, new ArrayList<>());
+        }
+        rowsToQualifier.get(rowId).add(qualifier);
+      } else if (qualifierChance > chanceForSameQualifier) {
+        // similar qualifier
+        List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
+        byte[] originalQualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers
+            .size()));
+
+        qualifier = new byte[originalQualifier.length];
+        int commonPrefix = randomizer.nextInt(qualifier.length);
+        System.arraycopy(originalQualifier, 0, qualifier, 0, commonPrefix);
+        for (int j = commonPrefix; j < qualifier.length; ++j) {
+          qualifier[j] = (byte) (randomizer.nextInt() & 0xff);
+        }
+
+        rowsToQualifier.get(rowId).add(qualifier);
+      } else {
+        // same qualifier
+        List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
+        qualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
+      }
+
+      if (randomizer.nextFloat() < chanceForZeroValue) {
+        for (int j = 0; j < value.length; ++j) {
+          value[j] = (byte) 0;
+        }
+      } else {
+        randomizer.nextBytes(value);
+      }
+      if (useTags) {
+        KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, value,
+            new Tag[] { new ArrayBackedTag((byte) 1, "value1") });
+        ByteBuffer offheapKVBB = ByteBuffer.allocateDirect(keyValue.getLength());
+        ByteBufferUtils.copyFromArrayToBuffer(offheapKVBB, keyValue.getBuffer(),
+          keyValue.getOffset(), keyValue.getLength());
+        ByteBufferKeyValue offheapKV =
+            new ExtendedOffheapKeyValue(offheapKVBB, 0, keyValue.getLength(), 0);
+        result.add(offheapKV);
+      } else {
+        KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, value);
+        ByteBuffer offheapKVBB = ByteBuffer.allocateDirect(keyValue.getLength());
+        ByteBufferUtils.copyFromArrayToBuffer(offheapKVBB, keyValue.getBuffer(),
+          keyValue.getOffset(), keyValue.getLength());
+        ByteBufferKeyValue offheapKV =
+            new ExtendedOffheapKeyValue(offheapKVBB, 0, keyValue.getLength(), 0);
+        result.add(offheapKV);
+      }
+    }
+
+    Collections.sort(result, CellComparator.COMPARATOR);
+
+    return result;
+  }
+
+  static class ExtendedOffheapKeyValue extends ByteBufferKeyValue {
+    public ExtendedOffheapKeyValue(ByteBuffer buf, int offset, int length, long seqId) {
+      super(buf, offset, length, seqId);
+    }
+
+    @Override
+    public byte[] getRowArray() {
+      throw new IllegalArgumentException("getRowArray operation is not allowed");
+    }
+
+    @Override
+    public int getRowOffset() {
+      throw new IllegalArgumentException("getRowOffset operation is not allowed");
+    }
+
+    @Override
+    public byte[] getFamilyArray() {
+      throw new IllegalArgumentException("getFamilyArray operation is not allowed");
+    }
+
+    @Override
+    public int getFamilyOffset() {
+      throw new IllegalArgumentException("getFamilyOffset operation is not allowed");
+    }
+
+    @Override
+    public byte[] getQualifierArray() {
+      throw new IllegalArgumentException("getQualifierArray operation is not allowed");
+    }
+
+    @Override
+    public int getQualifierOffset() {
+      throw new IllegalArgumentException("getQualifierOffset operation is not allowed");
+    }
+
+    @Override
+    public byte[] getValueArray() {
+      throw new IllegalArgumentException("getValueArray operation is not allowed");
+    }
+
+    @Override
+    public int getValueOffset() {
+      throw new IllegalArgumentException("getValueOffset operation is not allowed");
+    }
+
+    @Override
+    public byte[] getTagsArray() {
+      throw new IllegalArgumentException("getTagsArray operation is not allowed");
+    }
+
+    @Override
+    public int getTagsOffset() {
+      throw new IllegalArgumentException("getTagsOffset operation is not allowed");
+    }
+  }
+
+  /**
+   * Convert list of KeyValues to byte buffer.
+   * @param keyValues list of KeyValues to be converted.
+   * @return buffer with content from key values
+   */
+  public static ByteBuffer convertKvToByteBuffer(List<KeyValue> keyValues,
+      boolean includesMemstoreTS) {
+    int totalSize = 0;
+    for (KeyValue kv : keyValues) {
+      totalSize += kv.getLength();
+      if (includesMemstoreTS) {
+        totalSize += WritableUtils.getVIntSize(kv.getSequenceId());
+      }
+    }
+
+    ByteBuffer result = ByteBuffer.allocate(totalSize);
+    for (KeyValue kv : keyValues) {
+      result.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
+      if (includesMemstoreTS) {
+        ByteBufferUtils.writeVLong(result, kv.getSequenceId());
+      }
+    }
+    return result;
+  }
+
+  /************************ get/set ***********************************/
+  public RedundantKVGenerator setCommonPrefix(byte[] prefix){
+    this.commonPrefix = prefix;
+    return this;
+  }
+
+  public RedundantKVGenerator setRandomizer(Random randomizer) {
+    this.randomizer = randomizer;
+    return this;
+  }
+
+  public RedundantKVGenerator setNumberOfRowPrefixes(int numberOfRowPrefixes) {
+    this.numberOfRowPrefixes = numberOfRowPrefixes;
+    return this;
+  }
+
+  public RedundantKVGenerator setAveragePrefixLength(int averagePrefixLength) {
+    this.averagePrefixLength = averagePrefixLength;
+    return this;
+  }
+
+  public RedundantKVGenerator setPrefixLengthVariance(int prefixLengthVariance) {
+    this.prefixLengthVariance = prefixLengthVariance;
+    return this;
+  }
+
+  public RedundantKVGenerator setAverageSuffixLength(int averageSuffixLength) {
+    this.averageSuffixLength = averageSuffixLength;
+    return this;
+  }
+
+  public RedundantKVGenerator setSuffixLengthVariance(int suffixLengthVariance) {
+    this.suffixLengthVariance = suffixLengthVariance;
+    return this;
+  }
+
+  public RedundantKVGenerator setNumberOfRows(int numberOfRows) {
+    this.numberOfRows = numberOfRows;
+    return this;
+  }
+
+  public RedundantKVGenerator setChanceForSameQualifier(float chanceForSameQualifier) {
+    this.chanceForSameQualifier = chanceForSameQualifier;
+    return this;
+  }
+
+  public RedundantKVGenerator setChanceForSimilarQualifier(float chanceForSimiliarQualifier) {
+    this.chanceForSimilarQualifier = chanceForSimiliarQualifier;
+    return this;
+  }
+
+  public RedundantKVGenerator setAverageQualifierLength(int averageQualifierLength) {
+    this.averageQualifierLength = averageQualifierLength;
+    return this;
+  }
+
+  public RedundantKVGenerator setQualifierLengthVariance(int qualifierLengthVariance) {
+    this.qualifierLengthVariance = qualifierLengthVariance;
+    return this;
+  }
+
+  public RedundantKVGenerator setColumnFamilyLength(int columnFamilyLength) {
+    this.columnFamilyLength = columnFamilyLength;
+    return this;
+  }
+
+  public RedundantKVGenerator setFamily(byte[] family) {
+    this.family = family;
+    this.columnFamilyLength = family.length;
+    return this;
+  }
+
+  public RedundantKVGenerator setValueLength(int valueLength) {
+    this.valueLength = valueLength;
+    return this;
+  }
+
+  public RedundantKVGenerator setChanceForZeroValue(float chanceForZeroValue) {
+    this.chanceForZeroValue = chanceForZeroValue;
+    return this;
+  }
+
+  public RedundantKVGenerator setBaseTimestampDivide(int baseTimestampDivide) {
+    this.baseTimestampDivide = baseTimestampDivide;
+    return this;
+  }
+
+  public RedundantKVGenerator setTimestampDiffSize(int timestampDiffSize) {
+    this.timestampDiffSize = timestampDiffSize;
+    return this;
+  }
+}

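Likewise, a minimal sketch of how an encoding or compression test might drive the generator added above. The fluent setters, generateTestKeyValues and convertKvToByteBuffer come from the code above; the wrapper class and the parameter values are illustrative only.

import java.nio.ByteBuffer;
import java.util.List;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.RedundantKVGenerator;

public class RedundantKVGeneratorExample {
  public static void main(String[] args) {
    // start from the seeded defaults and tweak how redundant the qualifiers are
    RedundantKVGenerator generator = new RedundantKVGenerator()
        .setNumberOfRows(100)
        .setChanceForSameQualifier(0.6f);

    // 1000 KeyValues, sorted by CellComparator, generated without tags
    List<KeyValue> kvs = generator.generateTestKeyValues(1000, false);

    // flatten them into one buffer (no memstore timestamps), the shape a
    // data block encoding test would feed to an encoder
    ByteBuffer block = RedundantKVGenerator.convertKvToByteBuffer(kvs, false);
    System.out.println(kvs.size() + " KeyValues in " + block.capacity() + " bytes");
  }
}
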
http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
index cf74a3e..f5db4c1 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
@@ -25,7 +25,6 @@ import java.util.Set;
 
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
index 6efe9d8..5a22ab6 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.MultiThreadedWriter;
 import org.apache.hadoop.hbase.util.RegionSplitter;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
-import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
+import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
index 073de77..2f10989 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.MultiThreadedReader;
 import org.apache.hadoop.hbase.util.MultiThreadedWriter;
 import org.apache.hadoop.hbase.util.RegionSplitter;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
-import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
+import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
 import org.junit.Assert;
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataRandom.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataRandom.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataRandom.java
index 77036be..0fdc616 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataRandom.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataRandom.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
 import org.apache.hadoop.hbase.util.byterange.ByteRangeSet;
 import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet;
-import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
+import org.apache.hadoop.hbase.util.RedundantKVGenerator;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValues.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValues.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValues.java
index 9ee9be8..7a58f2d 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValues.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValues.java
@@ -22,7 +22,7 @@ import java.util.List;
 
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
+import org.apache.hadoop.hbase.util.RedundantKVGenerator;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java
index 86bdf02..bd15d3e 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java
@@ -21,7 +21,7 @@ import java.util.List;
 
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
+import org.apache.hadoop.hbase.util.RedundantKVGenerator;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
index 465bdfb..154f6eb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
@@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.ipc.SimpleRpcScheduler;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
+import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
index dc15bf5..45d1a36 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
@@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
+import org.apache.hadoop.hbase.util.RedundantKVGenerator;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
index 1b5c630..efdf765 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Strings;
-import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
+import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
index ac939d1..bfd3a2b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.ChecksumType;
-import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
+import org.apache.hadoop.hbase.util.RedundantKVGenerator;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
index 9a24074..2dd0073 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
+import org.apache.hadoop.hbase.util.RedundantKVGenerator;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
index 8ff41af..d733d2b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
@@ -40,10 +40,9 @@ import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
+import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
index 006316a..8aefd42 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.util;
 
 import java.util.Arrays;
 
-import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
-
 /**
  * A load test data generator for MOB
  */

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
index 6550baa..6bfb23e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
@@ -44,7 +44,6 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
-import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
 import org.apache.hadoop.util.StringUtils;
 
 /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/a722c7ec/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
index 2deba00..48d6783 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
@@ -23,6 +23,7 @@ import java.util.Set;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
 
 /**
  * A generator of random data (keys/cfs/columns/values) for load testing.