Posted to commits@hbase.apache.org by st...@apache.org on 2017/11/04 17:10:01 UTC

[2/7] hbase git commit: HBASE-19179 Remove hbase-prefix-tree

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java
deleted file mode 100644
index 56c1f6a..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree.row.data;
-
-import java.util.List;
-
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
-import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.Assert;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
-public class TestRowDataTrivial extends BaseTestRowData{
-
-  static byte[]
-    rA = Bytes.toBytes("rA"),
-    rB = Bytes.toBytes("rB"), // turn "r" into a branch for the Searcher tests
-    cf = Bytes.toBytes("fam"),
-    cq0 = Bytes.toBytes("q0"),
-    v0 = Bytes.toBytes("v0");
-
-  static long ts = 55L;
-
-  static List<KeyValue> d = Lists.newArrayList();
-  static {
-    d.add(new KeyValue(rA, cf, cq0, ts, v0));
-    d.add(new KeyValue(rB, cf, cq0, ts, v0));
-  }
-
-  @Override
-  public List<KeyValue> getInputs() {
-    return d;
-  }
-
-  @Override
-  public void individualBlockMetaAssertions(PrefixTreeBlockMeta blockMeta) {
-    // node[0] -> root[r]
-    // node[1] -> leaf[A], etc
-    Assert.assertEquals(2, blockMeta.getRowTreeDepth());
-  }
-
-  @Override
-  public void individualSearcherAssertions(CellSearcher searcher) {
-    /**
-     * The searcher should get a token mismatch on the "r" branch. Assert that it skips not only rA,
-     * but rB as well.
-     */
-    KeyValue afterLast = KeyValueUtil.createFirstOnRow(Bytes.toBytes("zzz"));
-    CellScannerPosition position = searcher.positionAtOrAfter(afterLast);
-    Assert.assertEquals(CellScannerPosition.AFTER_LAST, position);
-    Assert.assertNull(searcher.current());
-  }
-}
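
For context: the two rows above share the one-byte prefix "r", so the encoded
row trie is a root token "r" with leaf tokens "A" and "B" beneath it, which is
exactly the depth of 2 that individualBlockMetaAssertions checks. The searcher
assertion then seeks the first cell on row "zzz": "z" mismatches the root token
"r" and there is no later sibling to descend into, so positionAtOrAfter must
report AFTER_LAST and leave current() null, skipping rB as well as rA.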

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
deleted file mode 100644
index 669f19a..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.codec.prefixtree.row.data;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.ArrayBackedTag;
-import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
-import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.Assert;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
-public class TestRowDataTrivialWithTags extends BaseTestRowData{
-  static byte[] rA = Bytes.toBytes("rA"),
-      rB = Bytes.toBytes("rB"), // turn "r" into a branch for the Searcher tests
-      cf = Bytes.toBytes("fam"), cq0 = Bytes.toBytes("q0"), v0 = Bytes.toBytes("v0");
-
-  static long ts = 55L;
-
-  static List<KeyValue> d = Lists.newArrayList();
-  static {
-    List<Tag> tagList = new ArrayList<>(2);
-    Tag t = new ArrayBackedTag((byte) 1, "visibility");
-    tagList.add(t);
-    t = new ArrayBackedTag((byte) 2, "ACL");
-    tagList.add(t);
-    d.add(new KeyValue(rA, cf, cq0, ts, v0, tagList));
-    d.add(new KeyValue(rB, cf, cq0, ts, v0, tagList));
-  }
-
-  @Override
-  public List<KeyValue> getInputs() {
-    return d;
-  }
-
-  @Override
-  public void individualBlockMetaAssertions(PrefixTreeBlockMeta blockMeta) {
-    // node[0] -> root[r]
-    // node[1] -> leaf[A], etc
-    Assert.assertEquals(2, blockMeta.getRowTreeDepth());
-  }
-
-  @Override
-  public void individualSearcherAssertions(CellSearcher searcher) {
-    /**
-     * The searcher should get a token mismatch on the "r" branch. Assert that
-     * it skips not only rA, but rB as well.
-     */
-    KeyValue afterLast = KeyValueUtil.createFirstOnRow(Bytes.toBytes("zzz"));
-    CellScannerPosition position = searcher.positionAtOrAfter(afterLast);
-    Assert.assertEquals(CellScannerPosition.AFTER_LAST, position);
-    Assert.assertNull(searcher.current());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java
deleted file mode 100644
index 3ac7877..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree.row.data;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeTestConstants;
-import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hadoop.hbase.util.ByteRange;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
-import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
-/*
- * test different timestamps
- * 
- * http://pastebin.com/7ks8kzJ2
- * http://pastebin.com/MPn03nsK
- */
-public class TestRowDataUrls extends BaseTestRowData{
-
-  static List<ByteRange> rows;
-  static{
-    List<String> rowStrings = new ArrayList<>(16);
-    rowStrings.add("com.edsBlog/directoryAa/pageAaa");
-    rowStrings.add("com.edsBlog/directoryAa/pageBbb");
-    rowStrings.add("com.edsBlog/directoryAa/pageCcc");
-    rowStrings.add("com.edsBlog/directoryAa/pageDdd");
-    rowStrings.add("com.edsBlog/directoryBb/pageEee");
-    rowStrings.add("com.edsBlog/directoryBb/pageFff");
-    rowStrings.add("com.edsBlog/directoryBb/pageGgg");
-    rowStrings.add("com.edsBlog/directoryBb/pageHhh");
-    rowStrings.add("com.isabellasBlog/directoryAa/pageAaa");
-    rowStrings.add("com.isabellasBlog/directoryAa/pageBbb");
-    rowStrings.add("com.isabellasBlog/directoryAa/pageCcc");
-    rowStrings.add("com.isabellasBlog/directoryAa/pageDdd");
-    rowStrings.add("com.isabellasBlog/directoryBb/pageEee");
-    rowStrings.add("com.isabellasBlog/directoryBb/pageFff");
-    rowStrings.add("com.isabellasBlog/directoryBb/pageGgg");
-    rowStrings.add("com.isabellasBlog/directoryBb/pageHhh");
-    ByteRangeTreeSet ba = new ByteRangeTreeSet();
-    for (String row : rowStrings) {
-      ba.add(new SimpleMutableByteRange(Bytes.toBytes(row)));
-    }
-    rows = ba.compile().getSortedRanges();
-  }
-
-  static List<String> cols = Lists.newArrayList();
-  static {
-    cols.add("Chrome");
-    cols.add("Chromeb");
-    cols.add("Firefox");
-    cols.add("InternetExplorer");
-    cols.add("Opera");
-    cols.add("Safari");
-  }
-
-  static long ts = 1234567890;
-
-  static int MAX_VALUE = 50;
-
-  static List<KeyValue> kvs = Lists.newArrayList();
-  static {
-    for (ByteRange row : rows) {
-      for (String col : cols) {
-        KeyValue kv = new KeyValue(row.deepCopyToNewArray(), PrefixTreeTestConstants.TEST_CF,
-            Bytes.toBytes(col), ts, KeyValue.Type.Put, Bytes.toBytes("VALUE"));
-        kvs.add(kv);
-        // System.out.println("TestRows5:"+kv);
-      }
-    }
-  }
-
-  @Override
-  public List<KeyValue> getInputs() {
-    return kvs;
-  }
-
-}
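
The ByteRangeTreeSet / compile() / getSortedRanges() sequence above is there to
de-duplicate and sort the row keys before the KeyValues are built. A minimal
JDK-only sketch of the same effect, assuming plain byte[] rows rather than
reusable ByteRanges (illustrative, not the ByteRangeTreeSet implementation):

    import java.util.SortedSet;
    import java.util.TreeSet;
    import org.apache.hadoop.hbase.util.Bytes;

    // Bytes.BYTES_COMPARATOR orders byte[] lexicographically as unsigned
    // bytes, the same ordering HBase uses for row keys; TreeSet drops
    // duplicates on insert.
    SortedSet<byte[]> sortedRows = new TreeSet<>(Bytes.BYTES_COMPARATOR);
    for (String row : rowStrings) {
      sortedRows.add(Bytes.toBytes(row));
    }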

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java
deleted file mode 100644
index a0c2ee1..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree.row.data;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueTestUtil;
-import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
-import org.apache.hadoop.hbase.codec.prefixtree.encode.column.ColumnNodeWriter;
-import org.apache.hadoop.hbase.codec.prefixtree.encode.row.RowNodeWriter;
-import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
-import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hadoop.hbase.util.Bytes;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
-/*
- * test different timestamps
- * 
- * http://pastebin.com/7ks8kzJ2
- * http://pastebin.com/MPn03nsK
- */
-public class TestRowDataUrlsExample extends BaseTestRowData{
-
-  static String TENANT_ID = Integer.toString(95322);
-  static String APP_ID = Integer.toString(12);
-  static List<String> URLS = Lists.newArrayList(
-      "com.dablog/2011/10/04/boating", 
-      "com.dablog/2011/10/09/lasers", 
-      "com.jamiesrecipes", //this nub helped find a bug
-      "com.jamiesrecipes/eggs");
-  static String FAMILY = "hits";
-  static List<String> BROWSERS = Lists.newArrayList(
-      "Chrome", "IE8", "IE9beta");//, "Opera", "Safari");
-  static long TIMESTAMP = 1234567890;
-
-  static int MAX_VALUE = 50;
-
-  static List<KeyValue> kvs = Lists.newArrayList();
-  static{
-    for(String rowKey : URLS){
-      for(String qualifier : BROWSERS){
-        KeyValue kv = new KeyValue(
-            Bytes.toBytes(rowKey), 
-            Bytes.toBytes(FAMILY), 
-            Bytes.toBytes(qualifier), 
-            TIMESTAMP, 
-            KeyValue.Type.Put, 
-            Bytes.toBytes("VvvV"));
-        kvs.add(kv);
-      }
-    }
-  }
-
-  /**
-   * Used for generating docs.
-   */
-  public static void main(String... args) throws IOException{
-    System.out.println("-- inputs --");
-    System.out.println(KeyValueTestUtil.toStringWithPadding(kvs, true));
-    ByteArrayOutputStream os = new ByteArrayOutputStream(1<<20);
-    PrefixTreeEncoder encoder = new PrefixTreeEncoder(os, false);
-
-    for(KeyValue kv : kvs){
-      encoder.write(kv);
-    }
-    encoder.flush();
-
-    System.out.println("-- qualifier SortedPtBuilderNodes --");
-    for(TokenizerNode tokenizer : encoder.getQualifierWriter().getNonLeaves()){
-      System.out.println(tokenizer);
-    }
-    for(TokenizerNode tokenizerNode : encoder.getQualifierWriter().getLeaves()){
-      System.out.println(tokenizerNode);
-    }
-
-    System.out.println("-- qualifier PtColumnNodeWriters --");
-    for(ColumnNodeWriter writer : encoder.getQualifierWriter().getColumnNodeWriters()){
-      System.out.println(writer);
-    }
-
-    System.out.println("-- rowKey SortedPtBuilderNodes --");
-    for(TokenizerNode tokenizerNode : encoder.getRowWriter().getNonLeaves()){
-      System.out.println(tokenizerNode);
-    }
-    for(TokenizerNode tokenizerNode : encoder.getRowWriter().getLeaves()){
-      System.out.println(tokenizerNode);
-    }
-
-    System.out.println("-- row PtRowNodeWriters --");
-    for(RowNodeWriter writer : encoder.getRowWriter().getNonLeafWriters()){
-      System.out.println(writer);
-    }
-    for(RowNodeWriter writer : encoder.getRowWriter().getLeafWriters()){
-      System.out.println(writer);
-    }
-
-    System.out.println("-- concatenated values --");
-    System.out.println(Bytes.toStringBinary(encoder.getValueByteRange().deepCopyToNewArray()));
-  }
-
-  @Override
-  public List<KeyValue> getInputs() {
-    return kvs;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java
deleted file mode 100644
index 00704f4..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree.timestamp;
-
-import java.util.Collection;
-import java.util.List;
-
-import org.apache.hadoop.hbase.codec.prefixtree.timestamp.data.TestTimestampDataBasic;
-import org.apache.hadoop.hbase.codec.prefixtree.timestamp.data.TestTimestampDataNumbers;
-import org.apache.hadoop.hbase.codec.prefixtree.timestamp.data.TestTimestampDataRepeats;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
-public interface TestTimestampData {
-
-  List<Long> getInputs();
-  long getMinimum();
-  List<Long> getOutputs();
-
-  class InMemory {
-    public Collection<Object[]> getAllAsObjectArray() {
-      List<Object[]> all = Lists.newArrayList();
-      all.add(new Object[] { new TestTimestampDataBasic() });
-      all.add(new Object[] { new TestTimestampDataNumbers() });
-      all.add(new Object[] { new TestTimestampDataRepeats() });
-      return all;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampEncoder.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampEncoder.java
deleted file mode 100644
index 1d0ad1b..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampEncoder.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree.timestamp;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Collection;
-
-import org.apache.hadoop.hbase.nio.SingleByteBuff;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp.TimestampDecoder;
-import org.apache.hadoop.hbase.codec.prefixtree.encode.other.LongEncoder;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-@Category({MiscTests.class, SmallTests.class})
-@RunWith(Parameterized.class)
-public class TestTimestampEncoder {
-
-  @Parameters
-  public static Collection<Object[]> parameters() {
-    return new TestTimestampData.InMemory().getAllAsObjectArray();
-  }
-
-  private TestTimestampData timestamps;
-  private PrefixTreeBlockMeta blockMeta;
-  private LongEncoder encoder;
-  private byte[] bytes;
-  private TimestampDecoder decoder;
-
-  public TestTimestampEncoder(TestTimestampData testTimestamps) throws IOException {
-    this.timestamps = testTimestamps;
-    this.blockMeta = new PrefixTreeBlockMeta();
-    this.blockMeta.setNumMetaBytes(0);
-    this.blockMeta.setNumRowBytes(0);
-    this.blockMeta.setNumQualifierBytes(0);
-    this.encoder = new LongEncoder();
-    for (Long ts : testTimestamps.getInputs()) {
-      encoder.add(ts);
-    }
-    encoder.compile();
-    blockMeta.setTimestampFields(encoder);
-    bytes = encoder.getByteArray();
-    decoder = new TimestampDecoder();
-    decoder.initOnBlock(blockMeta, new SingleByteBuff(ByteBuffer.wrap(bytes)));
-  }
-
-  @Test
-  public void testCompressorMinimum() {
-    Assert.assertEquals(timestamps.getMinimum(), encoder.getMin());
-  }
-
-  @Test
-  public void testCompressorRoundTrip() {
-    long[] outputs = encoder.getSortedUniqueTimestamps();
-    for (int i = 0; i < timestamps.getOutputs().size(); ++i) {
-      long input = timestamps.getOutputs().get(i);
-      long output = outputs[i];
-      Assert.assertEquals(input, output);
-    }
-  }
-
-  @Test
-  public void testReaderMinimum() {
-    Assert.assertEquals(timestamps.getMinimum(), decoder.getLong(0));
-  }
-
-  @Test
-  public void testReaderRoundTrip() {
-    for (int i = 0; i < timestamps.getOutputs().size(); ++i) {
-      long input = timestamps.getOutputs().get(i);
-      long output = decoder.getLong(i);
-      Assert.assertEquals(input, output);
-    }
-  }
-}
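
The four tests above pin down a contract rather than a byte layout: the encoder
exposes the minimum input via getMin(), the distinct inputs in ascending order
via getSortedUniqueTimestamps(), and the decoder returns the i-th entry of that
table through getLong(i). A sketch of that contract on TestTimestampDataBasic's
data, assuming nothing about LongEncoder's actual wire format:

    import java.util.Arrays;
    import java.util.List;

    List<Long> inputs = Arrays.asList(5L, 3L, 0L, 1L, 3L);
    long min = inputs.stream().min(Long::compare).get();        // 0L
    long[] table = inputs.stream().distinct().sorted()
        .mapToLong(Long::longValue).toArray();                  // {0, 1, 3, 5}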

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataBasic.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataBasic.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataBasic.java
deleted file mode 100644
index d4fbb4d..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataBasic.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree.timestamp.data;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.hbase.codec.prefixtree.timestamp.TestTimestampData;
-
-public class TestTimestampDataBasic implements TestTimestampData {
-
-  @Override
-  public List<Long> getInputs() {
-    List<Long> d = new ArrayList<>(5);
-    d.add(5L);
-    d.add(3L);
-    d.add(0L);
-    d.add(1L);
-    d.add(3L);
-    return d;
-  }
-
-  @Override
-  public long getMinimum() {
-    return 0L;
-  }
-
-  @Override
-  public List<Long> getOutputs() {
-    List<Long> d = new ArrayList<>(4);
-    d.add(0L);
-    d.add(1L);
-    d.add(3L);
-    d.add(5L);
-    return d;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataNumbers.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataNumbers.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataNumbers.java
deleted file mode 100644
index d0bc837..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataNumbers.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree.timestamp.data;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.hbase.codec.prefixtree.timestamp.TestTimestampData;
-
-public class TestTimestampDataNumbers implements TestTimestampData {
-
-  private int shift = 8;
-
-  @Override
-  public List<Long> getInputs() {
-    List<Long> d = new ArrayList<>(5);
-    d.add(5L << shift);
-    d.add(3L << shift);
-    d.add(7L << shift);
-    d.add(1L << shift);
-    d.add(3L << shift);
-    return d;
-  }
-
-  @Override
-  public long getMinimum() {
-    return 1L << shift;
-  }
-
-  @Override
-  public List<Long> getOutputs() {
-    List<Long> d = new ArrayList<>(4);
-    d.add(1L << shift);
-    d.add(3L << shift);
-    d.add(5L << shift);
-    d.add(7L << shift);
-    return d;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataRepeats.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataRepeats.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataRepeats.java
deleted file mode 100644
index 3320d66..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataRepeats.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree.timestamp.data;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.hbase.codec.prefixtree.timestamp.TestTimestampData;
-
-public class TestTimestampDataRepeats implements TestTimestampData {
-
-  private static long t = 1234567890L;
-
-  @Override
-  public List<Long> getInputs() {
-    List<Long> d = new ArrayList<>(5);
-    d.add(t);
-    d.add(t);
-    d.add(t);
-    d.add(t);
-    d.add(t);
-    return d;
-  }
-
-  @Override
-  public long getMinimum() {
-    return t;
-  }
-
-  @Override
-  public List<Long> getOutputs() {
-    List<Long> d = new ArrayList<>();
-    return d;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/bytes/TestByteRange.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/bytes/TestByteRange.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/bytes/TestByteRange.java
deleted file mode 100644
index 028d604..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/bytes/TestByteRange.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.util.bytes;
-
-import junit.framework.Assert;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.ByteRange;
-import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestByteRange {
-
-  @Test
-  public void testConstructor() {
-    ByteRange b = new SimpleMutableByteRange(new byte[] { 0, 1, 2 });
-    Assert.assertEquals(3, b.getLength());
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/comparator/ByteArrayComparator.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/comparator/ByteArrayComparator.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/comparator/ByteArrayComparator.java
deleted file mode 100644
index 9a81d90..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/comparator/ByteArrayComparator.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.util.comparator;
-
-import java.util.Comparator;
-
-import org.apache.hadoop.hbase.util.Bytes;
-
-public class ByteArrayComparator implements Comparator<byte[]> {
-
-  @Override
-  public int compare(byte[] a, byte[] b) {
-    return Bytes.compareTo(a, b);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/number/NumberFormatter.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/number/NumberFormatter.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/number/NumberFormatter.java
deleted file mode 100644
index 4aaea61..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/number/NumberFormatter.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.util.number;
-
-import java.text.DecimalFormat;
-
-public class NumberFormatter {
-
-  public static String addCommas(final Number pValue) {
-    if (pValue == null) {
-      return null;
-    }
-    String format = "###,###,###,###,###,###,###,###.#####################";
-    return new DecimalFormat(format).format(pValue);// biggest is 19 digits
-  }
-
-}
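
With the default US-style locale, the pattern above groups digits in threes and
allows up to 21 fraction digits; for example (hypothetical calls):

    NumberFormatter.addCommas(95322);         // "95,322"
    NumberFormatter.addCommas(1234567890L);   // "1,234,567,890"
    NumberFormatter.addCommas(null);          // null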

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/number/RandomNumberUtils.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/number/RandomNumberUtils.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/number/RandomNumberUtils.java
deleted file mode 100644
index f2f06e6..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/number/RandomNumberUtils.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.util.number;
-
-import java.util.Random;
-
-public class RandomNumberUtils {
-
-  public static long nextPositiveLong(Random random) {
-    while (true) {
-      long value = random.nextLong();
-      if (value > 0) {
-        return value;
-      }
-    }
-  }
-
-}
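
nextPositiveLong is plain rejection sampling: just under half of all longs are
positive, so the loop accepts after about two draws on average. It also avoids
the classic trap in the obvious one-liner alternative:

    // Math.abs cannot make Long.MIN_VALUE positive: +2^63 does not fit in a
    // signed 64-bit long, so the result silently stays negative.
    long stillNegative = Math.abs(Long.MIN_VALUE);   // == Long.MIN_VALUE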

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestFIntTool.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestFIntTool.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestFIntTool.java
deleted file mode 100644
index 4d12335..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestFIntTool.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.util.vint;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-/********************** tests *************************/
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestFIntTool {
-  @Test
-  public void testLeadingZeros() {
-    Assert.assertEquals(64, Long.numberOfLeadingZeros(0));
-    Assert.assertEquals(63, Long.numberOfLeadingZeros(1));
-    Assert.assertEquals(0, Long.numberOfLeadingZeros(Long.MIN_VALUE));
-    Assert.assertEquals(0, Long.numberOfLeadingZeros(-1));
-    Assert.assertEquals(1, Long.numberOfLeadingZeros(Long.MAX_VALUE));
-    Assert.assertEquals(1, Long.numberOfLeadingZeros(Long.MAX_VALUE - 1));
-  }
-
-  @Test
-  public void testMaxValueForNumBytes() {
-    Assert.assertEquals(255, UFIntTool.maxValueForNumBytes(1));
-    Assert.assertEquals(65535, UFIntTool.maxValueForNumBytes(2));
-    Assert.assertEquals(0xffffff, UFIntTool.maxValueForNumBytes(3));
-    Assert.assertEquals(0xffffffffffffffL, UFIntTool.maxValueForNumBytes(7));
-  }
-
-  @Test
-  public void testNumBytes() {
-    Assert.assertEquals(1, UFIntTool.numBytes(0));
-    Assert.assertEquals(1, UFIntTool.numBytes(1));
-    Assert.assertEquals(1, UFIntTool.numBytes(255));
-    Assert.assertEquals(2, UFIntTool.numBytes(256));
-    Assert.assertEquals(2, UFIntTool.numBytes(65535));
-    Assert.assertEquals(3, UFIntTool.numBytes(65536));
-    Assert.assertEquals(4, UFIntTool.numBytes(0xffffffffL));
-    Assert.assertEquals(5, UFIntTool.numBytes(0x100000000L));
-    Assert.assertEquals(4, UFIntTool.numBytes(Integer.MAX_VALUE));
-    Assert.assertEquals(8, UFIntTool.numBytes(Long.MAX_VALUE));
-    Assert.assertEquals(8, UFIntTool.numBytes(Long.MAX_VALUE - 1));
-  }
-
-  @Test
-  public void testGetBytes() {
-    Assert.assertArrayEquals(new byte[] { 0 }, UFIntTool.getBytes(1, 0));
-    Assert.assertArrayEquals(new byte[] { 1 }, UFIntTool.getBytes(1, 1));
-    Assert.assertArrayEquals(new byte[] { -1 }, UFIntTool.getBytes(1, 255));
-    Assert.assertArrayEquals(new byte[] { 1, 0 }, UFIntTool.getBytes(2, 256));
-    Assert.assertArrayEquals(new byte[] { 1, 3 }, UFIntTool.getBytes(2, 256 + 3));
-    Assert.assertArrayEquals(new byte[] { 1, -128 }, UFIntTool.getBytes(2, 256 + 128));
-    Assert.assertArrayEquals(new byte[] { 1, -1 }, UFIntTool.getBytes(2, 256 + 255));
-    Assert.assertArrayEquals(new byte[] { 127, -1, -1, -1 },
-      UFIntTool.getBytes(4, Integer.MAX_VALUE));
-    Assert.assertArrayEquals(new byte[] { 127, -1, -1, -1, -1, -1, -1, -1 },
-      UFIntTool.getBytes(8, Long.MAX_VALUE));
-  }
-
-  @Test
-  public void testFromBytes() {
-    Assert.assertEquals(0, UFIntTool.fromBytes(new byte[] { 0 }));
-    Assert.assertEquals(1, UFIntTool.fromBytes(new byte[] { 1 }));
-    Assert.assertEquals(255, UFIntTool.fromBytes(new byte[] { -1 }));
-    Assert.assertEquals(256, UFIntTool.fromBytes(new byte[] { 1, 0 }));
-    Assert.assertEquals(256 + 3, UFIntTool.fromBytes(new byte[] { 1, 3 }));
-    Assert.assertEquals(256 + 128, UFIntTool.fromBytes(new byte[] { 1, -128 }));
-    Assert.assertEquals(256 + 255, UFIntTool.fromBytes(new byte[] { 1, -1 }));
-    Assert.assertEquals(Integer.MAX_VALUE, UFIntTool.fromBytes(new byte[] { 127, -1, -1, -1 }));
-    Assert.assertEquals(Long.MAX_VALUE,
-      UFIntTool.fromBytes(new byte[] { 127, -1, -1, -1, -1, -1, -1, -1 }));
-  }
-
-  @Test
-  public void testRoundTrips() {
-    long[] values = new long[] { 0, 1, 2, 255, 256, 31123, 65535, 65536, 65537, 0xfffffeL,
-        0xffffffL, 0x1000000L, 0x1000001L, Integer.MAX_VALUE - 1, Integer.MAX_VALUE,
-        (long) Integer.MAX_VALUE + 1, Long.MAX_VALUE - 1, Long.MAX_VALUE };
-    for (int i = 0; i < values.length; ++i) {
-      Assert.assertEquals(values[i], UFIntTool.fromBytes(UFIntTool.getBytes(8, values[i])));
-    }
-  }
-
-  @Test
-  public void testWriteBytes() throws IOException {// copied from testGetBytes
-    Assert.assertArrayEquals(new byte[] { 0 }, bytesViaOutputStream(1, 0));
-    Assert.assertArrayEquals(new byte[] { 1 }, bytesViaOutputStream(1, 1));
-    Assert.assertArrayEquals(new byte[] { -1 }, bytesViaOutputStream(1, 255));
-    Assert.assertArrayEquals(new byte[] { 1, 0 }, bytesViaOutputStream(2, 256));
-    Assert.assertArrayEquals(new byte[] { 1, 3 }, bytesViaOutputStream(2, 256 + 3));
-    Assert.assertArrayEquals(new byte[] { 1, -128 }, bytesViaOutputStream(2, 256 + 128));
-    Assert.assertArrayEquals(new byte[] { 1, -1 }, bytesViaOutputStream(2, 256 + 255));
-    Assert.assertArrayEquals(new byte[] { 127, -1, -1, -1 },
-      bytesViaOutputStream(4, Integer.MAX_VALUE));
-    Assert.assertArrayEquals(new byte[] { 127, -1, -1, -1, -1, -1, -1, -1 },
-      bytesViaOutputStream(8, Long.MAX_VALUE));
-  }
-
-  private byte[] bytesViaOutputStream(int outputWidth, long value) throws IOException {
-    ByteArrayOutputStream os = new ByteArrayOutputStream();
-    UFIntTool.writeBytes(outputWidth, value, os);
-    return os.toByteArray();
-  }
-}
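
The getBytes/fromBytes assertions above describe a fixed-width, big-endian,
unsigned encoding: the value's low `width` bytes, most significant first. A
minimal encoder consistent with those byte patterns (a sketch of the asserted
behavior, not necessarily UFIntTool's source):

    // e.g. toFixedWidthBytes(2, 256) -> {1, 0}, matching the test above.
    static byte[] toFixedWidthBytes(int width, long value) {
      byte[] out = new byte[width];
      for (int i = width - 1; i >= 0; --i) {
        out[i] = (byte) value;   // keep the low 8 bits
        value >>>= 8;            // shift the next byte into place
      }
      return out;
    }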

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVIntTool.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVIntTool.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVIntTool.java
deleted file mode 100644
index 9171619..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVIntTool.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.util.vint;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Random;
-
-import org.apache.hadoop.hbase.nio.SingleByteBuff;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestVIntTool {
-
-  @Test
-  public void testNumBytes() {
-    Assert.assertEquals(1, UVIntTool.numBytes(0));
-    Assert.assertEquals(1, UVIntTool.numBytes(1));
-    Assert.assertEquals(1, UVIntTool.numBytes(100));
-    Assert.assertEquals(1, UVIntTool.numBytes(126));
-    Assert.assertEquals(1, UVIntTool.numBytes(127));
-    Assert.assertEquals(2, UVIntTool.numBytes(128));
-    Assert.assertEquals(2, UVIntTool.numBytes(129));
-    Assert.assertEquals(5, UVIntTool.numBytes(Integer.MAX_VALUE));
-  }
-
-  @Test
-  public void testWriteBytes() throws IOException {
-    Assert.assertArrayEquals(new byte[] { 0 }, bytesViaOutputStream(0));
-    Assert.assertArrayEquals(new byte[] { 1 }, bytesViaOutputStream(1));
-    Assert.assertArrayEquals(new byte[] { 63 }, bytesViaOutputStream(63));
-    Assert.assertArrayEquals(new byte[] { 127 }, bytesViaOutputStream(127));
-    Assert.assertArrayEquals(new byte[] { -128, 1 }, bytesViaOutputStream(128));
-    Assert.assertArrayEquals(new byte[] { -128 + 27, 1 }, bytesViaOutputStream(155));
-    Assert.assertArrayEquals(UVIntTool.MAX_VALUE_BYTES, bytesViaOutputStream(Integer.MAX_VALUE));
-  }
-
-  private byte[] bytesViaOutputStream(int value) throws IOException {
-    ByteArrayOutputStream os = new ByteArrayOutputStream();
-    UVIntTool.writeBytes(value, os);
-    return os.toByteArray();
-  }
-
-  @Test
-  public void testToBytes() {
-    Assert.assertArrayEquals(new byte[] { 0 }, UVIntTool.getBytes(0));
-    Assert.assertArrayEquals(new byte[] { 1 }, UVIntTool.getBytes(1));
-    Assert.assertArrayEquals(new byte[] { 63 }, UVIntTool.getBytes(63));
-    Assert.assertArrayEquals(new byte[] { 127 }, UVIntTool.getBytes(127));
-    Assert.assertArrayEquals(new byte[] { -128, 1 }, UVIntTool.getBytes(128));
-    Assert.assertArrayEquals(new byte[] { -128 + 27, 1 }, UVIntTool.getBytes(155));
-    Assert.assertArrayEquals(UVIntTool.MAX_VALUE_BYTES, UVIntTool.getBytes(Integer.MAX_VALUE));
-  }
-
-  @Test
-  public void testFromBytes() {
-    Assert.assertEquals(Integer.MAX_VALUE,
-      UVIntTool.getInt(new SingleByteBuff(ByteBuffer.wrap(UVIntTool.MAX_VALUE_BYTES)), 0));
-  }
-
-  @Test
-  public void testRoundTrips() {
-    Random random = new Random();
-    for (int i = 0; i < 10000; ++i) {
-      int value = random.nextInt(Integer.MAX_VALUE);
-      byte[] bytes = UVIntTool.getBytes(value);
-      int roundTripped = UVIntTool.getInt(new SingleByteBuff(ByteBuffer.wrap(bytes)), 0);
-      Assert.assertEquals(value, roundTripped);
-    }
-  }
-
-  @Test
-  public void testInputStreams() throws IOException {
-    ByteArrayInputStream is;
-    is = new ByteArrayInputStream(new byte[] { 0 });
-    Assert.assertEquals(0, UVIntTool.getInt(is));
-    is = new ByteArrayInputStream(new byte[] { 5 });
-    Assert.assertEquals(5, UVIntTool.getInt(is));
-    is = new ByteArrayInputStream(new byte[] { -128 + 27, 1 });
-    Assert.assertEquals(155, UVIntTool.getInt(is));
-  }
-
-}
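
The byte patterns asserted above (127 -> {127}, 128 -> {-128, 1}, 155 ->
{-128 + 27, 1}) are the standard unsigned LEB128 varint: seven data bits per
byte, least-significant group first, high bit set on every byte except the
last. A minimal encoder matching those assertions (a sketch of the format, not
UVIntTool's source):

    import java.io.ByteArrayOutputStream;

    static byte[] toVarint(int value) {
      ByteArrayOutputStream os = new ByteArrayOutputStream();
      while ((value & ~0x7F) != 0) {       // more than 7 bits remain
        os.write((value & 0x7F) | 0x80);   // emit 7 bits, set continuation bit
        value >>>= 7;
      }
      os.write(value);                     // final byte, high bit clear
      return os.toByteArray();
    }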

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVLongTool.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVLongTool.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVLongTool.java
deleted file mode 100644
index 247dee0..0000000
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVLongTool.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.util.vint;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Random;
-
-import org.apache.hadoop.hbase.nio.SingleByteBuff;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.number.RandomNumberUtils;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestVLongTool {
-
-  @Test
-  public void testNumBytes() {
-    Assert.assertEquals(1, UVLongTool.numBytes(0));
-    Assert.assertEquals(1, UVLongTool.numBytes(1));
-    Assert.assertEquals(1, UVLongTool.numBytes(100));
-    Assert.assertEquals(1, UVLongTool.numBytes(126));
-    Assert.assertEquals(1, UVLongTool.numBytes(127));
-    Assert.assertEquals(2, UVLongTool.numBytes(128));
-    Assert.assertEquals(2, UVLongTool.numBytes(129));
-    Assert.assertEquals(9, UVLongTool.numBytes(Long.MAX_VALUE));
-  }
-
-  @Test
-  public void testToBytes() {
-    Assert.assertArrayEquals(new byte[] { 0 }, UVLongTool.getBytes(0));
-    Assert.assertArrayEquals(new byte[] { 1 }, UVLongTool.getBytes(1));
-    Assert.assertArrayEquals(new byte[] { 63 }, UVLongTool.getBytes(63));
-    Assert.assertArrayEquals(new byte[] { 127 }, UVLongTool.getBytes(127));
-    Assert.assertArrayEquals(new byte[] { -128, 1 }, UVLongTool.getBytes(128));
-    Assert.assertArrayEquals(new byte[] { -128 + 27, 1 }, UVLongTool.getBytes(155));
-    Assert.assertArrayEquals(UVLongTool.MAX_VALUE_BYTES, UVLongTool.getBytes(Long.MAX_VALUE));
-  }
-
-  @Test
-  public void testFromBytes() {
-    Assert.assertEquals(Long.MAX_VALUE, UVLongTool.getLong(UVLongTool.MAX_VALUE_BYTES));
-  }
-
-  @Test
-  public void testFromBytesOffset() {
-    Assert.assertEquals(Long.MAX_VALUE,
-      UVLongTool.getLong(new SingleByteBuff(ByteBuffer.wrap(UVLongTool.MAX_VALUE_BYTES)), 0));
-
-    long ms = 1318966363481L;
-//    System.out.println(ms);
-    byte[] bytes = UVLongTool.getBytes(ms);
-//    System.out.println(Arrays.toString(bytes));
-    long roundTripped = UVLongTool.getLong(new SingleByteBuff(ByteBuffer.wrap(bytes)), 0);
-    Assert.assertEquals(ms, roundTripped);
-
-    int calculatedNumBytes = UVLongTool.numBytes(ms);
-    int actualNumBytes = bytes.length;
-    Assert.assertEquals(actualNumBytes, calculatedNumBytes);
-
-    byte[] shiftedBytes = new byte[1000];
-    int shift = 33;
-    System.arraycopy(bytes, 0, shiftedBytes, shift, bytes.length);
-    long shiftedRoundTrip =
-        UVLongTool.getLong(new SingleByteBuff(ByteBuffer.wrap(shiftedBytes)), shift);
-    Assert.assertEquals(ms, shiftedRoundTrip);
-  }
-
-  @Test
-  public void testRoundTrips() {
-    Random random = new Random();
-    for (int i = 0; i < 10000; ++i) {
-      long value = RandomNumberUtils.nextPositiveLong(random);
-      byte[] bytes = UVLongTool.getBytes(value);
-      long roundTripped = UVLongTool.getLong(bytes);
-      Assert.assertEquals(value, roundTripped);
-      int calculatedNumBytes = UVLongTool.numBytes(value);
-      int actualNumBytes = bytes.length;
-      Assert.assertEquals(actualNumBytes, calculatedNumBytes);
-    }
-  }
-
-  @Test
-  public void testInputStreams() throws IOException {
-    ByteArrayInputStream is;
-    is = new ByteArrayInputStream(new byte[] { 0 });
-    Assert.assertEquals(0, UVLongTool.getLong(is));
-    is = new ByteArrayInputStream(new byte[] { 5 });
-    Assert.assertEquals(5, UVLongTool.getLong(is));
-    is = new ByteArrayInputStream(new byte[] { -128 + 27, 1 });
-    Assert.assertEquals(155, UVLongTool.getLong(is));
-  }
-}
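
The numBytes assertions here follow directly from the seven-data-bits-per-byte
format: a value with b significant bits takes ceil(b / 7) bytes, so anything
under 128 fits in one byte while Long.MAX_VALUE, with 63 significant bits,
takes ceil(63 / 7) = 9 bytes.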

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-prefix-tree/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/resources/log4j.properties b/hbase-prefix-tree/src/test/resources/log4j.properties
deleted file mode 100644
index c322699..0000000
--- a/hbase-prefix-tree/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollover at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-# Custom Logging levels
-
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-
-log4j.logger.org.apache.hadoop=WARN
-log4j.logger.org.apache.zookeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase=DEBUG
-
-# These settings are workarounds against spurious logs from the minicluster.
-# See HBASE-4709
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
-log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-server/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index eb537fb..6b58b9a 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -394,11 +394,6 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-prefix-tree</artifactId>
-      <scope>runtime</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
       <type>test-jar</type>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
index bd98cdd..e82b2bb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
@@ -792,11 +792,7 @@ public class HFileWriterImpl implements HFile.Writer {
     int avgValueLen =
         entryCount == 0 ? 0 : (int) (totalValueLength / entryCount);
     fileInfo.append(FileInfo.AVG_VALUE_LEN, Bytes.toBytes(avgValueLen), false);
-    if (hFileContext.getDataBlockEncoding() == DataBlockEncoding.PREFIX_TREE) {
-      // In case of Prefix Tree encoding, we always write tags information into HFiles even if all
-      // KVs are having no tags.
-      fileInfo.append(FileInfo.MAX_TAGS_LEN, Bytes.toBytes(this.maxTagsLength), false);
-    } else if (hFileContext.isIncludesTags()) {
+    if (hFileContext.isIncludesTags()) {
       // MAX_TAGS_LEN is written only when this file includes tags; when no
       // tags are being written, it is excluded from the FileInfo
       fileInfo.append(FileInfo.MAX_TAGS_LEN, Bytes.toBytes(this.maxTagsLength), false);
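
The writer change above drops the last prefix-tree special case in HFileWriterImpl: MAX_TAGS_LEN used to be forced into the FileInfo for PREFIX_TREE files even when no cell carried tags, and is now recorded only when the write context includes tags. A reduced sketch of the surviving rule, with the map and key name simplified for illustration (this is not the HFile.Writer API):

    import java.util.HashMap;
    import java.util.Map;

    public final class TagsMetaSketch {
      /** Record the max-tags-length entry only for files that actually carry tags. */
      static void appendTagsMeta(Map<String, Integer> fileInfo, boolean includesTags,
          int maxTagsLength) {
        if (includesTags) {
          fileInfo.put("MAX_TAGS_LEN", maxTagsLength);
        }
      }

      public static void main(String[] args) {
        Map<String, Integer> fileInfo = new HashMap<>();
        appendTagsMeta(fileInfo, false, 0);  // tag-less file: nothing recorded
        appendTagsMeta(fileInfo, true, 12);  // tagged file: entry recorded
        System.out.println(fileInfo);        // {MAX_TAGS_LEN=12}
      }
    }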

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index c8eea98..804f821 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -85,7 +85,6 @@ import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.filter.SubstringComparator;
 import org.apache.hadoop.hbase.filter.ValueFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
@@ -218,7 +217,6 @@ public class TestFromClientSide {
      final byte[] T3 = Bytes.toBytes("T3");
      HColumnDescriptor hcd = new HColumnDescriptor(FAMILY)
          .setKeepDeletedCells(KeepDeletedCells.TRUE)
-         .setDataBlockEncoding(DataBlockEncoding.PREFIX_TREE)
          .setMaxVersions(3);
 
      HTableDescriptor desc = new HTableDescriptor(tableName);
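
With PREFIX_TREE gone from this descriptor, a column family that still wants a block encoding has to pick one of the surviving codecs. A sketch against the same HColumnDescriptor API the test uses, with FAST_DIFF chosen purely for illustration:

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.KeepDeletedCells;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
    import org.apache.hadoop.hbase.util.Bytes;

    public class EncodingChoiceSketch {
      public static void main(String[] args) {
        HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes("fam"))
            .setKeepDeletedCells(KeepDeletedCells.TRUE)
            .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF) // any surviving encoder works here
            .setMaxVersions(3);
        HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("t"));
        desc.addFamily(hcd);
        System.out.println(desc);
      }
    }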

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
index 08edf06..27fd46d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
@@ -42,9 +42,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeSeeker;
 import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
@@ -194,9 +192,6 @@ public class TestDataBlockEncoders {
     List<DataBlockEncoder.EncodedSeeker> encodedSeekers = new ArrayList<>();
     for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
       LOG.info("Encoding: " + encoding);
-      // Off heap block data support not added for PREFIX_TREE DBE yet.
-      // TODO remove this once support is added. HBASE-12298
-      if (this.useOffheapData && encoding == DataBlockEncoding.PREFIX_TREE) continue;
       DataBlockEncoder encoder = encoding.getEncoder();
       if (encoder == null) {
         continue;
@@ -271,9 +266,6 @@ public class TestDataBlockEncoders {
     List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
 
     for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
-      // Off heap block data support not added for PREFIX_TREE DBE yet.
-      // TODO remove this once support is added. HBASE-12298
-      if (this.useOffheapData && encoding == DataBlockEncoding.PREFIX_TREE) continue;
       if (encoding.getEncoder() == null) {
         continue;
       }
@@ -317,9 +309,6 @@ public class TestDataBlockEncoders {
     List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
 
     for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
-      // Off heap block data support not added for PREFIX_TREE DBE yet.
-      // TODO remove this once support is added. HBASE-12298
-      if (this.useOffheapData && encoding == DataBlockEncoding.PREFIX_TREE) continue;
       if (encoding.getEncoder() == null) {
         continue;
       }
@@ -346,12 +335,7 @@ public class TestDataBlockEncoders {
 
       Cell actualKeyValue = seeker.getCell();
       ByteBuffer actualKey = null;
-      if (seeker instanceof PrefixTreeSeeker) {
-        byte[] serializedKey = PrivateCellUtil.getCellKeySerializedAsKeyValueKey(seeker.getKey());
-        actualKey = ByteBuffer.wrap(KeyValueUtil.createKeyValueFromKey(serializedKey).getKey());
-      } else {
-        actualKey = ByteBuffer.wrap(((KeyValue) seeker.getKey()).getKey());
-      }
+      actualKey = ByteBuffer.wrap(((KeyValue) seeker.getKey()).getKey());
       ByteBuffer actualValue = seeker.getValueShallowCopy();
 
       if (expectedKeyValue != null) {
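
With the PREFIX_TREE escape hatches removed, the loops in this test collapse to a single uniform pattern: iterate every DataBlockEncoding, skip the entries that have no encoder (NONE has none), and exercise the rest identically whether the block data lives on-heap or off-heap. A minimal sketch of that pattern:

    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

    public class EncoderLoopSketch {
      public static void main(String[] args) {
        for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
          DataBlockEncoder encoder = encoding.getEncoder(); // null for NONE
          if (encoder == null) {
            continue; // nothing to exercise for the unencoded case
          }
          System.out.println("would build a seeker for " + encoding);
        }
      }
    }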

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
deleted file mode 100644
index 273f82d..0000000
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.io.encoding;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellScanner;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.Region;
-import org.apache.hadoop.hbase.regionserver.RegionScanner;
-import org.apache.hadoop.hbase.testclassification.IOTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({ IOTests.class, SmallTests.class })
-public class TestPrefixTree {
-
-  private static final String row4 = "a-b-B-2-1402397300-1402416535";
-  private static final byte[] row4_bytes = Bytes.toBytes(row4);
-  private static final String row3 = "a-b-A-1-1402397227-1402415999";
-  private static final byte[] row3_bytes = Bytes.toBytes(row3);
-  private static final String row2 = "a-b-A-1-1402329600-1402396277";
-  private static final byte[] row2_bytes = Bytes.toBytes(row2);
-  private static final String row1 = "a-b-A-1";
-  private static final byte[] row1_bytes = Bytes.toBytes(row1);
-
-  private final static byte[] fam = Bytes.toBytes("cf_1");
-  private final static byte[] qual1 = Bytes.toBytes("qf_1");
-  private final static byte[] qual2 = Bytes.toBytes("qf_2");
-
-  private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
-
-  private HRegion region;
-
-  @Before
-  public void setUp() throws Exception {
-    TableName tableName = TableName.valueOf(getClass().getSimpleName());
-    HTableDescriptor htd = new HTableDescriptor(tableName);
-    htd.addFamily(new HColumnDescriptor(fam).setDataBlockEncoding(DataBlockEncoding.PREFIX_TREE));
-    HRegionInfo info = new HRegionInfo(tableName, null, null, false);
-    Path path = testUtil.getDataTestDir(getClass().getSimpleName());
-    region = HBaseTestingUtility.createRegionAndWAL(info, path, testUtil.getConfiguration(), htd);
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    HBaseTestingUtility.closeRegionAndWAL(region);
-    testUtil.cleanupTestDir();
-  }
-
-  @Test
-  public void testHBASE11728() throws Exception {
-    Put put = new Put(Bytes.toBytes("a-b-0-0"));
-    put.addColumn(fam, qual1, Bytes.toBytes("c1-value"));
-    region.put(put);
-    put = new Put(row1_bytes);
-    put.addColumn(fam, qual1, Bytes.toBytes("c1-value"));
-    region.put(put);
-    put = new Put(row2_bytes);
-    put.addColumn(fam, qual2, Bytes.toBytes("c2-value"));
-    region.put(put);
-    put = new Put(row3_bytes);
-    put.addColumn(fam, qual2, Bytes.toBytes("c2-value-2"));
-    region.put(put);
-    put = new Put(row4_bytes);
-    put.addColumn(fam, qual2, Bytes.toBytes("c2-value-3"));
-    region.put(put);
-    region.flush(true);
-    String[] rows = new String[3];
-    rows[0] = row1;
-    rows[1] = row2;
-    rows[2] = row3;
-    byte[][] val = new byte[3][];
-    val[0] = Bytes.toBytes("c1-value");
-    val[1] = Bytes.toBytes("c2-value");
-    val[2] = Bytes.toBytes("c2-value-2");
-    Scan scan = new Scan();
-    scan.setStartRow(row1_bytes);
-    scan.setStopRow(Bytes.toBytes("a-b-A-1:"));
-
-    RegionScanner scanner = region.getScanner(scan);
-    List<Cell> cells = new ArrayList<>();
-    for (int i = 0; i < 3; i++) {
-      assertEquals(i < 2, scanner.next(cells));
-      CellScanner cellScanner = Result.create(cells).cellScanner();
-      while (cellScanner.advance()) {
-        assertEquals(rows[i], Bytes.toString(cellScanner.current().getRowArray(), cellScanner
-            .current().getRowOffset(), cellScanner.current().getRowLength()));
-        assertEquals(Bytes.toString(val[i]), Bytes.toString(cellScanner.current().getValueArray(),
-          cellScanner.current().getValueOffset(), cellScanner.current().getValueLength()));
-      }
-      cells.clear();
-    }
-    scanner.close();
-
-    // Add column
-    scan = new Scan();
-    scan.addColumn(fam, qual2);
-    scan.setStartRow(row1_bytes);
-    scan.setStopRow(Bytes.toBytes("a-b-A-1:"));
-    scanner = region.getScanner(scan);
-    for (int i = 1; i < 3; i++) {
-      assertEquals(i < 2, scanner.next(cells));
-      CellScanner cellScanner = Result.create(cells).cellScanner();
-      while (cellScanner.advance()) {
-        assertEquals(rows[i], Bytes.toString(cellScanner.current().getRowArray(), cellScanner
-            .current().getRowOffset(), cellScanner.current().getRowLength()));
-      }
-      cells.clear();
-    }
-    scanner.close();
-
-    scan = new Scan();
-    scan.addColumn(fam, qual2);
-    scan.setStartRow(Bytes.toBytes("a-b-A-1-"));
-    scan.setStopRow(Bytes.toBytes("a-b-A-1:"));
-    scanner = region.getScanner(scan);
-    for (int i = 1; i < 3; i++) {
-      assertEquals(i < 2, scanner.next(cells));
-      CellScanner cellScanner = Result.create(cells).cellScanner();
-      while (cellScanner.advance()) {
-        assertEquals(rows[i], Bytes.toString(cellScanner.current().getRowArray(), cellScanner
-            .current().getRowOffset(), cellScanner.current().getRowLength()));
-      }
-      cells.clear();
-    }
-    scanner.close();
-
-    scan = new Scan();
-    scan.addColumn(fam, qual2);
-    scan.setStartRow(Bytes.toBytes("a-b-A-1-140239"));
-    scan.setStopRow(Bytes.toBytes("a-b-A-1:"));
-    scanner = region.getScanner(scan);
-    assertFalse(scanner.next(cells));
-    assertFalse(cells.isEmpty());
-    scanner.close();
-  }
-
-  @Test
-  public void testHBASE12817() throws IOException {
-    for (int i = 0; i < 100; i++) {
-      region
-          .put(new Put(Bytes.toBytes("obj" + (2900 + i))).addColumn(fam, qual1, Bytes.toBytes(i)));
-    }
-    region.put(new Put(Bytes.toBytes("obj299")).addColumn(fam, qual1, Bytes.toBytes("whatever")));
-    region.put(new Put(Bytes.toBytes("obj29")).addColumn(fam, qual1, Bytes.toBytes("whatever")));
-    region.put(new Put(Bytes.toBytes("obj2")).addColumn(fam, qual1, Bytes.toBytes("whatever")));
-    region.put(new Put(Bytes.toBytes("obj3")).addColumn(fam, qual1, Bytes.toBytes("whatever")));
-    region.flush(true);
-    Scan scan = new Scan(Bytes.toBytes("obj29995"));
-    RegionScanner scanner = region.getScanner(scan);
-    List<Cell> cells = new ArrayList<>();
-    assertFalse(scanner.next(cells));
-    assertArrayEquals(Bytes.toBytes("obj3"), Result.create(cells).getRow());
-  }
-}
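
The deleted testHBASE12817 above guarded a seek property that outlives the codec: HBase orders rows as unsigned byte strings, so "obj2999" is a proper prefix of "obj29995" and sorts before it, every other "obj2..." row sorts earlier still, and a scan starting at "obj29995" must surface "obj3" first. A dependency-free restatement of that ordering argument, with a TreeSet standing in for the region (ASCII strings sort the same way their bytes do):

    import java.util.TreeSet;

    public class SeekOrderSketch {
      public static void main(String[] args) {
        TreeSet<String> rows = new TreeSet<>();
        for (int i = 0; i < 100; i++) {
          rows.add("obj" + (2900 + i)); // obj2900 .. obj2999
        }
        rows.add("obj299");
        rows.add("obj29");
        rows.add("obj2");
        rows.add("obj3");
        // First row at or after the seek key: "obj2999" < "obj29995" < "obj3".
        System.out.println(rows.ceiling("obj29995")); // prints obj3
      }
    }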

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
deleted file mode 100644
index f325d8a..0000000
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
+++ /dev/null
@@ -1,338 +0,0 @@
-/**
- * Copyright The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.hbase.io.encoding;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.fail;
-
-import java.io.ByteArrayOutputStream;
-import java.io.DataOutputStream;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.Random;
-import java.util.concurrent.ConcurrentSkipListSet;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparatorImpl;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.PrivateCellUtil;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.ArrayBackedTag;
-import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec;
-import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
-import org.apache.hadoop.hbase.io.hfile.HFileContext;
-import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-import org.apache.hadoop.hbase.nio.SingleByteBuff;
-import org.apache.hadoop.hbase.testclassification.IOTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.CollectionBackedScanner;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-/**
- * Tests scanning/seeking data with PrefixTree Encoding.
- */
-@RunWith(Parameterized.class)
-@Category({IOTests.class, SmallTests.class})
-public class TestPrefixTreeEncoding {
-  private static final Log LOG = LogFactory.getLog(TestPrefixTreeEncoding.class);
-  private static final String CF = "EncodingTestCF";
-  private static final byte[] CF_BYTES = Bytes.toBytes(CF);
-  private static final int NUM_ROWS_PER_BATCH = 50;
-  private static final int NUM_COLS_PER_ROW = 20;
-
-  private int numBatchesWritten = 0;
-  private ConcurrentSkipListSet<Cell> kvset = new ConcurrentSkipListSet<>(CellComparatorImpl.COMPARATOR);
-
-  private static boolean formatRowNum = false;
-
-  @Parameters
-  public static Collection<Object[]> parameters() {
-    List<Object[]> paramList = new ArrayList<>();
-    {
-      paramList.add(new Object[] { false });
-      paramList.add(new Object[] { true });
-    }
-    return paramList;
-  }
-  private final boolean includesTag;
-  public TestPrefixTreeEncoding(boolean includesTag) {
-    this.includesTag = includesTag;
-  }
-
-  @Before
-  public void setUp() throws Exception {
-    kvset.clear();
-    formatRowNum = false;
-  }
-
-  @Test
-  public void testSeekBeforeWithFixedData() throws Exception {
-    formatRowNum = true;
-    PrefixTreeCodec encoder = new PrefixTreeCodec();
-    int batchId = numBatchesWritten++;
-    HFileContext meta = new HFileContextBuilder()
-                        .withHBaseCheckSum(false)
-                        .withIncludesMvcc(false)
-                        .withIncludesTags(includesTag)
-                        .withCompression(Algorithm.NONE).build();
-    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
-        DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
-    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
-    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
-    generateFixedTestData(kvset, batchId, false, includesTag, encoder, blkEncodingCtx,
-        userDataStream);
-    EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR,
-        encoder.newDataBlockDecodingContext(meta));
-    byte[] onDiskBytes = baosInMemory.toByteArray();
-    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE,
-        onDiskBytes.length - DataBlockEncoding.ID_SIZE);
-    seeker.setCurrentBuffer(new SingleByteBuff(readBuffer));
-
-    // Seek before the first keyvalue;
-    Cell seekKey =
-        PrivateCellUtil.createFirstDeleteFamilyCellOnRow(getRowKey(batchId, 0), CF_BYTES);
-    seeker.seekToKeyInBlock(seekKey, true);
-    assertNull(seeker.getCell());
-
-    // Seek before the middle keyvalue;
-    seekKey = PrivateCellUtil
-        .createFirstDeleteFamilyCellOnRow(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3), CF_BYTES);
-    seeker.seekToKeyInBlock(seekKey, true);
-    assertNotNull(seeker.getCell());
-    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1),
-      CellUtil.cloneRow(seeker.getCell()));
-
-    // Seek before a key past the last keyvalue; the seeker should land on the last cell
-    seekKey = PrivateCellUtil.createFirstDeleteFamilyCellOnRow(Bytes.toBytes("zzzz"), CF_BYTES);
-    seeker.seekToKeyInBlock(seekKey, true);
-    assertNotNull(seeker.getCell());
-    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1),
-      CellUtil.cloneRow(seeker.getCell()));
-  }
-
-  @Test
-  public void testScanWithRandomData() throws Exception {
-    PrefixTreeCodec encoder = new PrefixTreeCodec();
-    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
-    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
-    HFileContext meta = new HFileContextBuilder()
-                        .withHBaseCheckSum(false)
-                        .withIncludesMvcc(false)
-                        .withIncludesTags(includesTag)
-                        .withCompression(Algorithm.NONE)
-                        .build();
-    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
-        DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
-    generateRandomTestData(kvset, numBatchesWritten++, includesTag, encoder, blkEncodingCtx,
-        userDataStream);
-    EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR,
-        encoder.newDataBlockDecodingContext(meta));
-    byte[] onDiskBytes = baosInMemory.toByteArray();
-    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE,
-        onDiskBytes.length - DataBlockEncoding.ID_SIZE);
-    seeker.setCurrentBuffer(new SingleByteBuff(readBuffer));
-    Cell previousKV = null;
-    do {
-      Cell currentKV = seeker.getCell();
-      System.out.println(currentKV);
-      if (previousKV != null && CellComparatorImpl.COMPARATOR.compare(currentKV, previousKV) < 0) {
-        dumpInputKVSet();
-        fail("Current kv " + currentKV + " is smaller than previous keyvalue " + previousKV);
-      }
-      if (!includesTag) {
-        assertFalse(currentKV.getTagsLength() > 0);
-      } else {
-        Assert.assertTrue(currentKV.getTagsLength() > 0);
-      }
-      previousKV = currentKV;
-    } while (seeker.next());
-  }
-
-  @Test
-  public void testSeekWithRandomData() throws Exception {
-    PrefixTreeCodec encoder = new PrefixTreeCodec();
-    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
-    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
-    int batchId = numBatchesWritten++;
-    HFileContext meta = new HFileContextBuilder()
-                        .withHBaseCheckSum(false)
-                        .withIncludesMvcc(false)
-                        .withIncludesTags(includesTag)
-                        .withCompression(Algorithm.NONE)
-                        .build();
-    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
-        DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
-    generateRandomTestData(kvset, batchId, includesTag, encoder, blkEncodingCtx, userDataStream);
-    EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR,
-        encoder.newDataBlockDecodingContext(meta));
-    byte[] onDiskBytes = baosInMemory.toByteArray();
-    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE,
-        onDiskBytes.length - DataBlockEncoding.ID_SIZE);
-    verifySeeking(seeker, readBuffer, batchId);
-  }
-
-  @Test
-  public void testSeekWithFixedData() throws Exception {
-    PrefixTreeCodec encoder = new PrefixTreeCodec();
-    int batchId = numBatchesWritten++;
-    HFileContext meta = new HFileContextBuilder()
-                        .withHBaseCheckSum(false)
-                        .withIncludesMvcc(false)
-                        .withIncludesTags(includesTag)
-                        .withCompression(Algorithm.NONE)
-                        .build();
-    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
-        DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
-    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
-    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
-    generateFixedTestData(kvset, batchId, includesTag, encoder, blkEncodingCtx, userDataStream);
-    EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR,
-        encoder.newDataBlockDecodingContext(meta));
-    byte[] onDiskBytes = baosInMemory.toByteArray();
-    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE,
-        onDiskBytes.length - DataBlockEncoding.ID_SIZE);
-    verifySeeking(seeker, readBuffer, batchId);
-  }
-
-  private void verifySeeking(EncodedSeeker encodeSeeker,
-      ByteBuffer encodedData, int batchId) {
-    List<KeyValue> kvList = new ArrayList<>();
-    for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
-      kvList.clear();
-      encodeSeeker.setCurrentBuffer(new SingleByteBuff(encodedData));
-      KeyValue firstOnRow = KeyValueUtil.createFirstOnRow(getRowKey(batchId, i));
-      encodeSeeker.seekToKeyInBlock(
-          new KeyValue.KeyOnlyKeyValue(firstOnRow.getBuffer(), firstOnRow.getKeyOffset(),
-              firstOnRow.getKeyLength()), false);
-      boolean hasMoreOfEncodeScanner = encodeSeeker.next();
-      CollectionBackedScanner collectionScanner = new CollectionBackedScanner(
-          this.kvset);
-      boolean hasMoreOfCollectionScanner = collectionScanner.seek(firstOnRow);
-      if (hasMoreOfEncodeScanner != hasMoreOfCollectionScanner) {
-        dumpInputKVSet();
-        fail("Get error result after seeking " + firstOnRow);
-      }
-      if (hasMoreOfEncodeScanner) {
-        if (CellComparatorImpl.COMPARATOR.compare(encodeSeeker.getCell(),
-            collectionScanner.peek()) != 0) {
-          dumpInputKVSet();
-          fail("Expected " + collectionScanner.peek() + " actual "
-              + encodeSeeker.getCell() + ", after seeking " + firstOnRow);
-        }
-      }
-    }
-  }
-
-  private void dumpInputKVSet() {
-    LOG.info("Dumping input keyvalue set in error case:");
-    for (Cell kv : kvset) {
-      System.out.println(kv);
-    }
-  }
-
-  private static void generateFixedTestData(ConcurrentSkipListSet<Cell> kvset, int batchId,
-      boolean useTags, PrefixTreeCodec encoder, HFileBlockEncodingContext blkEncodingCtx,
-      DataOutputStream userDataStream) throws Exception {
-    generateFixedTestData(kvset, batchId, true, useTags, encoder, blkEncodingCtx, userDataStream);
-  }
-
-  private static void generateFixedTestData(ConcurrentSkipListSet<Cell> kvset,
-      int batchId, boolean partial, boolean useTags, PrefixTreeCodec encoder,
-      HFileBlockEncodingContext blkEncodingCtx, DataOutputStream userDataStream) throws Exception {
-    for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
-      if (partial && i / 10 % 2 == 1) {
-        continue;
-      }
-      for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
-        if (!useTags) {
-          KeyValue kv = new KeyValue(getRowKey(batchId, i), CF_BYTES, getQualifier(j), getValue(
-              batchId, i, j));
-          kvset.add(kv);
-        } else {
-          KeyValue kv = new KeyValue(getRowKey(batchId, i), CF_BYTES, getQualifier(j), 0L,
-              getValue(batchId, i, j), new Tag[] { new ArrayBackedTag((byte) 1, "metaValue1") });
-          kvset.add(kv);
-        }
-      }
-    }
-    encoder.startBlockEncoding(blkEncodingCtx, userDataStream);
-    for (Cell kv : kvset) {
-      encoder.encode(kv, blkEncodingCtx, userDataStream);
-    }
-    encoder.endBlockEncoding(blkEncodingCtx, userDataStream, null);
-  }
-
-  private static void generateRandomTestData(ConcurrentSkipListSet<Cell> kvset,
-      int batchId, boolean useTags, PrefixTreeCodec encoder,
-      HFileBlockEncodingContext blkEncodingCtx, DataOutputStream userDataStream) throws Exception {
-    Random random = new Random();
-    for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
-      if (random.nextInt(100) < 50) {
-        continue;
-      }
-      for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
-        if (random.nextInt(100) < 50) {
-          continue;
-        }
-        if (!useTags) {
-          KeyValue kv = new KeyValue(getRowKey(batchId, i), CF_BYTES, getQualifier(j), getValue(
-              batchId, i, j));
-          kvset.add(kv);
-        } else {
-          KeyValue kv = new KeyValue(getRowKey(batchId, i), CF_BYTES, getQualifier(j), 0L,
-              getValue(batchId, i, j), new Tag[] { new ArrayBackedTag((byte) 1, "metaValue1") });
-          kvset.add(kv);
-        }
-      }
-    }
-    encoder.startBlockEncoding(blkEncodingCtx, userDataStream);
-    for (Cell kv : kvset) {
-      encoder.encode(kv, blkEncodingCtx, userDataStream);
-    }
-    encoder.endBlockEncoding(blkEncodingCtx, userDataStream, null);
-  }
-
-  private static byte[] getRowKey(int batchId, int i) {
-    return Bytes
-        .toBytes("batch" + batchId + "_row" + (formatRowNum ? String.format("%04d", i) : i));
-  }
-
-  private static byte[] getQualifier(int j) {
-    return Bytes.toBytes("colfdfafhfhsdfhsdfh" + j);
-  }
-
-  private static byte[] getValue(int batchId, int i, int j) {
-    return Bytes.toBytes("value_for_" + Bytes.toString(getRowKey(batchId, i)) + "_col" + j);
-  }
-
-}
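
One idea in the deleted TestPrefixTreeEncoding is worth keeping on record: verifySeeking() cross-checks the seeker under test against a CollectionBackedScanner oracle built from the same cells and dumps the inputs on any divergence. A dependency-free sketch of that oracle pattern, using NavigableSet.ceiling() in place of seekToKeyInBlock(); all names here are illustrative, not HBase API:

    import java.util.NavigableSet;
    import java.util.TreeSet;

    public class SeekOracleSketch {
      /** Cross-check a seek against a reference oracle holding the same keys. */
      static void verifySeeking(NavigableSet<String> underTest, NavigableSet<String> oracle,
          String seekKey) {
        String expected = oracle.ceiling(seekKey);   // first entry at or after the key
        String actual = underTest.ceiling(seekKey);
        if (expected == null ? actual != null : !expected.equals(actual)) {
          throw new AssertionError(
              "Expected " + expected + " actual " + actual + ", after seeking " + seekKey);
        }
      }

      public static void main(String[] args) {
        NavigableSet<String> oracle = new TreeSet<>();
        for (int i = 0; i < 50; i++) {
          oracle.add(String.format("batch0_row%04d", i)); // mirrors getRowKey() above
        }
        NavigableSet<String> underTest = new TreeSet<>(oracle); // stand-in for the seeker
        verifySeeking(underTest, oracle, "batch0_row0016");
        verifySeeking(underTest, oracle, "zzzz"); // past the end: both sides return null
        System.out.println("seek cross-checks passed");
      }
    }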

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8c58930/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java
index 23d3fbe..8a9e9dd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java
@@ -272,10 +272,9 @@ public class TestSeekToBlockWithEncoders {
     // create all seekers
     List<DataBlockEncoder.EncodedSeeker> encodedSeekers = new ArrayList<>();
     for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
-      if (encoding.getEncoder() == null || encoding == DataBlockEncoding.PREFIX_TREE) {
+      if (encoding.getEncoder() == null) {
         continue;
       }
-
       DataBlockEncoder encoder = encoding.getEncoder();
       HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false)
           .withIncludesMvcc(false).withIncludesTags(false)