Posted to commits@hbase.apache.org by zh...@apache.org on 2017/09/25 01:58:44 UTC

[02/11] hbase git commit: HBASE-18825 Use HStoreFile instead of StoreFile in our own code base and remove unnecessary methods in StoreFile interface
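
For callers, the visible change is twofold: internal code now holds the concrete HStoreFile type rather than the StoreFile interface, and StripeStoreFileManager.getSplitPoint() returns Optional<byte[]> instead of a nullable byte[]. A minimal caller-side sketch of the new pattern (illustrative only, not part of the commit; names match the tests below):

    import java.util.Optional;

    import org.apache.hadoop.hbase.regionserver.StripeStoreFileManager;

    class SplitPointExample {
      static void maybeSplit(StripeStoreFileManager manager) throws Exception {
        // Before this commit: byte[] split = manager.getSplitPoint(); null meant "no split".
        Optional<byte[]> split = manager.getSplitPoint();
        if (split.isPresent()) {
          byte[] point = split.get(); // midpoint of the biggest file, per the tests below
        }
      }
    }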

http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java
index 76959c6..6e5aeed 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java
@@ -22,7 +22,6 @@ import static org.apache.hadoop.hbase.regionserver.StripeStoreFileManager.OPEN_K
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -87,10 +86,10 @@ public class TestStripeStoreFileManager {
   @Test
   public void testInsertFilesIntoL0() throws Exception {
     StripeStoreFileManager manager = createManager();
-    MockStoreFile sf = createFile();
+    MockHStoreFile sf = createFile();
     manager.insertNewFiles(al(sf));
     assertEquals(1, manager.getStorefileCount());
-    Collection<StoreFile> filesForGet = manager.getFilesForScan(KEY_A, true, KEY_A, true);
+    Collection<HStoreFile> filesForGet = manager.getFilesForScan(KEY_A, true, KEY_A, true);
     assertEquals(1, filesForGet.size());
     assertTrue(filesForGet.contains(sf));
 
@@ -109,14 +108,14 @@ public class TestStripeStoreFileManager {
     manager.addCompactionResults(al(), al(createFile(OPEN_KEY, KEY_B),
         createFile(KEY_B, OPEN_KEY)));
     assertEquals(4, manager.getStorefileCount());
-    Collection<StoreFile> allFiles = manager.clearFiles();
+    Collection<HStoreFile> allFiles = manager.clearFiles();
     assertEquals(4, allFiles.size());
     assertEquals(0, manager.getStorefileCount());
     assertEquals(0, manager.getStorefiles().size());
   }
 
-  private static ArrayList<StoreFile> dumpIterator(Iterator<StoreFile> iter) {
-    ArrayList<StoreFile> result = new ArrayList<>();
+  private static ArrayList<HStoreFile> dumpIterator(Iterator<HStoreFile> iter) {
+    ArrayList<HStoreFile> result = new ArrayList<>();
     for (; iter.hasNext(); result.add(iter.next()));
     return result;
   }
@@ -124,23 +123,23 @@ public class TestStripeStoreFileManager {
   @Test
   public void testRowKeyBefore() throws Exception {
     StripeStoreFileManager manager = createManager();
-    StoreFile l0File = createFile(), l0File2 = createFile();
+    HStoreFile l0File = createFile(), l0File2 = createFile();
     manager.insertNewFiles(al(l0File));
     manager.insertNewFiles(al(l0File2));
     // Get candidate files.
-    Iterator<StoreFile> sfs = manager.getCandidateFilesForRowKeyBefore(KV_B);
+    Iterator<HStoreFile> sfs = manager.getCandidateFilesForRowKeyBefore(KV_B);
     sfs.next();
     sfs.remove();
     // Suppose we found a candidate in this file... make sure L0 file remaining is not removed.
     sfs = manager.updateCandidateFilesForRowKeyBefore(sfs, KV_B, KV_A);
     assertTrue(sfs.hasNext());
     // Now add some stripes (remove L0 file too)
-    MockStoreFile stripe0a = createFile(0, 100, OPEN_KEY, KEY_B),
+    MockHStoreFile stripe0a = createFile(0, 100, OPEN_KEY, KEY_B),
         stripe1 = createFile(KEY_B, OPEN_KEY);
     manager.addCompactionResults(al(l0File), al(stripe0a, stripe1));
     manager.removeCompactedFiles(al(l0File));
     // If we want a key <= KEY_A, we should get everything except stripe1.
-    ArrayList<StoreFile> sfsDump = dumpIterator(manager.getCandidateFilesForRowKeyBefore(KV_A));
+    ArrayList<HStoreFile> sfsDump = dumpIterator(manager.getCandidateFilesForRowKeyBefore(KV_A));
     assertEquals(2, sfsDump.size());
     assertTrue(sfsDump.contains(stripe0a));
     assertFalse(sfsDump.contains(stripe1));
@@ -162,7 +161,7 @@ public class TestStripeStoreFileManager {
     // Add one more, later, file to stripe0, remove the last annoying L0 file.
     // This file should be returned in preference to older L0 file; also, after we get
     // a candidate from the first file, the old one should not be removed.
-    StoreFile stripe0b = createFile(0, 101, OPEN_KEY, KEY_B);
+    HStoreFile stripe0b = createFile(0, 101, OPEN_KEY, KEY_B);
     manager.addCompactionResults(al(l0File2), al(stripe0b));
     manager.removeCompactedFiles(al(l0File2));
     sfs = manager.getCandidateFilesForRowKeyBefore(KV_A);
@@ -176,24 +175,24 @@ public class TestStripeStoreFileManager {
   public void testGetSplitPointEdgeCases() throws Exception {
     StripeStoreFileManager manager = createManager();
     // No files => no split.
-    assertNull(manager.getSplitPoint());
+    assertFalse(manager.getSplitPoint().isPresent());
 
     // If there are no stripes, should pick midpoint from the biggest file in L0.
-    MockStoreFile sf5 = createFile(5, 0);
+    MockHStoreFile sf5 = createFile(5, 0);
     sf5.splitPoint = new byte[] { 1 };
     manager.insertNewFiles(al(sf5));
     manager.insertNewFiles(al(createFile(1, 0)));
-    assertArrayEquals(sf5.splitPoint, manager.getSplitPoint());
+    assertArrayEquals(sf5.splitPoint, manager.getSplitPoint().get());
 
     // Same if there's one stripe but the biggest file is still in L0.
     manager.addCompactionResults(al(), al(createFile(2, 0, OPEN_KEY, OPEN_KEY)));
-    assertArrayEquals(sf5.splitPoint, manager.getSplitPoint());
+    assertArrayEquals(sf5.splitPoint, manager.getSplitPoint().get());
 
     // If the biggest file is in the stripe, should get from it.
-    MockStoreFile sf6 = createFile(6, 0, OPEN_KEY, OPEN_KEY);
+    MockHStoreFile sf6 = createFile(6, 0, OPEN_KEY, OPEN_KEY);
     sf6.splitPoint = new byte[] { 2 };
     manager.addCompactionResults(al(), al(sf6));
-    assertArrayEquals(sf6.splitPoint, manager.getSplitPoint());
+    assertArrayEquals(sf6.splitPoint, manager.getSplitPoint().get());
   }
 
   @Test
@@ -234,11 +233,11 @@ public class TestStripeStoreFileManager {
   private void verifySplitPointScenario(int splitPointAfter, boolean shouldSplitStripe,
       float splitRatioToVerify, int... sizes) throws Exception {
     assertTrue(sizes.length > 1);
-    ArrayList<StoreFile> sfs = new ArrayList<>();
+    ArrayList<HStoreFile> sfs = new ArrayList<>();
     for (int sizeIx = 0; sizeIx < sizes.length; ++sizeIx) {
       byte[] startKey = (sizeIx == 0) ? OPEN_KEY : Bytes.toBytes(sizeIx - 1);
       byte[] endKey = (sizeIx == sizes.length - 1) ? OPEN_KEY : Bytes.toBytes(sizeIx);
-      MockStoreFile sf = createFile(sizes[sizeIx], 0, startKey, endKey);
+      MockHStoreFile sf = createFile(sizes[sizeIx], 0, startKey, endKey);
       sf.splitPoint = Bytes.toBytes(-sizeIx); // set split point to the negative index
       sfs.add(sf);
     }
@@ -249,7 +248,7 @@ public class TestStripeStoreFileManager {
     }
     StripeStoreFileManager manager = createManager(al(), conf);
     manager.addCompactionResults(al(), sfs);
-    int result = Bytes.toInt(manager.getSplitPoint());
+    int result = Bytes.toInt(manager.getSplitPoint().get());
     // Either end key and thus positive index, or "middle" of the file and thus negative index.
     assertEquals(splitPointAfter * (shouldSplitStripe ? -1 : 1), result);
   }
@@ -265,7 +264,7 @@ public class TestStripeStoreFileManager {
     verifyGetAndScanScenario(manager, KEY_B, KEY_C);
 
     // Populate one L0 file.
-    MockStoreFile sf0 = createFile();
+    MockHStoreFile sf0 = createFile();
     manager.insertNewFiles(al(sf0));
     verifyGetAndScanScenario(manager, null, null,   sf0);
     verifyGetAndScanScenario(manager, null, KEY_C,  sf0);
@@ -273,11 +272,11 @@ public class TestStripeStoreFileManager {
     verifyGetAndScanScenario(manager, KEY_B, KEY_C, sf0);
 
     // Populate a bunch of files for stripes, keep L0.
-    MockStoreFile sfA = createFile(OPEN_KEY, KEY_A);
-    MockStoreFile sfB = createFile(KEY_A, KEY_B);
-    MockStoreFile sfC = createFile(KEY_B, KEY_C);
-    MockStoreFile sfD = createFile(KEY_C, KEY_D);
-    MockStoreFile sfE = createFile(KEY_D, OPEN_KEY);
+    MockHStoreFile sfA = createFile(OPEN_KEY, KEY_A);
+    MockHStoreFile sfB = createFile(KEY_A, KEY_B);
+    MockHStoreFile sfC = createFile(KEY_B, KEY_C);
+    MockHStoreFile sfD = createFile(KEY_C, KEY_D);
+    MockHStoreFile sfE = createFile(KEY_D, OPEN_KEY);
     manager.addCompactionResults(al(), al(sfA, sfB, sfC, sfD, sfE));
 
     verifyGetAndScanScenario(manager, null, null,              sf0, sfA, sfB, sfC, sfD, sfE);
@@ -292,7 +291,7 @@ public class TestStripeStoreFileManager {
   }
 
   private void verifyGetAndScanScenario(StripeStoreFileManager manager, byte[] start, byte[] end,
-      StoreFile... results) throws Exception {
+      HStoreFile... results) throws Exception {
     verifyGetOrScanScenario(manager, start, end, results);
   }
 
@@ -302,18 +301,18 @@ public class TestStripeStoreFileManager {
     // In L0, there will be file w/o metadata (real L0, 3 files with invalid metadata, and 3
     // files that overlap valid stripes in various ways). Note that the 4th way to overlap the
     // stripes will cause the structure to be mostly scraped, and is tested separately.
-    ArrayList<StoreFile> validStripeFiles = al(createFile(OPEN_KEY, KEY_B),
+    ArrayList<HStoreFile> validStripeFiles = al(createFile(OPEN_KEY, KEY_B),
         createFile(KEY_B, KEY_C), createFile(KEY_C, OPEN_KEY),
         createFile(KEY_C, OPEN_KEY));
-    ArrayList<StoreFile> filesToGoToL0 = al(createFile(), createFile(null, KEY_A),
+    ArrayList<HStoreFile> filesToGoToL0 = al(createFile(), createFile(null, KEY_A),
         createFile(KEY_D, null), createFile(KEY_D, KEY_A), createFile(keyAfter(KEY_A), KEY_C),
         createFile(OPEN_KEY, KEY_D), createFile(KEY_D, keyAfter(KEY_D)));
-    ArrayList<StoreFile> allFilesToGo = flattenLists(validStripeFiles, filesToGoToL0);
+    ArrayList<HStoreFile> allFilesToGo = flattenLists(validStripeFiles, filesToGoToL0);
     Collections.shuffle(allFilesToGo);
     StripeStoreFileManager manager = createManager(allFilesToGo);
-    List<StoreFile> l0Files = manager.getLevel0Files();
+    List<HStoreFile> l0Files = manager.getLevel0Files();
     assertEquals(filesToGoToL0.size(), l0Files.size());
-    for (StoreFile sf : filesToGoToL0) {
+    for (HStoreFile sf : filesToGoToL0) {
       assertTrue(l0Files.contains(sf));
     }
     verifyAllFiles(manager, allFilesToGo);
@@ -323,7 +322,7 @@ public class TestStripeStoreFileManager {
   public void testLoadFilesWithBadStripe() throws Exception {
     // Current "algorithm" will see the after-B key before C key, add it as valid stripe,
     // and then fail all other stripes. So everything would end up in L0.
-    ArrayList<StoreFile> allFilesToGo = al(createFile(OPEN_KEY, KEY_B),
+    ArrayList<HStoreFile> allFilesToGo = al(createFile(OPEN_KEY, KEY_B),
         createFile(KEY_B, KEY_C), createFile(KEY_C, OPEN_KEY),
         createFile(KEY_B, keyAfter(KEY_B)));
     Collections.shuffle(allFilesToGo);
@@ -346,7 +345,7 @@ public class TestStripeStoreFileManager {
   @Test
   public void testLoadFilesAfterSplit() throws Exception {
     // If stripes are good but have non-open ends, they must be treated as open ends.
-    MockStoreFile sf = createFile(KEY_B, KEY_C);
+    MockHStoreFile sf = createFile(KEY_B, KEY_C);
     StripeStoreFileManager manager = createManager(al(createFile(OPEN_KEY, KEY_B), sf));
     assertEquals(0, manager.getLevel0Files().size());
     // Here, [B, C] is logically [B, inf), so we should be able to compact it to that only.
@@ -367,7 +366,7 @@ public class TestStripeStoreFileManager {
   public void testAddingCompactionResults() throws Exception {
     StripeStoreFileManager manager = createManager();
     // First, add some L0 files and "compact" one with new stripe creation.
-    StoreFile sf_L0_0a = createFile(), sf_L0_0b = createFile();
+    HStoreFile sf_L0_0a = createFile(), sf_L0_0b = createFile();
     manager.insertNewFiles(al(sf_L0_0a, sf_L0_0b));
 
     // Try compacting with invalid new branches (gaps, overlaps) - no effect.
@@ -379,24 +378,24 @@ public class TestStripeStoreFileManager {
     verifyInvalidCompactionScenario(manager, al(sf_L0_0a), al(createFile(OPEN_KEY, KEY_B),
         createFile(KEY_A, KEY_B), createFile(KEY_B, OPEN_KEY)));
 
-    StoreFile sf_i2B_0 = createFile(OPEN_KEY, KEY_B);
-    StoreFile sf_B2C_0 = createFile(KEY_B, KEY_C);
-    StoreFile sf_C2i_0 = createFile(KEY_C, OPEN_KEY);
+    HStoreFile sf_i2B_0 = createFile(OPEN_KEY, KEY_B);
+    HStoreFile sf_B2C_0 = createFile(KEY_B, KEY_C);
+    HStoreFile sf_C2i_0 = createFile(KEY_C, OPEN_KEY);
     manager.addCompactionResults(al(sf_L0_0a), al(sf_i2B_0, sf_B2C_0, sf_C2i_0));
     manager.removeCompactedFiles(al(sf_L0_0a));
     verifyAllFiles(manager, al(sf_L0_0b, sf_i2B_0, sf_B2C_0, sf_C2i_0));
 
     // Add another l0 file, "compact" both L0 into two stripes
-    StoreFile sf_L0_1 = createFile();
-    StoreFile sf_i2B_1 = createFile(OPEN_KEY, KEY_B);
-    StoreFile sf_B2C_1 = createFile(KEY_B, KEY_C);
+    HStoreFile sf_L0_1 = createFile();
+    HStoreFile sf_i2B_1 = createFile(OPEN_KEY, KEY_B);
+    HStoreFile sf_B2C_1 = createFile(KEY_B, KEY_C);
     manager.insertNewFiles(al(sf_L0_1));
     manager.addCompactionResults(al(sf_L0_0b, sf_L0_1), al(sf_i2B_1, sf_B2C_1));
     manager.removeCompactedFiles(al(sf_L0_0b, sf_L0_1));
     verifyAllFiles(manager, al(sf_i2B_0, sf_B2C_0, sf_C2i_0, sf_i2B_1, sf_B2C_1));
 
     // Try compacting with invalid file (no metadata) - should add files to L0.
-    StoreFile sf_L0_2 = createFile(null, null);
+    HStoreFile sf_L0_2 = createFile(null, null);
     manager.addCompactionResults(al(), al(sf_L0_2));
     manager.removeCompactedFiles(al());
     verifyAllFiles(manager, al(sf_i2B_0, sf_B2C_0, sf_C2i_0, sf_i2B_1, sf_B2C_1, sf_L0_2));
@@ -405,46 +404,46 @@ public class TestStripeStoreFileManager {
     manager.removeCompactedFiles(al(sf_L0_2));
 
     // Do regular compaction in the first stripe.
-    StoreFile sf_i2B_3 = createFile(OPEN_KEY, KEY_B);
+    HStoreFile sf_i2B_3 = createFile(OPEN_KEY, KEY_B);
     manager.addCompactionResults(al(sf_i2B_0, sf_i2B_1), al(sf_i2B_3));
     manager.removeCompactedFiles(al(sf_i2B_0, sf_i2B_1));
     verifyAllFiles(manager, al(sf_B2C_0, sf_C2i_0, sf_B2C_1, sf_i2B_3));
 
     // Rebalance two stripes.
-    StoreFile sf_B2D_4 = createFile(KEY_B, KEY_D);
-    StoreFile sf_D2i_4 = createFile(KEY_D, OPEN_KEY);
+    HStoreFile sf_B2D_4 = createFile(KEY_B, KEY_D);
+    HStoreFile sf_D2i_4 = createFile(KEY_D, OPEN_KEY);
     manager.addCompactionResults(al(sf_B2C_0, sf_C2i_0, sf_B2C_1), al(sf_B2D_4, sf_D2i_4));
     manager.removeCompactedFiles(al(sf_B2C_0, sf_C2i_0, sf_B2C_1));
     verifyAllFiles(manager, al(sf_i2B_3, sf_B2D_4, sf_D2i_4));
 
     // Split the first stripe.
-    StoreFile sf_i2A_5 = createFile(OPEN_KEY, KEY_A);
-    StoreFile sf_A2B_5 = createFile(KEY_A, KEY_B);
+    HStoreFile sf_i2A_5 = createFile(OPEN_KEY, KEY_A);
+    HStoreFile sf_A2B_5 = createFile(KEY_A, KEY_B);
     manager.addCompactionResults(al(sf_i2B_3), al(sf_i2A_5, sf_A2B_5));
     manager.removeCompactedFiles(al(sf_i2B_3));
     verifyAllFiles(manager, al(sf_B2D_4, sf_D2i_4, sf_i2A_5, sf_A2B_5));
 
     // Split the middle stripe.
-    StoreFile sf_B2C_6 = createFile(KEY_B, KEY_C);
-    StoreFile sf_C2D_6 = createFile(KEY_C, KEY_D);
+    HStoreFile sf_B2C_6 = createFile(KEY_B, KEY_C);
+    HStoreFile sf_C2D_6 = createFile(KEY_C, KEY_D);
     manager.addCompactionResults(al(sf_B2D_4), al(sf_B2C_6, sf_C2D_6));
     manager.removeCompactedFiles(al(sf_B2D_4));
     verifyAllFiles(manager, al(sf_D2i_4, sf_i2A_5, sf_A2B_5, sf_B2C_6, sf_C2D_6));
 
     // Merge two different middle stripes.
-    StoreFile sf_A2C_7 = createFile(KEY_A, KEY_C);
+    HStoreFile sf_A2C_7 = createFile(KEY_A, KEY_C);
     manager.addCompactionResults(al(sf_A2B_5, sf_B2C_6), al(sf_A2C_7));
     manager.removeCompactedFiles(al(sf_A2B_5, sf_B2C_6));
     verifyAllFiles(manager, al(sf_D2i_4, sf_i2A_5, sf_C2D_6, sf_A2C_7));
 
     // Merge lower half.
-    StoreFile sf_i2C_8 = createFile(OPEN_KEY, KEY_C);
+    HStoreFile sf_i2C_8 = createFile(OPEN_KEY, KEY_C);
     manager.addCompactionResults(al(sf_i2A_5, sf_A2C_7), al(sf_i2C_8));
     manager.removeCompactedFiles(al(sf_i2A_5, sf_A2C_7));
     verifyAllFiles(manager, al(sf_D2i_4, sf_C2D_6, sf_i2C_8));
 
     // Merge all.
-    StoreFile sf_i2i_9 = createFile(OPEN_KEY, OPEN_KEY);
+    HStoreFile sf_i2i_9 = createFile(OPEN_KEY, OPEN_KEY);
     manager.addCompactionResults(al(sf_D2i_4, sf_C2D_6, sf_i2C_8), al(sf_i2i_9));
     manager.removeCompactedFiles(al(sf_D2i_4, sf_C2D_6, sf_i2C_8));
     verifyAllFiles(manager, al(sf_i2i_9));
@@ -455,11 +454,11 @@ public class TestStripeStoreFileManager {
     // Add file flush into stripes
     StripeStoreFileManager sfm = createManager();
     assertEquals(0, sfm.getStripeCount());
-    StoreFile sf_i2c = createFile(OPEN_KEY, KEY_C), sf_c2i = createFile(KEY_C, OPEN_KEY);
+    HStoreFile sf_i2c = createFile(OPEN_KEY, KEY_C), sf_c2i = createFile(KEY_C, OPEN_KEY);
     sfm.insertNewFiles(al(sf_i2c, sf_c2i));
     assertEquals(2, sfm.getStripeCount());
     // Now try to add conflicting flush - should throw.
-    StoreFile sf_i2d = createFile(OPEN_KEY, KEY_D), sf_d2i = createFile(KEY_D, OPEN_KEY);
+    HStoreFile sf_i2d = createFile(OPEN_KEY, KEY_D), sf_d2i = createFile(KEY_D, OPEN_KEY);
     sfm.insertNewFiles(al(sf_i2d, sf_d2i));
     assertEquals(2, sfm.getStripeCount());
     assertEquals(2, sfm.getLevel0Files().size());
@@ -470,7 +469,7 @@ public class TestStripeStoreFileManager {
     assertEquals(0, sfm.getLevel0Files().size());
     // Add another file to stripe; then "rebalance" stripes w/o it - the file, which was
     // presumably flushed during compaction, should go to L0.
-    StoreFile sf_i2c_2 = createFile(OPEN_KEY, KEY_C);
+    HStoreFile sf_i2c_2 = createFile(OPEN_KEY, KEY_C);
     sfm.insertNewFiles(al(sf_i2c_2));
     sfm.addCompactionResults(al(sf_i2c, sf_c2i), al(sf_i2d, sf_d2i));
     sfm.removeCompactedFiles(al(sf_i2c, sf_c2i));
@@ -482,16 +481,16 @@ public class TestStripeStoreFileManager {
   public void testEmptyResultsForStripes() throws Exception {
     // Test that we can compact L0 into a subset of stripes.
     StripeStoreFileManager manager = createManager();
-    StoreFile sf0a = createFile();
-    StoreFile sf0b = createFile();
+    HStoreFile sf0a = createFile();
+    HStoreFile sf0b = createFile();
     manager.insertNewFiles(al(sf0a));
     manager.insertNewFiles(al(sf0b));
-    ArrayList<StoreFile> compacted = al(createFile(OPEN_KEY, KEY_B),
+    ArrayList<HStoreFile> compacted = al(createFile(OPEN_KEY, KEY_B),
         createFile(KEY_B, KEY_C), createFile(KEY_C, OPEN_KEY));
     manager.addCompactionResults(al(sf0a), compacted);
     manager.removeCompactedFiles(al(sf0a));
     // Next L0 compaction only produces file for the first and last stripe.
-    ArrayList<StoreFile> compacted2 = al(createFile(OPEN_KEY, KEY_B), createFile(KEY_C, OPEN_KEY));
+    ArrayList<HStoreFile> compacted2 = al(createFile(OPEN_KEY, KEY_B), createFile(KEY_C, OPEN_KEY));
     manager.addCompactionResults(al(sf0b), compacted2);
     manager.removeCompactedFiles(al(sf0b));
     compacted.addAll(compacted2);
@@ -526,7 +525,7 @@ public class TestStripeStoreFileManager {
       sfm.insertNewFiles(al(createFile()));
     }
     for (int i = 0; i < filesInStripe; ++i) {
-      ArrayList<StoreFile> stripe = new ArrayList<>();
+      ArrayList<HStoreFile> stripe = new ArrayList<>();
       for (int j = 0; j < stripes; ++j) {
         stripe.add(createFile(
             (j == 0) ? OPEN_KEY : keys[j - 1], (j == stripes - 1) ? OPEN_KEY : keys[j]));
@@ -537,8 +536,8 @@ public class TestStripeStoreFileManager {
   }
 
   private void verifyInvalidCompactionScenario(StripeStoreFileManager manager,
-      ArrayList<StoreFile> filesToCompact, ArrayList<StoreFile> filesToInsert) throws Exception {
-    Collection<StoreFile> allFiles = manager.getStorefiles();
+      ArrayList<HStoreFile> filesToCompact, ArrayList<HStoreFile> filesToInsert) throws Exception {
+    Collection<HStoreFile> allFiles = manager.getStorefiles();
     try {
        manager.addCompactionResults(filesToCompact, filesToInsert);
        fail("Should have thrown");
@@ -549,33 +548,33 @@ public class TestStripeStoreFileManager {
   }
 
   private void verifyGetOrScanScenario(StripeStoreFileManager manager, byte[] start, byte[] end,
-      StoreFile... results) throws Exception {
+      HStoreFile... results) throws Exception {
     verifyGetOrScanScenario(manager, start, end, Arrays.asList(results));
   }
 
   private void verifyGetOrScanScenario(StripeStoreFileManager manager, byte[] start, byte[] end,
-      Collection<StoreFile> results) throws Exception {
+      Collection<HStoreFile> results) throws Exception {
     start = start != null ? start : HConstants.EMPTY_START_ROW;
     end = end != null ? end : HConstants.EMPTY_END_ROW;
-    Collection<StoreFile> sfs = manager.getFilesForScan(start, true, end, false);
+    Collection<HStoreFile> sfs = manager.getFilesForScan(start, true, end, false);
     assertEquals(results.size(), sfs.size());
-    for (StoreFile result : results) {
+    for (HStoreFile result : results) {
       assertTrue(sfs.contains(result));
     }
   }
 
   private void verifyAllFiles(
-      StripeStoreFileManager manager, Collection<StoreFile> results) throws Exception {
+      StripeStoreFileManager manager, Collection<HStoreFile> results) throws Exception {
     verifyGetOrScanScenario(manager, null, null, results);
   }
 
   // TODO: replace with Mockito?
-  private static MockStoreFile createFile(
+  private static MockHStoreFile createFile(
       long size, long seqNum, byte[] startKey, byte[] endKey) throws Exception {
     FileSystem fs = TEST_UTIL.getTestFileSystem();
     Path testFilePath = StoreFileWriter.getUniqueFile(fs, CFDIR);
     fs.create(testFilePath).close();
-    MockStoreFile sf = new MockStoreFile(TEST_UTIL, testFilePath, size, 0, false, seqNum);
+    MockHStoreFile sf = new MockHStoreFile(TEST_UTIL, testFilePath, size, 0, false, seqNum);
     if (startKey != null) {
       sf.setMetadataValue(StripeStoreFileManager.STRIPE_START_KEY, startKey);
     }
@@ -585,15 +584,15 @@ public class TestStripeStoreFileManager {
     return sf;
   }
 
-  private static MockStoreFile createFile(long size, long seqNum) throws Exception {
+  private static MockHStoreFile createFile(long size, long seqNum) throws Exception {
     return createFile(size, seqNum, null, null);
   }
 
-  private static MockStoreFile createFile(byte[] startKey, byte[] endKey) throws Exception {
+  private static MockHStoreFile createFile(byte[] startKey, byte[] endKey) throws Exception {
     return createFile(0, 0, startKey, endKey);
   }
 
-  private static MockStoreFile createFile() throws Exception {
+  private static MockHStoreFile createFile() throws Exception {
     return createFile(null, null);
   }
 
@@ -601,12 +600,12 @@ public class TestStripeStoreFileManager {
     return createManager(new ArrayList<>());
   }
 
-  private static StripeStoreFileManager createManager(ArrayList<StoreFile> sfs) throws Exception {
+  private static StripeStoreFileManager createManager(ArrayList<HStoreFile> sfs) throws Exception {
     return createManager(sfs, TEST_UTIL.getConfiguration());
   }
 
   private static StripeStoreFileManager createManager(
-      ArrayList<StoreFile> sfs, Configuration conf) throws Exception {
+      ArrayList<HStoreFile> sfs, Configuration conf) throws Exception {
     StripeStoreConfig config = new StripeStoreConfig(
         conf, Mockito.mock(StoreConfigInformation.class));
     StripeStoreFileManager result = new StripeStoreFileManager(CellComparator.COMPARATOR, conf,
@@ -615,13 +614,13 @@ public class TestStripeStoreFileManager {
     return result;
   }
 
-  private static ArrayList<StoreFile> al(StoreFile... sfs) {
+  private static ArrayList<HStoreFile> al(HStoreFile... sfs) {
     return new ArrayList<>(Arrays.asList(sfs));
   }
 
-  private static ArrayList<StoreFile> flattenLists(ArrayList<StoreFile>... sfls) {
-    ArrayList<StoreFile> result = new ArrayList<>();
-    for (ArrayList<StoreFile> sfl : sfls) {
+  private static ArrayList<HStoreFile> flattenLists(ArrayList<HStoreFile>... sfls) {
+    ArrayList<HStoreFile> result = new ArrayList<>();
+    for (ArrayList<HStoreFile> sfl : sfls) {
       result.addAll(sfl);
     }
     return result;

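Condensed, the L0-to-stripes lifecycle the test above exercises looks like this (a sketch reusing the test's own helpers createFile()/al(); a file without stripe-boundary metadata lands in L0, and a compaction result with boundaries moves data into stripes):

    StripeStoreFileManager manager = createManager();
    HStoreFile flushed = createFile();              // no boundary metadata -> goes to L0
    manager.insertNewFiles(al(flushed));
    HStoreFile lower = createFile(OPEN_KEY, KEY_B); // stripe (-inf, B)
    HStoreFile upper = createFile(KEY_B, OPEN_KEY); // stripe [B, +inf)
    manager.addCompactionResults(al(flushed), al(lower, upper));
    manager.removeCompactedFiles(al(flushed));      // drop the compacted-away L0 file
    assertEquals(0, manager.getLevel0Files().size());
    assertEquals(2, manager.getStorefileCount());
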
http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java
index 767ad2e..730696c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java
@@ -28,12 +28,12 @@ import java.util.concurrent.ThreadLocalRandom;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
@@ -67,7 +67,10 @@ public class TestSwitchToStreamRead {
     }
     VALUE_PREFIX = sb.append("-").toString();
     REGION = UTIL.createLocalHRegion(
-      new HTableDescriptor(TABLE_NAME).addFamily(new HColumnDescriptor(FAMILY).setBlocksize(1024)),
+      TableDescriptorBuilder.newBuilder(TABLE_NAME)
+          .addColumnFamily(
+            ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setBlocksize(1024).build())
+          .build(),
       null, null);
     for (int i = 0; i < 900; i++) {
       REGION
@@ -122,7 +125,7 @@ public class TestSwitchToStreamRead {
       }
     }
     // make sure all scanners are closed.
-    for (StoreFile sf : REGION.getStore(FAMILY).getStorefiles()) {
+    for (HStoreFile sf : REGION.getStore(FAMILY).getStorefiles()) {
       assertFalse(sf.isReferencedInReads());
     }
   }

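The descriptor construction above is the standard builder migration: the mutable HTableDescriptor/HColumnDescriptor pair gives way to the immutable TableDescriptorBuilder/ColumnFamilyDescriptorBuilder. The same construction as a standalone sketch (table and family names are placeholders):

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    class DescriptorExample {
      static TableDescriptor smallBlockTable() {
        return TableDescriptorBuilder.newBuilder(TableName.valueOf("test"))
            .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"))
                .setBlocksize(1024) // 1 KB blocks, as in the test above
                .build())
            .build();
      }
    }
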
http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ConstantSizeFileListGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ConstantSizeFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ConstantSizeFileListGenerator.java
index 5014b41..2e47566 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ConstantSizeFileListGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ConstantSizeFileListGenerator.java
@@ -15,14 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 
 /**
  * Class to generate several lists of StoreFiles that are all the same size.
@@ -37,8 +36,8 @@ class ConstantSizeFileListGenerator extends StoreFileListGenerator {
   }
 
   @Override
-  public final Iterator<List<StoreFile>> iterator() {
-    return new Iterator<List<StoreFile>>() {
+  public final Iterator<List<HStoreFile>> iterator() {
+    return new Iterator<List<HStoreFile>>() {
       private int count = 0;
 
       @Override
@@ -47,9 +46,9 @@ class ConstantSizeFileListGenerator extends StoreFileListGenerator {
       }
 
       @Override
-      public List<StoreFile> next() {
+      public List<HStoreFile> next() {
         count += 1;
-        ArrayList<StoreFile> files = new ArrayList<>(NUM_FILES_GEN);
+        ArrayList<HStoreFile> files = new ArrayList<>(NUM_FILES_GEN);
         for (int i = 0; i < NUM_FILES_GEN; i++) {
           files.add(createMockStoreFile(FILESIZE));
         }

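Usage is the same across this whole family of generators (constant, explicit, Gaussian, semi-constant, sinusoidal, spiky): the abstract parent further below makes them Iterable<List<HStoreFile>>, so a policy test can consume batches directly. A sketch, assuming the no-arg package-private constructor the sibling classes show:

    StoreFileListGenerator gen = new ConstantSizeFileListGenerator();
    for (List<HStoreFile> batch : gen) {
      // each batch holds NUM_FILES_GEN mock files of a fixed size (FILESIZE)
    }
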
http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/EverythingPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/EverythingPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/EverythingPolicy.java
index 46bb639..ca65bf1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/EverythingPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/EverythingPolicy.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 
@@ -23,8 +22,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 
 /**
  * Test Policy to compact everything every time.
@@ -36,19 +35,16 @@ public class EverythingPolicy extends RatioBasedCompactionPolicy {
    * @param conf            The Conf.
    * @param storeConfigInfo Info about the store.
    */
-  public EverythingPolicy(final Configuration conf,
-                          final StoreConfigInformation storeConfigInfo) {
+  public EverythingPolicy(Configuration conf, StoreConfigInformation storeConfigInfo) {
     super(conf, storeConfigInfo);
   }
 
   @Override
-  protected final ArrayList<StoreFile> applyCompactionPolicy(final ArrayList<StoreFile> candidates,
-    final boolean mayUseOffPeak, final boolean mayBeStuck) throws IOException {
-
+  protected final ArrayList<HStoreFile> applyCompactionPolicy(ArrayList<HStoreFile> candidates,
+      boolean mayUseOffPeak, boolean mayBeStuck) throws IOException {
     if (candidates.size() < comConf.getMinFilesToCompact()) {
       return new ArrayList<>(0);
     }
-
     return new ArrayList<>(candidates);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ExplicitFileListGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ExplicitFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ExplicitFileListGenerator.java
index 24302b8..e14f696 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ExplicitFileListGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ExplicitFileListGenerator.java
@@ -15,13 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 
 /**
  * Class to create list of mock storefiles of specified length.
@@ -53,8 +52,8 @@ class ExplicitFileListGenerator extends StoreFileListGenerator {
   }
 
   @Override
-  public final Iterator<List<StoreFile>> iterator() {
-    return new Iterator<List<StoreFile>>() {
+  public final Iterator<List<HStoreFile>> iterator() {
+    return new Iterator<List<HStoreFile>>() {
       private int nextIndex = 0;
       @Override
       public boolean hasNext() {
@@ -62,8 +61,8 @@ class ExplicitFileListGenerator extends StoreFileListGenerator {
       }
 
       @Override
-      public List<StoreFile> next() {
-        List<StoreFile> files =  createStoreFileList(fileSizes[nextIndex]);
+      public List<HStoreFile> next() {
+        List<HStoreFile> files =  createStoreFileList(fileSizes[nextIndex]);
         nextIndex += 1;
         return files;
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/GaussianFileListGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/GaussianFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/GaussianFileListGenerator.java
index a2964ff..fe5f904 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/GaussianFileListGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/GaussianFileListGenerator.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import java.util.ArrayList;
@@ -24,7 +23,7 @@ import java.util.List;
 
 import org.apache.commons.math3.random.GaussianRandomGenerator;
 import org.apache.commons.math3.random.MersenneTwister;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 
 class GaussianFileListGenerator extends StoreFileListGenerator {
 
@@ -33,8 +32,8 @@ class GaussianFileListGenerator extends StoreFileListGenerator {
   }
 
   @Override
-  public Iterator<List<StoreFile>> iterator() {
-    return new Iterator<List<StoreFile>>() {
+  public Iterator<List<HStoreFile>> iterator() {
+    return new Iterator<List<HStoreFile>>() {
       private GaussianRandomGenerator gen =
           new GaussianRandomGenerator(new MersenneTwister(random.nextInt()));
       private int count = 0;
@@ -45,9 +44,9 @@ class GaussianFileListGenerator extends StoreFileListGenerator {
       }
 
       @Override
-      public List<StoreFile> next() {
+      public List<HStoreFile> next() {
         count += 1;
-        ArrayList<StoreFile> files = new ArrayList<>(NUM_FILES_GEN);
+        ArrayList<HStoreFile> files = new ArrayList<>(NUM_FILES_GEN);
         for (int i = 0; i < NUM_FILES_GEN; i++) {
           files.add(createMockStoreFile(
               (int) Math.ceil(Math.max(0, gen.nextNormalizedDouble() * 32 + 32)))

http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
index 85350ca..21cee13 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
@@ -21,18 +21,18 @@ package org.apache.hadoop.hbase.regionserver.compactions;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
-
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Random;
 
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileReader;
 import org.apache.hadoop.util.StringUtils;
 
+import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
+
 /**
  * Base class of objects that can create mock store files with a given size.
  */
@@ -42,31 +42,31 @@ class MockStoreFileGenerator {
   /** The random number generator. */
   protected Random random;
 
-  MockStoreFileGenerator(Class klass) {
+  MockStoreFileGenerator(Class<?> klass) {
     random = new Random(klass.getSimpleName().hashCode());
   }
 
-  protected List<StoreFile> createStoreFileList(final int[] fs) {
-    List<StoreFile> storeFiles = new LinkedList<>();
+  protected List<HStoreFile> createStoreFileList(final int[] fs) {
+    List<HStoreFile> storeFiles = new LinkedList<>();
     for (int fileSize : fs) {
       storeFiles.add(createMockStoreFile(fileSize));
     }
     return storeFiles;
   }
 
-  protected StoreFile createMockStoreFile(final long size) {
+  protected HStoreFile createMockStoreFile(final long size) {
     return createMockStoreFile(size * 1024 * 1024, -1L);
   }
 
-  protected StoreFile createMockStoreFileBytes(final long size) {
+  protected HStoreFile createMockStoreFileBytes(final long size) {
     return createMockStoreFile(size, -1L);
   }
 
-  protected StoreFile createMockStoreFile(final long sizeInBytes, final long seqId) {
-    StoreFile mockSf = mock(StoreFile.class);
+  protected HStoreFile createMockStoreFile(final long sizeInBytes, final long seqId) {
+    HStoreFile mockSf = mock(HStoreFile.class);
     StoreFileReader reader = mock(StoreFileReader.class);
-    String stringPath = "/hbase/testTable/regionA/"
-        + RandomStringUtils.random(FILENAME_LENGTH, 0, 0, true, true, null, random);
+    String stringPath = "/hbase/testTable/regionA/" +
+        RandomStringUtils.random(FILENAME_LENGTH, 0, 0, true, true, null, random);
     Path path = new Path(stringPath);
 
 

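The hunk above ends before the Mockito stubbing, so for orientation here is a minimal sketch of what a usable mock store file needs; the stubs are assumptions inferred from the accessors the perf test below actually calls (getReader().length() in particular), and the real method may stub more:

    HStoreFile mockSf = mock(HStoreFile.class);
    StoreFileReader reader = mock(StoreFileReader.class);
    when(reader.length()).thenReturn(sizeInBytes); // policies size candidates via the reader
    when(mockSf.getReader()).thenReturn(reader);
    when(mockSf.getPath()).thenReturn(path);       // assumption: stubbed for logging/toString
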
http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
index 2dbc26f..5554683 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
@@ -26,13 +26,11 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.regionserver.HStore;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -45,9 +43,6 @@ import org.junit.runners.Parameterized;
 @RunWith(Parameterized.class)
 public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
 
-
-  private static final Log LOG = LogFactory.getLog(PerfTestCompactionPolicies.class);
-
   private final RatioBasedCompactionPolicy cp;
   private final StoreFileListGenerator generator;
   private final HStore store;
@@ -62,13 +57,13 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
 
 
 
-    Class[] policyClasses = new Class[]{
+    Class<?>[] policyClasses = new Class[]{
         EverythingPolicy.class,
         RatioBasedCompactionPolicy.class,
         ExploringCompactionPolicy.class,
     };
 
-    Class[] fileListGenClasses = new Class[]{
+    Class<?>[] fileListGenClasses = new Class[]{
         ExplicitFileListGenerator.class,
         ConstantSizeFileListGenerator.class,
         SemiConstantSizeFileListGenerator.class,
@@ -88,12 +83,12 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
         * policyClasses.length);
 
 
-    for (Class policyClass :  policyClasses) {
-      for (Class genClass: fileListGenClasses) {
-        for (int maxFile:maxFileValues) {
-          for (int minFile:minFilesValues) {
-            for (float ratio:ratioValues) {
-              params.add(new Object[] {policyClass, genClass, maxFile, minFile, ratio});
+    for (Class<?> policyClass : policyClasses) {
+      for (Class<?> genClass : fileListGenClasses) {
+        for (int maxFile : maxFileValues) {
+          for (int minFile : minFilesValues) {
+            for (float ratio : ratioValues) {
+              params.add(new Object[] { policyClass, genClass, maxFile, minFile, ratio });
             }
           }
         }
@@ -150,9 +145,9 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
   @Test
   public final void testSelection() throws Exception {
     long fileDiff = 0;
-    for (List<StoreFile> storeFileList : generator) {
-      List<StoreFile> currentFiles = new ArrayList<>(18);
-      for (StoreFile file : storeFileList) {
+    for (List<HStoreFile> storeFileList : generator) {
+      List<HStoreFile> currentFiles = new ArrayList<>(18);
+      for (HStoreFile file : storeFileList) {
         currentFiles.add(file);
         currentFiles = runIteration(currentFiles);
       }
@@ -172,21 +167,20 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
   }
 
 
-  private List<StoreFile> runIteration(List<StoreFile> startingStoreFiles) throws IOException {
-
-    List<StoreFile> storeFiles = new ArrayList<>(startingStoreFiles);
+  private List<HStoreFile> runIteration(List<HStoreFile> startingStoreFiles) throws IOException {
+    List<HStoreFile> storeFiles = new ArrayList<>(startingStoreFiles);
     CompactionRequest req = cp.selectCompaction(
         storeFiles, new ArrayList<>(), false, false, false);
     long newFileSize = 0;
 
-    Collection<StoreFile> filesToCompact = req.getFiles();
+    Collection<HStoreFile> filesToCompact = req.getFiles();
 
     if (!filesToCompact.isEmpty()) {
 
       storeFiles = new ArrayList<>(storeFiles);
       storeFiles.removeAll(filesToCompact);
 
-      for (StoreFile storeFile : filesToCompact) {
+      for (HStoreFile storeFile : filesToCompact) {
         newFileSize += storeFile.getReader().length();
       }
 

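The tail of runIteration() is cut off by the hunk above. Presumably it folds the selected files back in as one merged mock file; a hedged reconstruction under that assumption, using the byte-sized factory createMockStoreFileBytes defined earlier:

    if (!filesToCompact.isEmpty()) {
      // (shown above) remove filesToCompact from storeFiles and sum their
      // reader lengths into newFileSize, then, presumably:
      storeFiles.add(createMockStoreFileBytes(newFileSize)); // merged output (assumption)
    }
    return storeFiles;
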
http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SemiConstantSizeFileListGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SemiConstantSizeFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SemiConstantSizeFileListGenerator.java
index 5fe47f3..c89f136 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SemiConstantSizeFileListGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SemiConstantSizeFileListGenerator.java
@@ -15,14 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 
 class SemiConstantSizeFileListGenerator extends StoreFileListGenerator {
   SemiConstantSizeFileListGenerator() {
@@ -30,8 +29,8 @@ class SemiConstantSizeFileListGenerator extends StoreFileListGenerator {
   }
 
   @Override
-  public Iterator<List<StoreFile>> iterator() {
-    return new Iterator<List<StoreFile>>() {
+  public Iterator<List<HStoreFile>> iterator() {
+    return new Iterator<List<HStoreFile>>() {
       private int count = 0;
 
       @Override
@@ -40,9 +39,9 @@ class SemiConstantSizeFileListGenerator extends StoreFileListGenerator {
       }
 
       @Override
-      public List<StoreFile> next() {
+      public List<HStoreFile> next() {
         count += 1;
-        ArrayList<StoreFile> files = new ArrayList<>(NUM_FILES_GEN);
+        ArrayList<HStoreFile> files = new ArrayList<>(NUM_FILES_GEN);
         for (int i = 0; i < NUM_FILES_GEN; i++) {
           files.add(createMockStoreFile(random.nextInt(5) + 30));
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SinusoidalFileListGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SinusoidalFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SinusoidalFileListGenerator.java
index f5f36ac..d270da7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SinusoidalFileListGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SinusoidalFileListGenerator.java
@@ -15,14 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 
 class SinusoidalFileListGenerator extends StoreFileListGenerator {
   SinusoidalFileListGenerator() {
@@ -30,10 +29,8 @@ class SinusoidalFileListGenerator extends StoreFileListGenerator {
   }
 
   @Override
-  public Iterator<List<StoreFile>> iterator() {
-
-
-    return new Iterator<List<StoreFile>>() {
+  public Iterator<List<HStoreFile>> iterator() {
+    return new Iterator<List<HStoreFile>>() {
       private int count = 0;
       @Override
       public boolean hasNext() {
@@ -41,9 +38,9 @@ class SinusoidalFileListGenerator extends StoreFileListGenerator {
       }
 
       @Override
-      public List<StoreFile> next() {
+      public List<HStoreFile> next() {
         count += 1;
-        ArrayList<StoreFile> files = new ArrayList<>(NUM_FILES_GEN);
+        ArrayList<HStoreFile> files = new ArrayList<>(NUM_FILES_GEN);
         for (int x = 0; x < NUM_FILES_GEN; x++) {
           int fileSize = (int) Math.abs(64 * Math.sin((Math.PI * x) / 50.0)) + 1;
           files.add(createMockStoreFile(fileSize));

http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SpikyFileListGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SpikyFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SpikyFileListGenerator.java
index 5201eb7..bed1342 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SpikyFileListGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SpikyFileListGenerator.java
@@ -15,14 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 
 class SpikyFileListGenerator extends StoreFileListGenerator {
 
@@ -31,8 +30,8 @@ class SpikyFileListGenerator extends StoreFileListGenerator {
   }
 
   @Override
-  public Iterator<List<StoreFile>> iterator() {
-    return new Iterator<List<StoreFile>>() {
+  public Iterator<List<HStoreFile>> iterator() {
+    return new Iterator<List<HStoreFile>>() {
       private int count = 0;
 
       @Override
@@ -41,9 +40,9 @@ class SpikyFileListGenerator extends StoreFileListGenerator {
       }
 
       @Override
-      public List<StoreFile> next() {
+      public List<HStoreFile> next() {
         count += 1;
-        ArrayList<StoreFile> files = new ArrayList<>(NUM_FILES_GEN);
+        ArrayList<HStoreFile> files = new ArrayList<>(NUM_FILES_GEN);
         for (int x = 0; x < NUM_FILES_GEN; x++) {
           int fileSize = random.nextInt(5) + 1;
           if ( x % 10 == 0) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/StoreFileListGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/StoreFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/StoreFileListGenerator.java
index 643f771..8854676 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/StoreFileListGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/StoreFileListGenerator.java
@@ -15,20 +15,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.regionserver.compactions;
 
 import java.util.List;
 
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 
-public abstract class StoreFileListGenerator
-    extends MockStoreFileGenerator implements Iterable<List<StoreFile>> {
+public abstract class StoreFileListGenerator extends MockStoreFileGenerator
+    implements Iterable<List<HStoreFile>> {
 
   public static final int MAX_FILE_GEN_ITERS = 10;
   public static final int NUM_FILES_GEN = 1000;
 
-  StoreFileListGenerator(final Class klass) {
+  StoreFileListGenerator(Class<?> klass) {
     super(klass);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java
index a71e766..e30383e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactedHFilesDischarger.java
@@ -47,8 +47,6 @@ import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.hadoop.hbase.regionserver.RegionServerServices;
-import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -60,7 +58,7 @@ import org.junit.experimental.categories.Category;
 @Category({ MediumTests.class, RegionServerTests.class })
 public class TestCompactedHFilesDischarger {
   private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
-  private Region region;
+  private HRegion region;
   private final static byte[] fam = Bytes.toBytes("cf_1");
   private final static byte[] qual1 = Bytes.toBytes("qf_1");
   private final static byte[] val = Bytes.toBytes("val");
@@ -120,21 +118,21 @@ public class TestCompactedHFilesDischarger {
     // flush them
     region.flush(true);
 
-    Store store = region.getStore(fam);
+    HStore store = region.getStore(fam);
     assertEquals(3, store.getStorefilesCount());
 
-    Collection<StoreFile> storefiles = store.getStorefiles();
-    Collection<StoreFile> compactedfiles =
-        ((HStore) store).getStoreEngine().getStoreFileManager().getCompactedfiles();
+    Collection<HStoreFile> storefiles = store.getStorefiles();
+    Collection<HStoreFile> compactedfiles =
+        store.getStoreEngine().getStoreFileManager().getCompactedfiles();
     // None of the files should be in compacted state.
-    for (StoreFile file : storefiles) {
+    for (HStoreFile file : storefiles) {
       assertFalse(file.isCompactedAway());
     }
     // Try to run the cleaner without compaction. there should not be any change
     cleaner.chore();
     storefiles = store.getStorefiles();
     // None of the files should be in compacted state.
-    for (StoreFile file : storefiles) {
+    for (HStoreFile file : storefiles) {
       assertFalse(file.isCompactedAway());
     }
     // now do some compaction
@@ -149,7 +147,7 @@ public class TestCompactedHFilesDischarger {
     cleaner.chore();
     assertEquals(1, store.getStorefilesCount());
     storefiles = store.getStorefiles();
-    for (StoreFile file : storefiles) {
+    for (HStoreFile file : storefiles) {
       // Should not be in compacted state
       assertFalse(file.isCompactedAway());
     }
@@ -186,14 +184,14 @@ public class TestCompactedHFilesDischarger {
     // flush them
     region.flush(true);
 
-    Store store = region.getStore(fam);
+    HStore store = region.getStore(fam);
     assertEquals(3, store.getStorefilesCount());
 
-    Collection<StoreFile> storefiles = store.getStorefiles();
-    Collection<StoreFile> compactedfiles =
-        ((HStore) store).getStoreEngine().getStoreFileManager().getCompactedfiles();
+    Collection<HStoreFile> storefiles = store.getStorefiles();
+    Collection<HStoreFile> compactedfiles =
+        store.getStoreEngine().getStoreFileManager().getCompactedfiles();
     // None of the files should be in compacted state.
-    for (StoreFile file : storefiles) {
+    for (HStoreFile file : storefiles) {
       assertFalse(file.isCompactedAway());
     }
     // Do compaction
@@ -203,13 +201,13 @@ public class TestCompactedHFilesDischarger {
     storefiles = store.getStorefiles();
     int usedReaderCount = 0;
     int unusedReaderCount = 0;
-    for (StoreFile file : storefiles) {
+    for (HStoreFile file : storefiles) {
       if (((HStoreFile) file).getRefCount() == 3) {
         usedReaderCount++;
       }
     }
     compactedfiles = ((HStore) store).getStoreEngine().getStoreFileManager().getCompactedfiles();
-    for(StoreFile file : compactedfiles) {
+    for(HStoreFile file : compactedfiles) {
       assertEquals("Refcount should be 3", 0, ((HStoreFile) file).getRefCount());
       unusedReaderCount++;
     }
@@ -221,7 +219,7 @@ public class TestCompactedHFilesDischarger {
     countDown();
     assertEquals(1, store.getStorefilesCount());
     storefiles = store.getStorefiles();
-    for (StoreFile file : storefiles) {
+    for (HStoreFile file : storefiles) {
       // Should not be in compacted state
       assertFalse(file.isCompactedAway());
     }
@@ -257,14 +255,14 @@ public class TestCompactedHFilesDischarger {
     // flush them
     region.flush(true);
 
-    Store store = region.getStore(fam);
+    HStore store = region.getStore(fam);
     assertEquals(3, store.getStorefilesCount());
 
-    Collection<StoreFile> storefiles = store.getStorefiles();
-    Collection<StoreFile> compactedfiles =
-        ((HStore) store).getStoreEngine().getStoreFileManager().getCompactedfiles();
+    Collection<HStoreFile> storefiles = store.getStorefiles();
+    Collection<HStoreFile> compactedfiles =
+        store.getStoreEngine().getStoreFileManager().getCompactedfiles();
     // None of the files should be in compacted state.
-    for (StoreFile file : storefiles) {
+    for (HStoreFile file : storefiles) {
       assertFalse(file.isCompactedAway());
     }
     startScannerThreads();
@@ -274,14 +272,13 @@ public class TestCompactedHFilesDischarger {
     storefiles = store.getStorefiles();
     int usedReaderCount = 0;
     int unusedReaderCount = 0;
-    for (StoreFile file : storefiles) {
-      if (((HStoreFile) file).getRefCount() == 0) {
+    for (HStoreFile file : storefiles) {
+      if (file.getRefCount() == 0) {
         unusedReaderCount++;
       }
     }
-    compactedfiles =
-        ((HStore) store).getStoreEngine().getStoreFileManager().getCompactedfiles();
-    for(StoreFile file : compactedfiles) {
-      assertEquals("Refcount should be 3", 3, ((HStoreFile) file).getRefCount());
+    compactedfiles = store.getStoreEngine().getStoreFileManager().getCompactedfiles();
+    for (HStoreFile file : compactedfiles) {
+      assertEquals("Refcount should be 3", 3, file.getRefCount());
       usedReaderCount++;
     }
@@ -307,14 +304,14 @@ public class TestCompactedHFilesDischarger {
     storefiles = store.getStorefiles();
     usedReaderCount = 0;
     unusedReaderCount = 0;
-    for (StoreFile file : storefiles) {
-      if (((HStoreFile) file).getRefCount() == 3) {
+    for (HStoreFile file : storefiles) {
+      if (file.getRefCount() == 3) {
         usedReaderCount++;
       }
     }
-    compactedfiles = ((HStore) store).getStoreEngine().getStoreFileManager().getCompactedfiles();
-    for(StoreFile file : compactedfiles) {
-      assertEquals("Refcount should be 0", 0, ((HStoreFile) file).getRefCount());
+    compactedfiles = store.getStoreEngine().getStoreFileManager().getCompactedfiles();
+    for (HStoreFile file : compactedfiles) {
+      assertEquals("Refcount should be 0", 0, file.getRefCount());
       unusedReaderCount++;
     }
-    // Though there are files we are not using them for reads
+    // Though there are files, we are not using them for reads.
@@ -329,7 +326,7 @@ public class TestCompactedHFilesDischarger {
     // Now the cleaner should be able to clear it up because there are no active readers
     assertEquals(1, store.getStorefilesCount());
     storefiles = store.getStorefiles();
-    for (StoreFile file : storefiles) {
+    for (HStoreFile file : storefiles) {
       // Should not be in compacted state
       assertFalse(file.isCompactedAway());
     }

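For reference, once the cleaner has run, the discharger assertions above all reduce to one pattern over the HStore, now expressed directly against HStoreFile with no casts. A minimal sketch (assuming `store` is the HStore returned by region.getStore(fam), as in the test):

    Collection<HStoreFile> storefiles = store.getStorefiles();
    for (HStoreFile file : storefiles) {
      // Files still live in the store must never be marked compacted away.
      assertFalse(file.isCompactedAway());
    }
    Collection<HStoreFile> compactedfiles =
        store.getStoreEngine().getStoreFileManager().getCompactedfiles();
    for (HStoreFile file : compactedfiles) {
      // With no active scanners, compacted files should hold no references.
      assertEquals("Refcount should be 0", 0, file.getRefCount());
    }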
http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
index 170fba2..eb4801a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
@@ -43,9 +43,9 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileReader;
 import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
 import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
@@ -56,10 +56,10 @@ import org.mockito.stubbing.Answer;
 
 public class TestCompactor {
 
-  public static StoreFile createDummyStoreFile(long maxSequenceId) throws Exception {
+  public static HStoreFile createDummyStoreFile(long maxSequenceId) throws Exception {
     // "Files" are totally unused, it's Scanner class below that gives compactor fake KVs.
     // But compaction depends on everything under the sun, so stub everything with dummies.
-    StoreFile sf = mock(StoreFile.class);
+    HStoreFile sf = mock(HStoreFile.class);
     StoreFileReader r = mock(StoreFileReader.class);
     when(r.length()).thenReturn(1L);
     when(r.getBloomFilterType()).thenReturn(BloomType.NONE);

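The dummy-file helper above comes down to a few Mockito stubs; a sketch under the names visible in this hunk (the getReader()/getMaxSequenceId() wiring is an assumption based on how these mocks are used in the other compaction tests):

    HStoreFile sf = mock(HStoreFile.class);
    StoreFileReader r = mock(StoreFileReader.class);
    // A non-zero length so any size-based sanity checks pass.
    when(r.length()).thenReturn(1L);
    when(r.getBloomFilterType()).thenReturn(BloomType.NONE);
    // Assumed wiring: the compactor reaches the reader through the file.
    when(sf.getReader()).thenReturn(r);
    when(sf.getMaxSequenceId()).thenReturn(maxSequenceId);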
http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java
index ca7c0ae..ef0c931 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java
@@ -42,11 +42,11 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.regionserver.HStore;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScanInfo;
 import org.apache.hadoop.hbase.regionserver.ScanType;
-import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
 import org.apache.hadoop.hbase.regionserver.StoreUtils;
 import org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner;
@@ -87,14 +87,14 @@ public class TestDateTieredCompactor {
   public boolean usePrivateReaders;
 
   private DateTieredCompactor createCompactor(StoreFileWritersCapture writers,
-      final KeyValue[] input, List<StoreFile> storefiles) throws Exception {
+      final KeyValue[] input, List<HStoreFile> storefiles) throws Exception {
     Configuration conf = HBaseConfiguration.create();
     conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
     final Scanner scanner = new Scanner(input);
     // Create store mock that is satisfactory for compactor.
     HColumnDescriptor col = new HColumnDescriptor(NAME_OF_THINGS);
     ScanInfo si = new ScanInfo(conf, col, Long.MAX_VALUE, 0, CellComparator.COMPARATOR);
-    final Store store = mock(Store.class);
+    HStore store = mock(HStore.class);
     when(store.getStorefiles()).thenReturn(storefiles);
     when(store.getColumnFamilyDescriptor()).thenReturn(col);
     when(store.getScanInfo()).thenReturn(si);
@@ -109,14 +109,14 @@ public class TestDateTieredCompactor {
 
     return new DateTieredCompactor(conf, store) {
       @Override
-      protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
+      protected InternalScanner createScanner(HStore store, List<StoreFileScanner> scanners,
           long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow,
           byte[] dropDeletesToRow) throws IOException {
         return scanner;
       }
 
       @Override
-      protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
+      protected InternalScanner createScanner(HStore store, List<StoreFileScanner> scanners,
           ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
         return scanner;
       }
@@ -126,8 +126,8 @@ public class TestDateTieredCompactor {
   private void verify(KeyValue[] input, List<Long> boundaries, KeyValue[][] output,
       boolean allFiles) throws Exception {
     StoreFileWritersCapture writers = new StoreFileWritersCapture();
-    StoreFile sf1 = createDummyStoreFile(1L);
-    StoreFile sf2 = createDummyStoreFile(2L);
+    HStoreFile sf1 = createDummyStoreFile(1L);
+    HStoreFile sf2 = createDummyStoreFile(2L);
     DateTieredCompactor dtc = createCompactor(writers, input, Arrays.asList(sf1, sf2));
     List<Path> paths = dtc.compact(new CompactionRequest(Arrays.asList(sf1)),
       boundaries.subList(0, boundaries.size() - 1), NoLimitThroughputController.INSTANCE, null);

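Taken together, createCompactor shows the whole test recipe: stub an HStore with just enough state, then override both createScanner variants so the compactor reads canned KVs instead of real store files. A condensed sketch of the store stubbing (values as in the hunk; the areWritesEnabled stub is taken from the TestStripeCompactor hunk further below):

    HStore store = mock(HStore.class);
    when(store.getStorefiles()).thenReturn(storefiles);   // the dummy HStoreFiles
    when(store.getColumnFamilyDescriptor()).thenReturn(col);
    when(store.getScanInfo()).thenReturn(si);
    when(store.areWritesEnabled()).thenReturn(true);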
http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
index 49c3cba..1249fee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
@@ -36,9 +36,6 @@ import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -58,12 +55,12 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.regionserver.HStore;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
-import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileReader;
 import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
 import org.apache.hadoop.hbase.regionserver.StripeMultiFileWriter;
@@ -88,6 +85,9 @@ import org.junit.runners.Parameterized.Parameter;
 import org.junit.runners.Parameterized.Parameters;
 import org.mockito.ArgumentMatcher;
 
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+
 @RunWith(Parameterized.class)
 @Category({RegionServerTests.class, SmallTests.class})
 public class TestStripeCompactionPolicy {
@@ -163,14 +163,14 @@ public class TestStripeCompactionPolicy {
     StripeCompactionPolicy policy = new StripeCompactionPolicy(conf, sci, ssc) {
       @Override
       public StripeCompactionRequest selectCompaction(StripeInformationProvider si,
-          List<StoreFile> filesCompacting, boolean isOffpeak) throws IOException {
+          List<HStoreFile> filesCompacting, boolean isOffpeak) throws IOException {
         if (!filesCompacting.isEmpty()) return null;
         return selectSingleStripeCompaction(si, false, false, isOffpeak);
       }
 
       @Override
       public boolean needsCompactions(
-          StripeInformationProvider si, List<StoreFile> filesCompacting) {
+          StripeInformationProvider si, List<HStoreFile> filesCompacting) {
         if (!filesCompacting.isEmpty()) return false;
         return needsSingleStripeCompaction(si);
       }
@@ -202,7 +202,7 @@ public class TestStripeCompactionPolicy {
     verifySingleStripeCompaction(policy, si, 1, null);
     // Verify max count is respected.
     si = createStripesWithSizes(0, 0, new Long[] { 5L }, new Long[] { 5L, 4L, 4L, 4L, 4L });
-    List<StoreFile> sfs = si.getStripes().get(1).subList(1, 5);
+    List<HStoreFile> sfs = si.getStripes().get(1).subList(1, 5);
     verifyCompaction(policy, si, sfs, null, 1, null, si.getStartRow(1), si.getEndRow(1), true);
     // Verify ratio is applied.
     si = createStripesWithSizes(0, 0, new Long[] { 5L }, new Long[] { 50L, 4L, 4L, 4L, 4L });
@@ -222,10 +222,10 @@ public class TestStripeCompactionPolicy {
   public void testWithReferences() throws Exception {
     StripeCompactionPolicy policy = createPolicy(HBaseConfiguration.create());
     StripeCompactor sc = mock(StripeCompactor.class);
-    StoreFile ref = createFile();
+    HStoreFile ref = createFile();
     when(ref.isReference()).thenReturn(true);
     StripeInformationProvider si = mock(StripeInformationProvider.class);
-    Collection<StoreFile> sfs = al(ref, createFile());
+    Collection<HStoreFile> sfs = al(ref, createFile());
     when(si.getStorefiles()).thenReturn(sfs);
 
     assertTrue(policy.needsCompactions(si, al()));
@@ -349,12 +349,12 @@ public class TestStripeCompactionPolicy {
     edge.setValue(now);
     EnvironmentEdgeManager.injectEdge(edge);
     try {
-      StoreFile expiredFile = createFile(), notExpiredFile = createFile();
+      HStoreFile expiredFile = createFile(), notExpiredFile = createFile();
       when(expiredFile.getReader().getMaxTimestamp()).thenReturn(now - defaultTtl - 1);
       when(notExpiredFile.getReader().getMaxTimestamp()).thenReturn(now - defaultTtl + 1);
-      List<StoreFile> expired = Lists.newArrayList(expiredFile, expiredFile);
-      List<StoreFile> notExpired = Lists.newArrayList(notExpiredFile, notExpiredFile);
-      List<StoreFile> mixed = Lists.newArrayList(expiredFile, notExpiredFile);
+      List<HStoreFile> expired = Lists.newArrayList(expiredFile, expiredFile);
+      List<HStoreFile> notExpired = Lists.newArrayList(notExpiredFile, notExpiredFile);
+      List<HStoreFile> mixed = Lists.newArrayList(expiredFile, notExpiredFile);
 
       StripeCompactionPolicy policy = createPolicy(HBaseConfiguration.create(),
           defaultSplitSize, defaultSplitCount, defaultInitialCount, true);
@@ -389,11 +389,11 @@ public class TestStripeCompactionPolicy {
     edge.setValue(now);
     EnvironmentEdgeManager.injectEdge(edge);
     try {
-      StoreFile expiredFile = createFile(), notExpiredFile = createFile();
+      HStoreFile expiredFile = createFile(), notExpiredFile = createFile();
       when(expiredFile.getReader().getMaxTimestamp()).thenReturn(now - defaultTtl - 1);
       when(notExpiredFile.getReader().getMaxTimestamp()).thenReturn(now - defaultTtl + 1);
-      List<StoreFile> expired = Lists.newArrayList(expiredFile, expiredFile);
-      List<StoreFile> notExpired = Lists.newArrayList(notExpiredFile, notExpiredFile);
+      List<HStoreFile> expired = Lists.newArrayList(expiredFile, expiredFile);
+      List<HStoreFile> notExpired = Lists.newArrayList(notExpiredFile, notExpiredFile);
 
       StripeCompactionPolicy policy =
           createPolicy(HBaseConfiguration.create(), defaultSplitSize, defaultSplitCount,
@@ -414,7 +414,7 @@ public class TestStripeCompactionPolicy {
 
   @SuppressWarnings("unchecked")
   private static StripeCompactionPolicy.StripeInformationProvider createStripesWithFiles(
-      List<StoreFile>... stripeFiles) throws Exception {
+      List<HStoreFile>... stripeFiles) throws Exception {
     return createStripesWithFiles(createBoundaries(stripeFiles.length),
         Lists.newArrayList(stripeFiles), new ArrayList<>());
   }
@@ -434,7 +434,7 @@ public class TestStripeCompactionPolicy {
     verifySingleStripeCompaction(policy, si, 0, false);
     // Unless there are enough to cause L0 compaction.
     si = createStripesWithSizes(6, 2, stripes);
-    ConcatenatedLists<StoreFile> sfs = new ConcatenatedLists<>();
+    ConcatenatedLists<HStoreFile> sfs = new ConcatenatedLists<>();
     sfs.addSublist(si.getLevel0Files());
     sfs.addSublist(si.getStripes().get(0));
     verifyCompaction(
@@ -447,12 +447,13 @@ public class TestStripeCompactionPolicy {
-    // if all files of stripe aren't selected, delete must not be dropped.
+    // If all the files of a stripe aren't selected, deletes must not be dropped.
     stripes = new Long[][] { new Long[] { 100L, 3L, 2L, 2L, 2L }, new Long[] { 6L } };
     si = createStripesWithSizes(0, 0, stripes);
-    List<StoreFile> compact_file = new ArrayList<>();
-    Iterator<StoreFile> iter = si.getStripes().get(0).listIterator(1);
+    List<HStoreFile> compactFile = new ArrayList<>();
+    Iterator<HStoreFile> iter = si.getStripes().get(0).listIterator(1);
     while (iter.hasNext()) {
-        compact_file.add(iter.next());
+      compactFile.add(iter.next());
     }
-    verifyCompaction(policy, si, compact_file, false, 1, null, si.getStartRow(0), si.getEndRow(0), true);
+    verifyCompaction(policy, si, compactFile, false, 1, null, si.getStartRow(0), si.getEndRow(0),
+      true);
   }
 
   /********* HELPER METHODS ************/
@@ -472,14 +473,14 @@ public class TestStripeCompactionPolicy {
     return new StripeCompactionPolicy(conf, sci, ssc);
   }
 
-  private static ArrayList<StoreFile> al(StoreFile... sfs) {
+  private static ArrayList<HStoreFile> al(HStoreFile... sfs) {
     return new ArrayList<>(Arrays.asList(sfs));
   }
 
   private void verifyMergeCompatcion(StripeCompactionPolicy policy, StripeInformationProvider si,
       int from, int to) throws Exception {
     StripeCompactionPolicy.StripeCompactionRequest scr = policy.selectCompaction(si, al(), false);
-    Collection<StoreFile> sfs = getAllFiles(si, from, to);
+    Collection<HStoreFile> sfs = getAllFiles(si, from, to);
     verifyCollectionsEqual(sfs, scr.getRequest().getFiles());
 
     // All the Stripes are expired, so the Compactor will not create any Writers. We need to create
@@ -538,7 +539,7 @@ public class TestStripeCompactionPolicy {
    * @param boundaries Expected target stripe boundaries.
    */
   private void verifyCompaction(StripeCompactionPolicy policy, StripeInformationProvider si,
-      Collection<StoreFile> sfs, byte[] dropDeletesFrom, byte[] dropDeletesTo,
+      Collection<HStoreFile> sfs, byte[] dropDeletesFrom, byte[] dropDeletesTo,
       final List<byte[]> boundaries) throws Exception {
     StripeCompactor sc = mock(StripeCompactor.class);
     assertTrue(policy.needsCompactions(si, al()));
@@ -573,7 +574,7 @@ public class TestStripeCompactionPolicy {
-   * @param righr Right boundary of the compaction.
+   * @param end Right boundary of the compaction.
    */
   private void verifyCompaction(StripeCompactionPolicy policy, StripeInformationProvider si,
-      Collection<StoreFile> sfs, Boolean dropDeletes, Integer count, Long size,
+      Collection<HStoreFile> sfs, Boolean dropDeletes, Integer count, Long size,
       byte[] start, byte[] end, boolean needsCompaction) throws IOException {
     StripeCompactor sc = mock(StripeCompactor.class);
     assertTrue(!needsCompaction || policy.needsCompactions(si, al()));
@@ -612,15 +613,15 @@ public class TestStripeCompactionPolicy {
             : (dropDeletes.booleanValue() ? aryEq(value) : isNull(byte[].class));
   }
 
-  private void verifyCollectionsEqual(Collection<StoreFile> sfs, Collection<StoreFile> scr) {
+  private void verifyCollectionsEqual(Collection<HStoreFile> sfs, Collection<HStoreFile> scr) {
-    // Dumb.
+    // Dumb but sufficient: equal sizes plus containsAll implies the collections are equal.
     assertEquals(sfs.size(), scr.size());
     assertTrue(scr.containsAll(sfs));
   }
 
-  private static List<StoreFile> getAllFiles(
+  private static List<HStoreFile> getAllFiles(
       StripeInformationProvider si, int fromStripe, int toStripe) {
-    ArrayList<StoreFile> expected = new ArrayList<>();
+    ArrayList<HStoreFile> expected = new ArrayList<>();
     for (int i = fromStripe; i <= toStripe; ++i) {
       expected.addAll(si.getStripes().get(i));
     }
@@ -694,15 +695,15 @@ public class TestStripeCompactionPolicy {
 
   private static StripeInformationProvider createStripes(List<byte[]> boundaries,
       List<List<Long>> stripeSizes, List<Long> l0Sizes) throws Exception {
-    List<List<StoreFile>> stripeFiles = new ArrayList<>(stripeSizes.size());
+    List<List<HStoreFile>> stripeFiles = new ArrayList<>(stripeSizes.size());
     for (List<Long> sizes : stripeSizes) {
-      List<StoreFile> sfs = new ArrayList<>(sizes.size());
+      List<HStoreFile> sfs = new ArrayList<>(sizes.size());
       for (Long size : sizes) {
         sfs.add(createFile(size));
       }
       stripeFiles.add(sfs);
     }
-    List<StoreFile> l0Files = new ArrayList<>();
+    List<HStoreFile> l0Files = new ArrayList<>();
     for (Long size : l0Sizes) {
       l0Files.add(createFile(size));
     }
@@ -713,8 +714,8 @@ public class TestStripeCompactionPolicy {
    * This method actually does all the work.
    */
   private static StripeInformationProvider createStripesWithFiles(List<byte[]> boundaries,
-      List<List<StoreFile>> stripeFiles, List<StoreFile> l0Files) throws Exception {
-    ArrayList<ImmutableList<StoreFile>> stripes = new ArrayList<>();
+      List<List<HStoreFile>> stripeFiles, List<HStoreFile> l0Files) throws Exception {
+    ArrayList<ImmutableList<HStoreFile>> stripes = new ArrayList<>();
     ArrayList<byte[]> boundariesList = new ArrayList<>();
     StripeInformationProvider si = mock(StripeInformationProvider.class);
     if (!stripeFiles.isEmpty()) {
@@ -724,7 +725,7 @@ public class TestStripeCompactionPolicy {
         byte[] startKey = ((i == 0) ? OPEN_KEY : boundaries.get(i - 1));
         byte[] endKey = ((i == boundaries.size()) ? OPEN_KEY : boundaries.get(i));
         boundariesList.add(endKey);
-        for (StoreFile sf : stripeFiles.get(i)) {
+        for (HStoreFile sf : stripeFiles.get(i)) {
           setFileStripe(sf, startKey, endKey);
         }
         stripes.add(ImmutableList.copyOf(stripeFiles.get(i)));
@@ -732,7 +733,7 @@ public class TestStripeCompactionPolicy {
         when(si.getEndRow(eq(i))).thenReturn(endKey);
       }
     }
-    ConcatenatedLists<StoreFile> sfs = new ConcatenatedLists<>();
+    ConcatenatedLists<HStoreFile> sfs = new ConcatenatedLists<>();
     sfs.addAllSublists(stripes);
     sfs.addSublist(l0Files);
     when(si.getStorefiles()).thenReturn(sfs);
@@ -743,8 +744,8 @@ public class TestStripeCompactionPolicy {
     return si;
   }
 
-  private static StoreFile createFile(long size) throws Exception {
-    StoreFile sf = mock(StoreFile.class);
+  private static HStoreFile createFile(long size) throws Exception {
+    HStoreFile sf = mock(HStoreFile.class);
     when(sf.getPath()).thenReturn(new Path("moo"));
     StoreFileReader r = mock(StoreFileReader.class);
     when(r.getEntries()).thenReturn(size);
@@ -758,11 +759,11 @@ public class TestStripeCompactionPolicy {
     return sf;
   }
 
-  private static StoreFile createFile() throws Exception {
+  private static HStoreFile createFile() throws Exception {
     return createFile(0);
   }
 
-  private static void setFileStripe(StoreFile sf, byte[] startKey, byte[] endKey) {
+  private static void setFileStripe(HStoreFile sf, byte[] startKey, byte[] endKey) {
     when(sf.getMetadataValue(StripeStoreFileManager.STRIPE_START_KEY)).thenReturn(startKey);
     when(sf.getMetadataValue(StripeStoreFileManager.STRIPE_END_KEY)).thenReturn(endKey);
   }
@@ -770,7 +771,7 @@ public class TestStripeCompactionPolicy {
   private StripeCompactor createCompactor() throws Exception {
     HColumnDescriptor col = new HColumnDescriptor(Bytes.toBytes("foo"));
     StoreFileWritersCapture writers = new StoreFileWritersCapture();
-    Store store = mock(Store.class);
+    HStore store = mock(HStore.class);
     HRegionInfo info = mock(HRegionInfo.class);
     when(info.getRegionNameAsString()).thenReturn("testRegion");
     when(store.getColumnFamilyDescriptor()).thenReturn(col);
@@ -784,14 +785,14 @@ public class TestStripeCompactionPolicy {
     final Scanner scanner = new Scanner();
     return new StripeCompactor(conf, store) {
       @Override
-      protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
+      protected InternalScanner createScanner(HStore store, List<StoreFileScanner> scanners,
           long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow,
           byte[] dropDeletesToRow) throws IOException {
         return scanner;
       }
 
       @Override
-      protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
+      protected InternalScanner createScanner(HStore store, List<StoreFileScanner> scanners,
           ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
         return scanner;
       }

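Note how setFileStripe carries stripe boundaries as plain file metadata on the mock; the same pattern stands alone as the following sketch (startKey/endKey are whatever boundaries the stripe under test needs):

    HStoreFile sf = mock(HStoreFile.class);
    when(sf.getPath()).thenReturn(new Path("moo"));
    when(sf.getMetadataValue(StripeStoreFileManager.STRIPE_START_KEY)).thenReturn(startKey);
    when(sf.getMetadataValue(StripeStoreFileManager.STRIPE_END_KEY)).thenReturn(endKey);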
http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java
index e51a675..bd3a803 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java
@@ -42,10 +42,10 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScanInfo;
 import org.apache.hadoop.hbase.regionserver.ScanType;
-import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
 import org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner;
 import org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture;
@@ -195,7 +195,7 @@ public class TestStripeCompactor {
     // Create store mock that is satisfactory for compactor.
     HColumnDescriptor col = new HColumnDescriptor(NAME_OF_THINGS);
     ScanInfo si = new ScanInfo(conf, col, Long.MAX_VALUE, 0, CellComparator.COMPARATOR);
-    Store store = mock(Store.class);
+    HStore store = mock(HStore.class);
     when(store.getColumnFamilyDescriptor()).thenReturn(col);
     when(store.getScanInfo()).thenReturn(si);
     when(store.areWritesEnabled()).thenReturn(true);
@@ -207,14 +207,14 @@ public class TestStripeCompactor {
 
     return new StripeCompactor(conf, store) {
       @Override
-      protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
+      protected InternalScanner createScanner(HStore store, List<StoreFileScanner> scanners,
           long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow,
           byte[] dropDeletesToRow) throws IOException {
         return scanner;
       }
 
       @Override
-      protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
+      protected InternalScanner createScanner(HStore store, List<StoreFileScanner> scanners,
           ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
         return scanner;
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
index d25829d..391155e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
@@ -51,7 +51,17 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -60,7 +70,6 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 import org.apache.hadoop.hbase.regionserver.DefaultStoreEngine;
 import org.apache.hadoop.hbase.regionserver.DefaultStoreFlusher;
 import org.apache.hadoop.hbase.regionserver.FlushRequestListener;
@@ -101,6 +110,8 @@ import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
+
 /**
  * Test replay of edits out of a WAL split.
  */
@@ -636,7 +647,7 @@ public abstract class AbstractTestWALReplay {
-    // Switch between throw and not throw exception in flush
+    // Switch between throwing and not throwing an exception in flush.
     static final AtomicBoolean throwExceptionWhenFlushing = new AtomicBoolean(false);
 
-    public CustomStoreFlusher(Configuration conf, Store store) {
+    public CustomStoreFlusher(Configuration conf, HStore store) {
       super(conf, store);
     }
     @Override

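With the constructor retyped to HStore, the test flusher reads as below; a minimal sketch (that CustomStoreFlusher extends DefaultStoreFlusher, imported earlier in this file, is an assumption about the class header this hunk omits):

    public static class CustomStoreFlusher extends DefaultStoreFlusher {
      // Switch between throwing and not throwing an exception in flush.
      static final AtomicBoolean throwExceptionWhenFlushing = new AtomicBoolean(false);

      public CustomStoreFlusher(Configuration conf, HStore store) {
        super(conf, store);
      }
    }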
http://git-wip-us.apache.org/repos/asf/hbase/blob/a5f84430/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
index 2b5c78c..9dc13d9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
@@ -62,9 +62,9 @@ import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.Visibil
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.HStore;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.Region;
-import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
@@ -843,10 +843,10 @@ public abstract class TestVisibilityLabels {
     }
     TEST_UTIL.getAdmin().flush(tableName);
     List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(tableName);
-    Store store = regions.get(0).getStore(fam);
-    Collection<StoreFile> storefiles = store.getStorefiles();
+    HStore store = regions.get(0).getStore(fam);
+    Collection<HStoreFile> storefiles = store.getStorefiles();
     assertTrue(storefiles.size() > 0);
-    for (StoreFile storeFile : storefiles) {
+    for (HStoreFile storeFile : storefiles) {
       assertTrue(storeFile.getReader().getHFileReader().getFileContext().isIncludesTags());
     }
   }
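
The tag assertion above is the usual flush-then-inspect pattern, now typed against HStore/HStoreFile throughout; as a sketch (tableName and fam as defined earlier in the test class):

    TEST_UTIL.getAdmin().flush(tableName);
    List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(tableName);
    HStore store = regions.get(0).getStore(fam);
    for (HStoreFile storeFile : store.getStorefiles()) {
      // Flushed HFiles must carry tags, or visibility labels would be lost.
      assertTrue(storeFile.getReader().getHFileReader().getFileContext().isIncludesTags());
    }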