Posted to commits@hbase.apache.org by gi...@apache.org on 2019/11/24 14:45:27 UTC

[hbase-site] branch asf-site updated: Published site at b08697ae4a347f34273253e33ba91bb6b7ade5e0.

This is an automated email from the ASF dual-hosted git repository.

git-site-role pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/hbase-site.git


The following commit(s) were added to refs/heads/asf-site by this push:
     new 76a7632  Published site at b08697ae4a347f34273253e33ba91bb6b7ade5e0.
76a7632 is described below

commit 76a7632fc5162e197483e9f819bc54c96f62310b
Author: jenkins <bu...@apache.org>
AuthorDate: Sun Nov 24 14:45:08 2019 +0000

    Published site at b08697ae4a347f34273253e33ba91bb6b7ade5e0.
---
 acid-semantics.html                                |     2 +-
 apache_hbase_reference_guide.pdf                   |     4 +-
 book.html                                          |     2 +-
 bulk-loads.html                                    |     2 +-
 checkstyle-aggregate.html                          |    86 +-
 checkstyle.rss                                     |    16 +-
 coc.html                                           |     2 +-
 dependencies.html                                  |     2 +-
 dependency-convergence.html                        |     2 +-
 dependency-info.html                               |     2 +-
 dependency-management.html                         |     2 +-
 devapidocs/allclasses-frame.html                   |     1 +
 devapidocs/allclasses-noframe.html                 |     1 +
 devapidocs/constant-values.html                    |     4 +-
 devapidocs/index-all.html                          |    20 +
 .../hadoop/hbase/backup/HFileArchiver.File.html    |    24 +-
 .../hbase/backup/HFileArchiver.FileConverter.html  |     6 +-
 .../backup/HFileArchiver.FileStatusConverter.html  |     6 +-
 .../hbase/backup/HFileArchiver.FileablePath.html   |    20 +-
 .../backup/HFileArchiver.FileableStoreFile.html    |    18 +-
 .../hbase/backup/HFileArchiver.StoreToFile.html    |     6 +-
 .../apache/hadoop/hbase/backup/HFileArchiver.html  |   140 +-
 .../backup/class-use/FailedArchiveException.html   |    49 +-
 .../apache/hadoop/hbase/backup/package-tree.html   |     4 +-
 .../apache/hadoop/hbase/backup/package-use.html    |    15 +-
 .../hadoop/hbase/client/class-use/RegionInfo.html  |    46 +-
 .../apache/hadoop/hbase/client/package-tree.html   |    20 +-
 .../hadoop/hbase/coprocessor/package-tree.html     |     2 +-
 .../apache/hadoop/hbase/executor/package-tree.html |     2 +-
 .../apache/hadoop/hbase/filter/package-tree.html   |     8 +-
 .../org/apache/hadoop/hbase/http/package-tree.html |     2 +-
 .../apache/hadoop/hbase/io/hfile/package-tree.html |     6 +-
 .../hadoop/hbase/mapreduce/package-tree.html       |     4 +-
 .../hbase/master/assignment/package-tree.html      |     2 +-
 .../apache/hadoop/hbase/master/package-tree.html   |     4 +-
 .../hbase/master/procedure/package-tree.html       |     2 +-
 .../hadoop/hbase/monitoring/package-tree.html      |     2 +-
 .../org/apache/hadoop/hbase/package-tree.html      |    16 +-
 .../hadoop/hbase/procedure2/package-tree.html      |     6 +-
 .../apache/hadoop/hbase/quotas/package-tree.html   |     6 +-
 .../apache/hadoop/hbase/regionserver/HRegion.html  |     6 +-
 .../hbase/regionserver/HRegionFileSystem.html      |    16 +-
 .../regionserver/HRegionServerCommandLine.html     |     4 +-
 .../hbase/regionserver/HRegionWALFileSystem.html   |   320 +
 .../apache/hadoop/hbase/regionserver/HStore.html   |     4 +-
 .../regionserver/class-use/HRegionFileSystem.html  |    19 +-
 .../class-use/HRegionWALFileSystem.html}           |    39 +-
 .../hbase/regionserver/class-use/HStoreFile.html   |   169 +-
 .../hadoop/hbase/regionserver/package-frame.html   |     1 +
 .../hadoop/hbase/regionserver/package-summary.html |   286 +-
 .../hadoop/hbase/regionserver/package-tree.html    |    22 +-
 .../hadoop/hbase/regionserver/package-use.html     |   283 +-
 .../regionserver/querymatcher/package-tree.html    |     2 +-
 .../hbase/regionserver/wal/package-tree.html       |     2 +-
 .../replication/regionserver/package-tree.html     |     2 +-
 .../hadoop/hbase/security/access/package-tree.html |     4 +-
 .../apache/hadoop/hbase/security/package-tree.html |     2 +-
 .../apache/hadoop/hbase/thrift/package-tree.html   |     4 +-
 .../apache/hadoop/hbase/util/HFileArchiveUtil.html |    53 +-
 .../org/apache/hadoop/hbase/util/package-tree.html |     8 +-
 .../org/apache/hadoop/hbase/wal/package-tree.html  |     2 +-
 devapidocs/overview-tree.html                      |     6 +-
 .../src-html/org/apache/hadoop/hbase/Version.html  |     4 +-
 .../hadoop/hbase/backup/HFileArchiver.File.html    |  1448 +--
 .../hbase/backup/HFileArchiver.FileConverter.html  |  1448 +--
 .../backup/HFileArchiver.FileStatusConverter.html  |  1448 +--
 .../hbase/backup/HFileArchiver.FileablePath.html   |  1448 +--
 .../backup/HFileArchiver.FileableStoreFile.html    |  1448 +--
 .../hbase/backup/HFileArchiver.StoreToFile.html    |  1448 +--
 .../apache/hadoop/hbase/backup/HFileArchiver.html  |  1448 +--
 .../HRegion.BatchOperation.Visitor.html            |     6 +-
 .../hbase/regionserver/HRegion.BatchOperation.html |     6 +-
 .../regionserver/HRegion.BulkLoadListener.html     |     6 +-
 .../regionserver/HRegion.FlushResult.Result.html   |     6 +-
 .../hbase/regionserver/HRegion.FlushResult.html    |     6 +-
 .../regionserver/HRegion.FlushResultImpl.html      |     6 +-
 .../HRegion.MutationBatchOperation.html            |     6 +-
 .../HRegion.ObservedExceptionsInBatch.html         |     6 +-
 .../regionserver/HRegion.PrepareFlushResult.html   |     6 +-
 .../regionserver/HRegion.RegionScannerImpl.html    |     6 +-
 .../regionserver/HRegion.ReplayBatchOperation.html |     6 +-
 .../hbase/regionserver/HRegion.RowLockContext.html |     6 +-
 .../hbase/regionserver/HRegion.RowLockImpl.html    |     6 +-
 .../hbase/regionserver/HRegion.WriteState.html     |     6 +-
 .../apache/hadoop/hbase/regionserver/HRegion.html  |     6 +-
 .../hbase/regionserver/HRegionFileSystem.html      |     6 +-
 .../hbase/regionserver/HRegionWALFileSystem.html   |   123 +
 .../apache/hadoop/hbase/util/HFileArchiveUtil.html |   217 +-
 downloads.html                                     |     2 +-
 export_control.html                                |     2 +-
 index.html                                         |     2 +-
 issue-tracking.html                                |     2 +-
 mail-lists.html                                    |     2 +-
 metrics.html                                       |     2 +-
 old_news.html                                      |     2 +-
 plugin-management.html                             |     2 +-
 plugins.html                                       |     2 +-
 poweredbyhbase.html                                |     2 +-
 project-info.html                                  |     2 +-
 project-reports.html                               |     2 +-
 project-summary.html                               |     2 +-
 pseudo-distributed.html                            |     2 +-
 replication.html                                   |     2 +-
 resources.html                                     |     2 +-
 source-repository.html                             |     2 +-
 sponsors.html                                      |     2 +-
 supportingprojects.html                            |     2 +-
 team-list.html                                     |     2 +-
 testdevapidocs/allclasses-frame.html               |     1 +
 testdevapidocs/allclasses-noframe.html             |     1 +
 testdevapidocs/index-all.html                      |    24 +
 .../TestHFileArchiving.ArchivingFunction.html      |   248 +
 .../hadoop/hbase/backup/TestHFileArchiving.html    |   265 +-
 .../hadoop/hbase/backup/TestIncrementalBackup.html |     4 +-
 .../TestHFileArchiving.ArchivingFunction.html      |    37 +-
 .../apache/hadoop/hbase/backup/package-frame.html  |     4 +
 .../hadoop/hbase/backup/package-summary.html       |    15 +
 .../apache/hadoop/hbase/backup/package-tree.html   |     6 +-
 .../apache/hadoop/hbase/backup/package-use.html    |     3 +
 .../org/apache/hadoop/hbase/package-tree.html      |     8 +-
 .../hadoop/hbase/procedure2/package-tree.html      |     4 +-
 ...veConcurrentClose.WaitingHRegionFileSystem.html |     2 +-
 .../hbase/regionserver/TestHRegion.Appender.html   |    18 +-
 .../regionserver/TestHRegion.FlushThread.html      |    16 +-
 .../TestHRegion.GetTillDoneOrException.html        |    14 +-
 .../TestHRegion.HRegionForTesting.html             |     8 +-
 .../regionserver/TestHRegion.HRegionWithSeqId.html |     6 +-
 .../regionserver/TestHRegion.HStoreForTesting.html |     6 +-
 .../regionserver/TestHRegion.Incrementer.html      |    18 +-
 .../regionserver/TestHRegion.IsFlushWALMarker.html |    10 +-
 .../hbase/regionserver/TestHRegion.PutThread.html  |    24 +-
 .../hadoop/hbase/regionserver/TestHRegion.html     |   546 +-
 .../regionserver/TestHRegionWithInMemoryFlush.html |     2 +-
 ...ileRefresherChore.FailingHRegionFileSystem.html |     2 +-
 .../hadoop/hbase/regionserver/package-tree.html    |     4 +-
 .../org/apache/hadoop/hbase/test/package-tree.html |     2 +-
 .../org/apache/hadoop/hbase/wal/package-tree.html  |     2 +-
 testdevapidocs/overview-tree.html                  |     1 +
 .../TestHFileArchiving.ArchivingFunction.html      |   802 ++
 .../hadoop/hbase/backup/TestHFileArchiving.html    |  1267 +-
 .../hbase/regionserver/TestHRegion.Appender.html   | 12240 ++++++++++---------
 .../regionserver/TestHRegion.FlushThread.html      | 12240 ++++++++++---------
 .../TestHRegion.GetTillDoneOrException.html        | 12240 ++++++++++---------
 .../TestHRegion.HRegionForTesting.html             | 12240 ++++++++++---------
 .../regionserver/TestHRegion.HRegionWithSeqId.html | 12240 ++++++++++---------
 .../regionserver/TestHRegion.HStoreForTesting.html | 12240 ++++++++++---------
 .../regionserver/TestHRegion.Incrementer.html      | 12240 ++++++++++---------
 .../regionserver/TestHRegion.IsFlushWALMarker.html | 12240 ++++++++++---------
 .../hbase/regionserver/TestHRegion.PutThread.html  | 12240 ++++++++++---------
 .../hadoop/hbase/regionserver/TestHRegion.html     | 12240 ++++++++++---------
 150 files changed, 70527 insertions(+), 67677 deletions(-)

diff --git a/acid-semantics.html b/acid-semantics.html
index ab87ab2..3ba3a18 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -443,7 +443,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-23</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-24</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index da178c2..fa1fe8e 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20191123144042+00'00')
-/CreationDate (D:20191123144042+00'00')
+/ModDate (D:20191124144209+00'00')
+/CreationDate (D:20191124144209+00'00')
 >>
 endobj
 2 0 obj
diff --git a/book.html b/book.html
index b1f7014..3613cab 100644
--- a/book.html
+++ b/book.html
@@ -43869,7 +43869,7 @@ org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2019-11-23 14:30:58 UTC
+Last updated 2019-11-24 14:32:10 UTC
 </div>
 </div>
 </body>
diff --git a/bulk-loads.html b/bulk-loads.html
index 203bf2b..a6c11cb 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -148,7 +148,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-23</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-24</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 9be96b8..505d4a6 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -142,7 +142,7 @@
 <th><img src="images/icon_warning_sml.gif" alt="" />&#160;Warnings</th>
 <th><img src="images/icon_error_sml.gif" alt="" />&#160;Errors</th></tr>
 <tr class="b">
-<td>4088</td>
+<td>4089</td>
 <td>0</td>
 <td>0</td>
 <td>11476</td></tr></table></div>
@@ -16273,67 +16273,67 @@
 <td>design</td>
 <td>FinalClass</td>
 <td>Class HFileArchiver should be declared as final.</td>
-<td>60</td></tr>
+<td>62</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def modifier' has incorrect indentation level 8, expected level should be one of the following: 4, 6.</td>
-<td>70</td></tr>
+<td>72</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def' child has incorrect indentation level 10, expected level should be one of the following: 6, 8.</td>
-<td>72</td></tr>
+<td>74</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def rcurly' has incorrect indentation level 8, expected level should be one of the following: 4, 6.</td>
-<td>73</td></tr>
+<td>75</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'object def rcurly' has incorrect indentation level 6, expected level should be one of the following: 2, 4.</td>
-<td>74</td></tr>
+<td>76</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>315</td></tr>
+<td>354</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>631</td></tr>
+<td>667</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>637</td></tr>
+<td>673</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>654</td></tr>
+<td>690</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>656</td></tr>
+<td>692</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>692</td></tr></table></div>
+<td>728</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.actions.Action.java">org/apache/hadoop/hbase/chaos/actions/Action.java</h3>
 <table border="0" class="table table-striped">
@@ -64312,163 +64312,163 @@
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>267</td></tr>
+<td>269</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>303</td></tr>
+<td>305</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>479</td></tr>
+<td>481</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 6, expected level should be 8.</td>
-<td>1157</td></tr>
+<td>1213</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1416</td></tr>
+<td>1472</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'else' construct must use '{}'s.</td>
-<td>1418</td></tr>
+<td>1474</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1435</td></tr>
+<td>1491</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'else' construct must use '{}'s.</td>
-<td>1437</td></tr>
+<td>1493</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>2760</td></tr>
+<td>2816</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>3423</td></tr>
+<td>3479</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'while' construct must use '{}'s.</td>
-<td>3493</td></tr>
+<td>3549</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>coding</td>
 <td>EmptyStatement</td>
 <td>Empty statement.</td>
-<td>3494</td></tr>
+<td>3550</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'try' child has incorrect indentation level 10, expected level should be 8.</td>
-<td>3661</td></tr>
+<td>3717</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>design</td>
 <td>FinalClass</td>
 <td>Class PutThread should be declared as final.</td>
-<td>3672</td></tr>
+<td>3728</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>3848</td></tr>
+<td>3904</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>EmptyBlock</td>
 <td>Must have at least one statement.</td>
-<td>3910</td></tr>
+<td>3966</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>ArrayTypeStyle</td>
 <td>Array brackets at illegal position.</td>
-<td>4063</td></tr>
+<td>4119</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>ArrayTypeStyle</td>
 <td>Array brackets at illegal position.</td>
-<td>4074</td></tr>
+<td>4130</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>ArrayTypeStyle</td>
 <td>Array brackets at illegal position.</td>
-<td>4075</td></tr>
+<td>4131</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>4093</td></tr>
+<td>4149</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>4126</td></tr>
+<td>4182</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>4242</td></tr>
+<td>4298</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>4328</td></tr>
+<td>4384</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>4387</td></tr>
+<td>4443</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'for' construct must use '{}'s.</td>
-<td>4781</td></tr>
+<td>4837</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>MethodLength</td>
 <td>Method length is 160 lines (max allowed is 150).</td>
-<td>5221</td></tr>
+<td>5277</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 101).</td>
-<td>5770</td></tr></table></div>
+<td>5826</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.regionserver.TestHRegionFileSystem.java">org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java</h3>
 <table border="0" class="table table-striped">
@@ -86263,19 +86263,19 @@
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>181</td></tr>
+<td>198</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>182</td></tr>
+<td>199</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>185</td></tr></table></div>
+<td>202</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.util.HFileTestUtil.java">org/apache/hadoop/hbase/util/HFileTestUtil.java</h3>
 <table border="0" class="table table-striped">
@@ -91300,7 +91300,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-23</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-24</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/checkstyle.rss b/checkstyle.rss
index 30fe1de..61bc242 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,7 +25,7 @@ under the License.
     <language>en-us</language>
     <copyright>&#169;2007 - 2019 The Apache Software Foundation</copyright>
     <item>
-      <title>File: 4088,
+      <title>File: 4089,
              Errors: 11476,
              Warnings: 0,
              Infos: 0
@@ -30748,6 +30748,20 @@ under the License.
               </tr>
                           <tr>
                 <td>
+                  <a href="https://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.HRegionWALFileSystem.java">org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.java</a>
+                </td>
+                <td>
+                  0
+                </td>
+                <td>
+                  0
+                </td>
+                <td>
+                  0
+                </td>
+              </tr>
+                          <tr>
+                <td>
                   <a href="https://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.TestDefaultStoreEngine.java">org/apache/hadoop/hbase/regionserver/TestDefaultStoreEngine.java</a>
                 </td>
                 <td>
diff --git a/coc.html b/coc.html
index 41a4407..a1d71ae 100644
--- a/coc.html
+++ b/coc.html
@@ -217,7 +217,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-23</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-24</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependencies.html b/dependencies.html
index 69eda74..0177828 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -289,7 +289,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-23</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-24</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-convergence.html b/dependency-convergence.html
index cebde33..256e698 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -534,7 +534,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-23</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-24</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-info.html b/dependency-info.html
index 5ab2b98..7be9a38 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -170,7 +170,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-23</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-24</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-management.html b/dependency-management.html
index 7647e53..191579a 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -898,7 +898,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-23</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-11-24</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html
index b911651..c9a806c 100644
--- a/devapidocs/allclasses-frame.html
+++ b/devapidocs/allclasses-frame.html
@@ -1228,6 +1228,7 @@
 <li><a href="org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HRegionServer.PeriodicMemStoreFlusher</a></li>
 <li><a href="org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HRegionServer.SystemExitWhenAbortTimeout</a></li>
 <li><a href="org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HRegionServerCommandLine</a></li>
+<li><a href="org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HRegionWALFileSystem</a></li>
 <li><a href="org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HStore</a></li>
 <li><a href="org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HStoreFile</a></li>
 <li><a href="org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase" target="classFrame">HTableDescriptor</a></li>
diff --git a/devapidocs/allclasses-noframe.html b/devapidocs/allclasses-noframe.html
index 2bce405..9cff774 100644
--- a/devapidocs/allclasses-noframe.html
+++ b/devapidocs/allclasses-noframe.html
@@ -1228,6 +1228,7 @@
 <li><a href="org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer.PeriodicMemStoreFlusher</a></li>
 <li><a href="org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer.SystemExitWhenAbortTimeout</a></li>
 <li><a href="org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServerCommandLine</a></li>
+<li><a href="org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a></li>
 <li><a href="org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a></li>
 <li><a href="org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a></li>
 <li><a href="org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></li>
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 0de1a0f..235b058 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -4081,14 +4081,14 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#date">date</a></code></td>
-<td class="colLast"><code>"Sat Nov 23 14:36:42 UTC 2019"</code></td>
+<td class="colLast"><code>"Sun Nov 24 14:38:14 UTC 2019"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.revision">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#revision">revision</a></code></td>
-<td class="colLast"><code>"b1df7df0e0e09f1844a0ff9c0fc1c2cac8654a8e"</code></td>
+<td class="colLast"><code>"b08697ae4a347f34273253e33ba91bb6b7ade5e0"</code></td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.srcChecksum">
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 9fb411a..d4c48b0 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -3513,6 +3513,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HRegion.html#applyToMemStore-org.apache.hadoop.hbase.regionserver.HStore-org.apache.hadoop.hbase.Cell-org.apache.hadoop.hbase.regionserver.MemStoreSizing-">applyToMemStore(HStore, Cell, MemStoreSizing)</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/HFileArchiver.html#archive-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-byte:A-java.util.Collection-org.apache.hadoop.fs.Path-">archive(FileSystem, RegionInfo, byte[], Collection&lt;HStoreFile&gt;, Path)</a></span> - Static method in class org.apache.hadoop.hbase.backup.<a href="org/apache/hadoop/hbase/backup/HFileArchiver.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiv [...]
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.PathHelper.html#archiveDataDir">archiveDataDir</a></span> - Variable in class org.apache.hadoop.hbase.security.access.<a href="org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.PathHelper.html" title="class in org.apache.hadoop.hbase.security.access">SnapshotScannerHDFSAclHelper.PathHelper</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.html#archivedTables">archivedTables</a></span> - Variable in class org.apache.hadoop.hbase.backup.example.<a href="org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.html" title="class in org.apache.hadoop.hbase.backup.example">HFileArchiveTableMonitor</a></dt>
@@ -3544,6 +3546,14 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/HFileLink.html#archivePath">archivePath</a></span> - Variable in class org.apache.hadoop.hbase.io.<a href="org/apache/hadoop/hbase/io/HFileLink.html" title="class in org.apache.hadoop.hbase.io">HFileLink</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveRecoveredEdits-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-byte:A-java.util.Collection-">archiveRecoveredEdits(Configuration, FileSystem, RegionInfo, byte[], Collection&lt;HStoreFile&gt;)</a></span> - Static method in class org.apache.hadoop.hbase.backup.<a href="org/apache/hadoop/hbase/backup/HFileArchiver.html" title="clas [...]
+<dd>
+<div class="block">Archive recovered edits using existing logic for archiving store files.</div>
+</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html#archiveRecoveredEdits-java.lang.String-java.util.Collection-">archiveRecoveredEdits(String, Collection&lt;HStoreFile&gt;)</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a></dt>
+<dd>
+<div class="block">Closes and archives the specified store files from the specified family.</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveRegion-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-">archiveRegion(Configuration, FileSystem, RegionInfo)</a></span> - Static method in class org.apache.hadoop.hbase.backup.<a href="org/apache/hadoop/hbase/backup/HFileArchiver.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver</a></dt>
 <dd>
 <div class="block">Cleans up all the files for a HRegion by archiving the HFiles to the archive directory</div>
@@ -51842,6 +51852,10 @@
 <dd>
 <div class="block">Gets the directory to archive a store directory.</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/HFileArchiveUtil.html#getStoreArchivePathForRootDir-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-byte:A-">getStoreArchivePathForRootDir(Path, RegionInfo, byte[])</a></span> - Static method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/HFileArchiveUtil.html" title="class in org.apache.hadoop.hbase.util">HFileArchiveUtil</a></dt>
+<dd>
+<div class="block">Gets the archive directory under specified root dir.</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html#getStoreCompactionPriority--">getStoreCompactionPriority()</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html" title="class in org.apache.hadoop.hbase.regionserver">DefaultStoreFileManager</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/StoreFileManager.html#getStoreCompactionPriority--">getStoreCompactionPriority()</a></span> - Method in interface org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/StoreFileManager.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFileManager</a></dt>
@@ -59677,6 +59691,12 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html#HRegionServerCommandLine-java.lang.Class-">HRegionServerCommandLine(Class&lt;? extends HRegionServer&gt;)</a></span> - Constructor for class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServerCommandLine</a></dt>
 <dd>&nbsp;</dd>
+<dt><a href="org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegionWALFileSystem</span></a> - Class in <a href="org/apache/hadoop/hbase/regionserver/package-summary.html">org.apache.hadoop.hbase.regionserver</a></dt>
+<dd>
+<div class="block">A Wrapper for the region FileSystem operations adding WAL specific operations</div>
+</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html#HRegionWALFileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-">HRegionWALFileSystem(Configuration, FileSystem, Path, RegionInfo)</a></span> - Constructor for class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in  [...]
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.html#hri">hri</a></span> - Variable in class org.apache.hadoop.hbase.master.procedure.<a href="org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.html" title="class in org.apache.hadoop.hbase.master.procedure">AbstractStateMachineRegionProcedure</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/RegionPlan.html#hri">hri</a></span> - Variable in class org.apache.hadoop.hbase.master.<a href="org/apache/hadoop/hbase/master/RegionPlan.html" title="class in org.apache.hadoop.hbase.master">RegionPlan</a></dt>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.File.html b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.File.html
index 40fb921..6fc4f9b 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.File.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.File.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private abstract static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.608">HFileArchiver.File</a>
+<pre>private abstract static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.644">HFileArchiver.File</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Wrapper to handle file operations uniformly</div>
 </li>
@@ -246,7 +246,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fs</h4>
-<pre>protected final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.609">fs</a></pre>
+<pre>protected final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.645">fs</a></pre>
 </li>
 </ul>
 </li>
@@ -263,7 +263,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>File</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.611">File</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.647">File</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
@@ -280,7 +280,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>delete</h4>
-<pre>abstract&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.619">delete</a>()
+<pre>abstract&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.655">delete</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Delete the file</div>
 <dl>
@@ -295,7 +295,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isFile</h4>
-<pre>abstract&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.626">isFile</a>()
+<pre>abstract&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.662">isFile</a>()
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Check to see if this is a file or a directory</div>
 <dl>
@@ -312,7 +312,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getChildren</h4>
-<pre>abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.633">getChildren</a>()
+<pre>abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.669">getChildren</a>()
                                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -329,7 +329,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>abstract&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.639">close</a>()
+<pre>abstract&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.675">close</a>()
              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">close any outside readers of the file</div>
 <dl>
@@ -344,7 +344,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getName</h4>
-<pre>abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.645">getName</a>()</pre>
+<pre>abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.681">getName</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the name of the file (not the full fs path, just the individual
@@ -358,7 +358,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getPath</h4>
-<pre>abstract&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.650">getPath</a>()</pre>
+<pre>abstract&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.686">getPath</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the path to this file</dd>
@@ -371,7 +371,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>moveAndClose</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.658">moveAndClose</a>(org.apache.hadoop.fs.Path&nbsp;dest)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.694">moveAndClose</a>(org.apache.hadoop.fs.Path&nbsp;dest)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Move the file to the given destination</div>
 <dl>
@@ -390,7 +390,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileSystem</h4>
-<pre>public&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.667">getFileSystem</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.703">getFileSystem</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the <code>FileSystem</code> on which this file resides</dd>
@@ -403,7 +403,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.672">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html#line.708">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html
index 8663562..be7c78a 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html
@@ -119,7 +119,7 @@
 </dl>
 <hr>
 <br>
-<pre>private abstract static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.568">HFileArchiver.FileConverter</a>&lt;T&gt;
+<pre>private abstract static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.604">HFileArchiver.FileConverter</a>&lt;T&gt;
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;T,<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&gt;</pre>
 <div class="block">Adapt a type to match the <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup"><code>HFileArchiver.File</code></a> interface, which is used internally for handling
@@ -206,7 +206,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fs</h4>
-<pre>protected final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html#line.569">fs</a></pre>
+<pre>protected final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html#line.605">fs</a></pre>
 </li>
 </ul>
 </li>
@@ -223,7 +223,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FileConverter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html#line.571">FileConverter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html#line.607">FileConverter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
index c705897..31512c3 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.579">HFileArchiver.FileStatusConverter</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.615">HFileArchiver.FileStatusConverter</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.FileConverter</a>&lt;org.apache.hadoop.fs.FileStatus&gt;</pre>
 <div class="block">Convert a FileStatus to something we can manage in the archiving</div>
 </li>
@@ -214,7 +214,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FileStatusConverter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html#line.580">FileStatusConverter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html#line.616">FileStatusConverter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
@@ -231,7 +231,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockListLast">
 <li class="blockList">
 <h4>apply</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html#line.585">apply</a>(org.apache.hadoop.fs.FileStatus&nbsp;input)</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html#line.621">apply</a>(org.apache.hadoop.fs.FileStatus&nbsp;input)</pre>
 </li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html
index 7b1ef16..3f248b1 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.680">HFileArchiver.FileablePath</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.716">HFileArchiver.FileablePath</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a></pre>
 <div class="block">A <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup"><code>HFileArchiver.File</code></a> that wraps a simple <code>Path</code> on a <code>FileSystem</code>.</div>
 </li>
@@ -252,7 +252,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockList">
 <li class="blockList">
 <h4>file</h4>
-<pre>private final&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.681">file</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.717">file</a></pre>
 </li>
 </ul>
 <a name="getAsFile">
@@ -261,7 +261,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getAsFile</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.FileStatusConverter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.682">getAsFile</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.FileStatusConverter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.718">getAsFile</a></pre>
 </li>
 </ul>
 </li>
@@ -278,7 +278,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FileablePath</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.684">FileablePath</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.720">FileablePath</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                     org.apache.hadoop.fs.Path&nbsp;file)</pre>
 </li>
 </ul>
@@ -296,7 +296,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockList">
 <li class="blockList">
 <h4>delete</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.691">delete</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.727">delete</a>()
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html#delete--">HFileArchiver.File</a></code></span></div>
 <div class="block">Delete the file</div>
@@ -314,7 +314,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockList">
 <li class="blockList">
 <h4>getName</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.696">getName</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.732">getName</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html#getName--">getName</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a></code></dd>
@@ -330,7 +330,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockList">
 <li class="blockList">
 <h4>getChildren</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.701">getChildren</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.737">getChildren</a>()
                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -349,7 +349,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockList">
 <li class="blockList">
 <h4>isFile</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.709">isFile</a>()
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.745">isFile</a>()
                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html#isFile--">HFileArchiver.File</a></code></span></div>
 <div class="block">Check to see if this is a file or a directory</div>
@@ -369,7 +369,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.714">close</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.750">close</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html#close--">HFileArchiver.File</a></code></span></div>
 <div class="block">close any outside readers of the file</div>
@@ -387,7 +387,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getPath</h4>
-<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.719">getPath</a>()</pre>
+<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html#line.755">getPath</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html#getPath--">getPath</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a></code></dd>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
index 8e5ea4c..f216994 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.728">HFileArchiver.FileableStoreFile</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.764">HFileArchiver.FileableStoreFile</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a></pre>
 <div class="block"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup"><code>HFileArchiver.File</code></a> adapter for a <a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver"><code>HStoreFile</code></a> living on a <code>FileSystem</code>
  .</div>
@@ -249,7 +249,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockListLast">
 <li class="blockList">
 <h4>file</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.729">file</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.765">file</a></pre>
 </li>
 </ul>
 </li>
@@ -266,7 +266,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FileableStoreFile</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.731">FileableStoreFile</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.767">FileableStoreFile</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                          <a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;store)</pre>
 </li>
 </ul>
@@ -284,7 +284,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockList">
 <li class="blockList">
 <h4>delete</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.737">delete</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.773">delete</a>()
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html#delete--">HFileArchiver.File</a></code></span></div>
 <div class="block">Delete the file</div>
@@ -302,7 +302,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockList">
 <li class="blockList">
 <h4>getName</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.742">getName</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.778">getName</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html#getName--">getName</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a></code></dd>
@@ -318,7 +318,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockList">
 <li class="blockList">
 <h4>isFile</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.747">isFile</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.783">isFile</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html#isFile--">HFileArchiver.File</a></code></span></div>
 <div class="block">Check to see if this is a file or a directory</div>
 <dl>
@@ -335,7 +335,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockList">
 <li class="blockList">
 <h4>getChildren</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.752">getChildren</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.788">getChildren</a>()
                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -354,7 +354,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.758">close</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.794">close</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html#close--">HFileArchiver.File</a></code></span></div>
 <div class="block">close any outside readers of the file</div>
@@ -372,7 +372,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getPath</h4>
-<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.763">getPath</a>()</pre>
+<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html#line.799">getPath</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html#getPath--">getPath</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a></code></dd>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
index 8180195..6231f4b 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.594">HFileArchiver.StoreToFile</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.630">HFileArchiver.StoreToFile</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.FileConverter</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</pre>
 <div class="block">Convert the <a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver"><code>HStoreFile</code></a> into something we can manage in the archive
  methods</div>
@@ -215,7 +215,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockListLast">
 <li class="blockList">
 <h4>StoreToFile</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html#line.595">StoreToFile</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html#line.631">StoreToFile</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
@@ -232,7 +232,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.Fil
 <ul class="blockListLast">
 <li class="blockList">
 <h4>apply</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html#line.600">apply</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;input)</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html#line.636">apply</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;input)</pre>
 </li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html
index f8cc02d..3edc4c3 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9};
+var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.61">HFileArchiver</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.63">HFileArchiver</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Utility class to handle the removal of HFiles (or the respective <a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver"><code>StoreFiles</code></a>)
  for an HRegion from the <code>FileSystem</code>. The hfiles will be archived or deleted, depending on
@@ -244,6 +244,14 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <th class="colLast" scope="col">Method and Description</th>
 </tr>
 <tr id="i0" class="altColor">
+<td class="colFirst"><code>private static void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archive-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-byte:A-java.util.Collection-org.apache.hadoop.fs.Path-">archive</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+       <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
+       byte[]&nbsp;family,
+       <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles,
+       org.apache.hadoop.fs.Path&nbsp;storeArchiveDir)</code>&nbsp;</td>
+</tr>
+<tr id="i1" class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveFamily-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.fs.Path-byte:A-">archiveFamily</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
              org.apache.hadoop.conf.Configuration&nbsp;conf,
@@ -254,7 +262,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
  either by archiving them or by deleting them outright</div>
 </td>
 </tr>
-<tr id="i1" class="rowColor">
+<tr id="i2" class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveFamilyByFamilyDir-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.fs.Path-byte:A-">archiveFamilyByFamilyDir</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                         org.apache.hadoop.conf.Configuration&nbsp;conf,
@@ -265,7 +273,17 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
  either by archiving them or by deleting them outright</div>
 </td>
 </tr>
-<tr id="i2" class="altColor">
+<tr id="i3" class="rowColor">
+<td class="colFirst"><code>static void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveRecoveredEdits-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-byte:A-java.util.Collection-">archiveRecoveredEdits</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+                     org.apache.hadoop.fs.FileSystem&nbsp;fs,
+                     <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
+                     byte[]&nbsp;family,
+                     <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;replayedEdits)</code>
+<div class="block">Archive recovered edits using existing logic for archiving store files.</div>
+</td>
+</tr>
+<tr id="i4" class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveRegion-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-">archiveRegion</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
              org.apache.hadoop.fs.FileSystem&nbsp;fs,
@@ -273,7 +291,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="block">Cleans up all the files for a HRegion by archiving the HFiles to the archive directory</div>
 </td>
 </tr>
-<tr id="i3" class="rowColor">
+<tr id="i5" class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveRegion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-">archiveRegion</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
              org.apache.hadoop.fs.Path&nbsp;rootdir,
@@ -282,7 +300,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="block">Remove an entire region from the table directory via archiving the region's hfiles.</div>
 </td>
 </tr>
-<tr id="i4" class="altColor">
+<tr id="i6" class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveRegions-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-java.util.List-">archiveRegions</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
               org.apache.hadoop.fs.FileSystem&nbsp;fs,
@@ -292,7 +310,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="block">Archive the specified regions in parallel.</div>
 </td>
 </tr>
-<tr id="i5" class="rowColor">
+<tr id="i7" class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveStoreFile-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.fs.Path-byte:A-org.apache.hadoop.fs.Path-">archiveStoreFile</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                 org.apache.hadoop.fs.FileSystem&nbsp;fs,
@@ -303,7 +321,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="block">Archive the store file</div>
 </td>
 </tr>
-<tr id="i6" class="altColor">
+<tr id="i8" class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveStoreFiles-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.fs.Path-byte:A-java.util.Collection-">archiveStoreFiles</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                  org.apache.hadoop.fs.FileSystem&nbsp;fs,
@@ -314,34 +332,34 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="block">Remove the store files, either by archiving them or outright deletion</div>
 </td>
 </tr>
-<tr id="i7" class="rowColor">
+<tr id="i9" class="rowColor">
 <td class="colFirst"><code>private static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#deleteRegionWithoutArchiving-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">deleteRegionWithoutArchiving</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                             org.apache.hadoop.fs.Path&nbsp;regionDir)</code>
 <div class="block">Without regard for backup, delete a region.</div>
 </td>
 </tr>
-<tr id="i8" class="altColor">
+<tr id="i10" class="altColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#deleteStoreFilesWithoutArchiving-java.util.Collection-">deleteStoreFilesWithoutArchiving</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoo [...]
 <div class="block">Just do a simple delete of the given store files</div>
 </td>
 </tr>
-<tr id="i9" class="rowColor">
+<tr id="i11" class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#exists-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-">exists</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
       org.apache.hadoop.fs.FileSystem&nbsp;fs,
       <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;info)</code>&nbsp;</td>
 </tr>
-<tr id="i10" class="altColor">
+<tr id="i12" class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadPoolExecutor.html?is-external=true" title="class or interface in java.util.concurrent">ThreadPoolExecutor</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#getArchiveExecutor-org.apache.hadoop.conf.Configuration-">getArchiveExecutor</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>&nbsp;</td>
 </tr>
-<tr id="i11" class="rowColor">
+<tr id="i13" class="rowColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadFactory.html?is-external=true" title="class or interface in java.util.concurrent">ThreadFactory</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#getThreadFactory--">getThreadFactory</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i12" class="altColor">
+<tr id="i14" class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#resolveAndArchive-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-java.util.Collection-long-">resolveAndArchive</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                  org.apache.hadoop.fs.Path&nbsp;baseArchiveDir,
@@ -351,7 +369,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
  renaming of the existing file and then archive the passed in files.</div>
 </td>
 </tr>
-<tr id="i13" class="rowColor">
+<tr id="i15" class="rowColor">
 <td class="colFirst"><code>private static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#resolveAndArchiveFile-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.backup.HFileArchiver.File-java.lang.String-">resolveAndArchiveFile</a></span>(org.apache.hadoop.fs.Path&nbsp;archiveDir,
                      <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&nbsp;currentFile,
@@ -387,7 +405,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.62">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.64">LOG</a></pre>
 </li>
 </ul>
 <a name="SEPARATOR">
@@ -396,7 +414,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SEPARATOR</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.63">SEPARATOR</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.65">SEPARATOR</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.backup.HFileArchiver.SEPARATOR">Constant Field Values</a></dd>
@@ -409,7 +427,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_RETRIES_NUMBER</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.66">DEFAULT_RETRIES_NUMBER</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.68">DEFAULT_RETRIES_NUMBER</a></pre>
 <div class="block">Number of retries in case of fs operation failure</div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
@@ -423,7 +441,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>FUNC_FILE_TO_PATH</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>,org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.68">FUNC_FILE_TO_PATH</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>,org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.70">FUNC_FILE_TO_PATH</a></pre>
 </li>
 </ul>
 <a name="archiveExecutor">
@@ -432,7 +450,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>archiveExecutor</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadPoolExecutor.html?is-external=true" title="class or interface in java.util.concurrent">ThreadPoolExecutor</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.76">archiveExecutor</a></pre>
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadPoolExecutor.html?is-external=true" title="class or interface in java.util.concurrent">ThreadPoolExecutor</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.78">archiveExecutor</a></pre>
 </li>
 </ul>
 </li>
@@ -449,7 +467,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFileArchiver</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.78">HFileArchiver</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.80">HFileArchiver</a>()</pre>
 </li>
 </ul>
 </li>
@@ -466,7 +484,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>exists</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.85">exists</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.87">exists</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                              org.apache.hadoop.fs.FileSystem&nbsp;fs,
                              <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;info)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -484,7 +502,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>archiveRegion</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.98">archiveRegion</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.100">archiveRegion</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                  org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                  <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;info)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -505,7 +523,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>archiveRegion</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.116">archiveRegion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.118">archiveRegion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                     org.apache.hadoop.fs.Path&nbsp;rootdir,
                                     org.apache.hadoop.fs.Path&nbsp;tableDir,
                                     org.apache.hadoop.fs.Path&nbsp;regionDir)
@@ -532,7 +550,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>archiveRegions</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.185">archiveRegions</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.187">archiveRegions</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                   org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                   org.apache.hadoop.fs.Path&nbsp;rootDir,
                                   org.apache.hadoop.fs.Path&nbsp;tableDir,
@@ -560,7 +578,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getArchiveExecutor</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadPoolExecutor.html?is-external=true" title="class or interface in java.util.concurrent">ThreadPoolExecutor</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.206">getArchiveExecutor</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadPoolExecutor.html?is-external=true" title="class or interface in java.util.concurrent">ThreadPoolExecutor</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.208">getArchiveExecutor</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 <a name="getThreadFactory--">
@@ -569,7 +587,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getThreadFactory</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadFactory.html?is-external=true" title="class or interface in java.util.concurrent">ThreadFactory</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.222">getThreadFactory</a>()</pre>
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadFactory.html?is-external=true" title="class or interface in java.util.concurrent">ThreadFactory</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.224">getThreadFactory</a>()</pre>
 </li>
 </ul>
 <a name="archiveFamily-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.fs.Path-byte:A-">
@@ -578,7 +596,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>archiveFamily</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.246">archiveFamily</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.248">archiveFamily</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                  org.apache.hadoop.conf.Configuration&nbsp;conf,
                                  <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;parent,
                                  org.apache.hadoop.fs.Path&nbsp;tableDir,
@@ -604,7 +622,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>archiveFamilyByFamilyDir</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.262">archiveFamilyByFamilyDir</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.264">archiveFamilyByFamilyDir</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                             org.apache.hadoop.conf.Configuration&nbsp;conf,
                                             <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;parent,
                                             org.apache.hadoop.fs.Path&nbsp;familyDir,
@@ -630,14 +648,13 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>archiveStoreFiles</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.296">archiveStoreFiles</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.298">archiveStoreFiles</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                      org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                      <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
                                      org.apache.hadoop.fs.Path&nbsp;tableDir,
                                      byte[]&nbsp;family,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles)
-                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
-                                     <a href="../../../../../org/apache/hadoop/hbase/backup/FailedArchiveException.html" title="class in org.apache.hadoop.hbase.backup">FailedArchiveException</a></pre>
+                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Remove the store files, either by archiving them or outright deletion</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -649,7 +666,54 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
           attempted; otherwise likely to cause an <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io"><code>IOException</code></a></dd>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if the files could not be correctly disposed.</dd>
-<dd><code><a href="../../../../../org/apache/hadoop/hbase/backup/FailedArchiveException.html" title="class in org.apache.hadoop.hbase.backup">FailedArchiveException</a></code></dd>
+</dl>
+</li>
+</ul>
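
As an editorial aside for readers of this generated page: below is a minimal caller sketch for archiveStoreFiles, based only on the signature and javadoc shown above; the helper class and method names are hypothetical and not part of this commit.

    import java.io.IOException;
    import java.util.Collection;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.backup.HFileArchiver;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.regionserver.HStoreFile;

    // Hypothetical helper (not from this commit): archives the store files a compaction made obsolete.
    final class CompactionCleanupSketch {
      static void archiveCompactedFiles(Configuration conf, FileSystem fs, RegionInfo region,
          Path tableDir, byte[] family, Collection<HStoreFile> compactedFiles) throws IOException {
        // Delegates to the utility documented above: the files are either moved to the archive
        // directory or deleted outright; an IOException means they could not be disposed of.
        HFileArchiver.archiveStoreFiles(conf, fs, region, tableDir, family, compactedFiles);
      }
    }
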
+<a name="archiveRecoveredEdits-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-byte:A-java.util.Collection-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>archiveRecoveredEdits</h4>
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.319">archiveRecoveredEdits</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+                                         org.apache.hadoop.fs.FileSystem&nbsp;fs,
+                                         <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
+                                         byte[]&nbsp;family,
+                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;replayedEdits)
+                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Archive recovered edits using existing logic for archiving store files. This is currently only
+ relevant when <b>hbase.region.archive.recovered.edits</b> is true, as recovered edits shouldn't
+ be kept after replay. In theory, we could use the very same method available for archiving
+ store files, but supporting the WAL dir and store files on different FileSystems added the need for
+ extra validation of the passed FileSystem instance and the path where the archived edits
+ should be placed.</div>
+<dl>
+<dt><span class="paramLabel">Parameters:</span></dt>
+<dd><code>conf</code> - <code>Configuration</code> to determine the archive directory.</dd>
+<dd><code>fs</code> - the filesystem used for storing WAL files.</dd>
+<dd><code>regionInfo</code> - <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client"><code>RegionInfo</code></a> a pseudo region representation for the archiving logic.</dd>
+<dd><code>family</code> - a pseudo family representation for the archiving logic.</dd>
+<dd><code>replayedEdits</code> - the recovered edits to be archived.</dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if files can't be achived due to some internal error.</dd>
+</dl>
+</li>
+</ul>
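
A similarly hedged sketch for the new archiveRecoveredEdits entry point documented above; the guard on hbase.region.archive.recovered.edits and the pseudo family name used here are illustrative assumptions, not code taken from this commit.

    import java.io.IOException;
    import java.util.Collection;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hbase.backup.HFileArchiver;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.regionserver.HStoreFile;
    import org.apache.hadoop.hbase.util.Bytes;

    // Hypothetical helper (not from this commit): archives replayed recovered edits when the flag is on.
    final class RecoveredEditsCleanupSketch {
      // Illustrative pseudo family name; the javadoc above only says a pseudo family is expected.
      private static final byte[] PSEUDO_FAMILY = Bytes.toBytes("recovered.edits");

      static void maybeArchiveReplayedEdits(Configuration conf, FileSystem walFs, RegionInfo region,
          Collection<HStoreFile> replayedEdits) throws IOException {
        // Per the description above, archiving is only relevant when this property is true;
        // walFs must be the filesystem that actually stores the WAL / recovered-edits files.
        if (conf.getBoolean("hbase.region.archive.recovered.edits", false)) {
          HFileArchiver.archiveRecoveredEdits(conf, walFs, region, PSEUDO_FAMILY, replayedEdits);
        }
      }
    }
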
+<a name="archive-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-byte:A-java.util.Collection-org.apache.hadoop.fs.Path-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>archive</h4>
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.337">archive</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+                            <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
+                            byte[]&nbsp;family,
+                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles,
+                            org.apache.hadoop.fs.Path&nbsp;storeArchiveDir)
+                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
 </dl>
 </li>
 </ul>
@@ -659,7 +723,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>archiveStoreFile</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.356">archiveStoreFile</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.392">archiveStoreFile</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                     org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                     <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
                                     org.apache.hadoop.fs.Path&nbsp;tableDir,
@@ -686,7 +750,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>resolveAndArchive</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.389">resolveAndArchive</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.425">resolveAndArchive</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                           org.apache.hadoop.fs.Path&nbsp;baseArchiveDir,
                                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&gt;&nbsp;toArchive,
                                                           long&nbsp;start)
@@ -716,7 +780,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>resolveAndArchiveFile</h4>
-<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.449">resolveAndArchiveFile</a>(org.apache.hadoop.fs.Path&nbsp;archiveDir,
+<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.485">resolveAndArchiveFile</a>(org.apache.hadoop.fs.Path&nbsp;archiveDir,
                                              <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup">HFileArchiver.File</a>&nbsp;currentFile,
                                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;archiveStartTime)
                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -743,7 +807,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteRegionWithoutArchiving</h4>
-<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.527">deleteRegionWithoutArchiving</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.563">deleteRegionWithoutArchiving</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                     org.apache.hadoop.fs.Path&nbsp;regionDir)
                                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Without regard for backup, delete a region. Should be used with caution.</div>
@@ -764,7 +828,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>deleteStoreFilesWithoutArchiving</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.546">deleteStoreFilesWithoutArchiving</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles)
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/backup/HFileArchiver.html#line.582">deleteStoreFilesWithoutArchiving</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles)
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Just do a simple delete of the given store files
  <p>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/class-use/FailedArchiveException.html b/devapidocs/org/apache/hadoop/hbase/backup/class-use/FailedArchiveException.html
index ba5ce85..bcb57d9 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/class-use/FailedArchiveException.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/class-use/FailedArchiveException.html
@@ -72,54 +72,7 @@
 <div class="header">
 <h2 title="Uses of Class org.apache.hadoop.hbase.backup.FailedArchiveException" class="title">Uses of Class<br>org.apache.hadoop.hbase.backup.FailedArchiveException</h2>
 </div>
-<div class="classUseContainer">
-<ul class="blockList">
-<li class="blockList">
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
-<caption><span>Packages that use <a href="../../../../../../org/apache/hadoop/hbase/backup/FailedArchiveException.html" title="class in org.apache.hadoop.hbase.backup">FailedArchiveException</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Package</th>
-<th class="colLast" scope="col">Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.backup">org.apache.hadoop.hbase.backup</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList">
-<ul class="blockList">
-<li class="blockList"><a name="org.apache.hadoop.hbase.backup">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../org/apache/hadoop/hbase/backup/FailedArchiveException.html" title="class in org.apache.hadoop.hbase.backup">FailedArchiveException</a> in <a href="../../../../../../org/apache/hadoop/hbase/backup/package-summary.html">org.apache.hadoop.hbase.backup</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../org/apache/hadoop/hbase/backup/package-summary.html">org.apache.hadoop.hbase.backup</a> that throw <a href="../../../../../../org/apache/hadoop/hbase/backup/FailedArchiveException.html" title="class in org.apache.hadoop.hbase.backup">FailedArchiveException</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>static void</code></td>
-<td class="colLast"><span class="typeNameLabel">HFileArchiver.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveStoreFiles-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.fs.Path-byte:A-java.util.Collection-">archiveStoreFiles</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
-                 org.apache.hadoop.fs.FileSystem&nbsp;fs,
-                 <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
-                 org.apache.hadoop.fs.Path&nbsp;tableDir,
-                 byte[]&nbsp;family,
-                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles)</code>
-<div class="block">Remove the store files, either by archiving them or outright deletion</div>
-</td>
-</tr>
-</tbody>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
+<div class="classUseContainer">No usage of org.apache.hadoop.hbase.backup.FailedArchiveException</div>
 <!-- ======= START OF BOTTOM NAVBAR ====== -->
 <div class="bottomNav"><a name="navbar.bottom">
 <!--   -->
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 0e95f19..ea5e0bc 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -167,10 +167,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-use.html b/devapidocs/org/apache/hadoop/hbase/backup/package-use.html
index d3fd60e..cc27052 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-use.html
@@ -159,38 +159,33 @@
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/backup/class-use/BackupType.html#org.apache.hadoop.hbase.backup">BackupType</a>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/backup/class-use/FailedArchiveException.html#org.apache.hadoop.hbase.backup">FailedArchiveException</a>
-<div class="block">Exception indicating that some files in the requested set could not be archived.</div>
-</td>
-</tr>
-<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/backup/class-use/HFileArchiver.File.html#org.apache.hadoop.hbase.backup">HFileArchiver.File</a>
 <div class="block">Wrapper to handle file operations uniformly</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/backup/class-use/HFileArchiver.FileConverter.html#org.apache.hadoop.hbase.backup">HFileArchiver.FileConverter</a>
 <div class="block">Adapt a type to match the <a href="../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.File.html" title="class in org.apache.hadoop.hbase.backup"><code>HFileArchiver.File</code></a> interface, which is used internally for handling
  archival/removal of files</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/backup/class-use/HFileArchiver.FileStatusConverter.html#org.apache.hadoop.hbase.backup">HFileArchiver.FileStatusConverter</a>
 <div class="block">Convert a FileStatus to something we can manage in the archiving</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/backup/class-use/RestoreJob.html#org.apache.hadoop.hbase.backup">RestoreJob</a>
 <div class="block">Restore operation job interface Concrete implementation is provided by backup provider, see
  <a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreFactory.html" title="class in org.apache.hadoop.hbase.backup"><code>BackupRestoreFactory</code></a></div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/backup/class-use/RestoreRequest.html#org.apache.hadoop.hbase.backup">RestoreRequest</a>
 <div class="block">POJO class for restore request</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/backup/class-use/RestoreRequest.Builder.html#org.apache.hadoop.hbase.backup">RestoreRequest.Builder</a>&nbsp;</td>
 </tr>
 </tbody>
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
index ff02734..951e610 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
@@ -681,6 +681,14 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </tr>
 <tbody>
 <tr class="altColor">
+<td class="colFirst"><code>private static void</code></td>
+<td class="colLast"><span class="typeNameLabel">HFileArchiver.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archive-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-byte:A-java.util.Collection-org.apache.hadoop.fs.Path-">archive</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+       <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
+       byte[]&nbsp;family,
+       <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles,
+       org.apache.hadoop.fs.Path&nbsp;storeArchiveDir)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">HFileArchiver.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveFamily-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.fs.Path-byte:A-">archiveFamily</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
              org.apache.hadoop.conf.Configuration&nbsp;conf,
@@ -691,7 +699,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
  either by archiving them or outright deletion</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">HFileArchiver.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveFamilyByFamilyDir-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.fs.Path-byte:A-">archiveFamilyByFamilyDir</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                         org.apache.hadoop.conf.Configuration&nbsp;conf,
@@ -702,6 +710,16 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
  either by archiving them or outright deletion</div>
 </td>
 </tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static void</code></td>
+<td class="colLast"><span class="typeNameLabel">HFileArchiver.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveRecoveredEdits-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-byte:A-java.util.Collection-">archiveRecoveredEdits</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+                     org.apache.hadoop.fs.FileSystem&nbsp;fs,
+                     <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
+                     byte[]&nbsp;family,
+                     <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;replayedEdits)</code>
+<div class="block">Archive recovered edits using existing logic for archiving store files.</div>
+</td>
+</tr>
 <tr class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">HFileArchiver.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveRegion-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-">archiveRegion</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
@@ -5022,7 +5040,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 <td class="colLast"><span class="typeNameLabel">HRegionFileSystem.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#regionInfo">regionInfo</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a></code></td>
+<td class="colFirst"><code>(package private) <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a></code></td>
 <td class="colLast"><span class="typeNameLabel">HRegionFileSystem.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#regionInfoForFs">regionInfoForFs</a></span></code>&nbsp;</td>
 </tr>
 </tbody>
@@ -5515,13 +5533,19 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </td>
 </tr>
 <tr class="altColor">
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html#HRegionWALFileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-">HRegionWALFileSystem</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+                    org.apache.hadoop.fs.FileSystem&nbsp;fs,
+                    org.apache.hadoop.fs.Path&nbsp;tableDir,
+                    <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html#RegionStateTransitionContext-org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode-long-long-long-org.apache.hadoop.hbase.client.RegionInfo-">RegionStateTransitionContext</a></span>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.Reg [...]
                             long&nbsp;openSeqNum,
                             long&nbsp;procId,
                             long&nbsp;masterSystemTime,
                             <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;hri)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html#RegionStateTransitionContext-org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode-long-long-org.apache.hadoop.hbase.client.RegionInfo...-">RegionStateTransitionContext</a></span>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.Regio [...]
                             long&nbsp;openSeqNum,
                             long&nbsp;masterSystemTime,
@@ -6969,6 +6993,14 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </td>
 </tr>
 <tr class="rowColor">
+<td class="colFirst"><code>static org.apache.hadoop.fs.Path</code></td>
+<td class="colLast"><span class="typeNameLabel">HFileArchiveUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/HFileArchiveUtil.html#getStoreArchivePathForRootDir-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-byte:A-">getStoreArchivePathForRootDir</a></span>(org.apache.hadoop.fs.Path&nbsp;rootDir,
+                             <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;region,
+                             byte[]&nbsp;family)</code>
+<div class="block">Gets the archive directory under specified root dir.</div>
+</td>
+</tr>
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileInfo.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileInfo</a></code></td>
 <td class="colLast"><span class="typeNameLabel">ServerRegionReplicaUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.html#getStoreFileInfo-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.hbase.client.RegionInfo-java.lang.String-org.apache.hadoop.fs.Path-">getStoreFileInfo</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                 org.apache.hadoop.fs.FileSystem&nbsp;fs,
@@ -6979,25 +7011,25 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 <div class="block">Returns a StoreFileInfo from the given FileStatus.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">RegionMover.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/RegionMover.html#isSameServer-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.hbase.ServerName-">isSameServer</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;region,
             <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>
 <div class="block">Returns true if passed region is still on serverName when we look at hbase:meta.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">RegionMover.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/RegionMover.html#isSuccessfulScan-org.apache.hadoop.hbase.client.RegionInfo-">isSuccessfulScan</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;region)</code>
 <div class="block">Tries to scan a row from passed region</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">HBaseFsckRepair.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsckRepair.html#removeParentInMeta-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.client.RegionInfo-">removeParentInMeta</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                   <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;hri)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">HBaseFsckRepair.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsckRepair.html#waitUntilAssigned-org.apache.hadoop.hbase.client.Admin-org.apache.hadoop.hbase.client.RegionInfo-">waitUntilAssigned</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a>&nbsp;admin,
                  <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;region)</code>&nbsp;</td>
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index ca870b4..4c00f36 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -426,21 +426,21 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
index dc618a4..c51f7b2 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
@@ -201,8 +201,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.MetaTableOps.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">MetaTableMetrics.MetaTableOps</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">RegionObserver.MutationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.MetaTableOps.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">MetaTableMetrics.MetaTableOps</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
index 127caa7..531254c 100644
--- a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
@@ -104,8 +104,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/EventType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">EventType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/ExecutorType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">ExecutorType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/EventType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">EventType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 98fd8ea..81523a2 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -190,13 +190,13 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
index 677d4b5..1133c69 100644
--- a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
@@ -139,8 +139,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/HttpConfig.Policy.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">HttpConfig.Policy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Event.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Event</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Output.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Output</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Event.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Event</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index fcc2915..883bb4f 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -305,12 +305,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/ReaderContext.ReaderType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">ReaderContext.ReaderType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockCacheFactory.ExternalBlockCaches</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/ReaderContext.ReaderType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">ReaderContext.ReaderType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index 6e34331..87f5ac4 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -296,10 +296,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
index b988a3c..b3e7c99 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
@@ -151,8 +151,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.TransitionType.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">TransitRegionStateProcedure.TransitionType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/ServerState.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">ServerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.TransitionType.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">TransitRegionStateProcedure.TransitionType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index e80ace6..65e7b06 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -362,12 +362,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/ServerManager.ServerLiveState.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">ServerManager.ServerLiveState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/ServerManager.ServerLiveState.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">ServerManager.ServerLiveState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 53f2cba..7d2182b 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -220,9 +220,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
index 7524d90..452eaee 100644
--- a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
@@ -127,8 +127,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">MonitoredTask.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/TaskMonitor.TaskFilter.TaskType.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">TaskMonitor.TaskFilter.TaskType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">MonitoredTask.State</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index a2a14c4..b0a6833 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -425,19 +425,19 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index 49595c8..fc6778c 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -216,11 +216,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 3010e91..09244d0 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -240,12 +240,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
index 625ddc7..4d12b48 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.html
@@ -1449,7 +1449,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/io/HeapSize.html" tit
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html#getRegionServicesForStores--">getRegionServicesForStores</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i125" class="rowColor">
-<td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a></code></td>
+<td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html#getRegionWALFileSystem--">getRegionWALFileSystem</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i126" class="altColor">
@@ -4502,8 +4502,8 @@ public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/r
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionWALFileSystem</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegion.html#line.1960">getRegionWALFileSystem</a>()
-                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegion.html#line.1960">getRegionWALFileSystem</a>()
+                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the WAL <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><code>HRegionFileSystem</code></a> used by this region</dd>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
index edc6b8b..11f4e89 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
@@ -107,6 +107,10 @@ var activeTableTab = "activeTableTab";
 <div class="description">
 <ul class="blockList">
 <li class="blockList">
+<dl>
+<dt>Direct Known Subclasses:</dt>
+<dd><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a></dd>
+</dl>
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
@@ -137,7 +141,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#baseSleepBeforeRetries">baseSleepBeforeRetries</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private org.apache.hadoop.conf.Configuration</code></td>
+<td class="colFirst"><code>(package private) org.apache.hadoop.conf.Configuration</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#conf">conf</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
@@ -149,7 +153,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#DEFAULT_HDFS_CLIENT_RETRIES_NUMBER">DEFAULT_HDFS_CLIENT_RETRIES_NUMBER</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private org.apache.hadoop.fs.FileSystem</code></td>
+<td class="colFirst"><code>(package private) org.apache.hadoop.fs.FileSystem</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#fs">fs</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
@@ -196,7 +200,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#regionInfo">regionInfo</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a></code></td>
+<td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#regionInfoForFs">regionInfoForFs</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
@@ -737,7 +741,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>regionInfoForFs</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#line.83">regionInfoForFs</a></pre>
+<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#line.83">regionInfoForFs</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -746,7 +750,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#line.84">conf</a></pre>
+<pre>final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#line.84">conf</a></pre>
 </li>
 </ul>
 <a name="tableDir">
@@ -764,7 +768,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>fs</h4>
-<pre>private final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#line.86">fs</a></pre>
+<pre>final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#line.86">fs</a></pre>
 </li>
 </ul>
 <a name="regionDir">
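
The hunks above widen regionInfoForFs, conf and fs from private to package-private, presumably so the new same-package subclass introduced by this commit can read them directly. A minimal sketch of that visibility rule, using hypothetical class and package names rather than the HBase types:

    package example.regionfs;

    // Toy illustration of the Java visibility rule behind the change: a
    // package-private member is reachable from a subclass only when both
    // classes share a package, as HRegionFileSystem and HRegionWALFileSystem do.
    class BaseRegionFs {
      final String conf = "configuration";   // package-private, like the widened fields above
    }

    class WalRegionFs extends BaseRegionFs {
      String describeConf() {
        return conf;   // legal: same package, no accessor needed
      }
    }
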
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html
index a1a3759..6081dc4 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../index.html?org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html" target="_top">Frames</a></li>
@@ -375,7 +375,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/ServerCommandLine.h
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../index.html?org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html" target="_top">Frames</a></li>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html
new file mode 100644
index 0000000..1948bbf
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html
@@ -0,0 +1,320 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<!-- NewPage -->
+<html lang="en">
+<head>
+<!-- Generated by javadoc -->
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>HRegionWALFileSystem (Apache HBase 3.0.0-SNAPSHOT API)</title>
+<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
+<script type="text/javascript" src="../../../../../script.js"></script>
+</head>
+<body>
+<script type="text/javascript"><!--
+    try {
+        if (location.href.indexOf('is-external=true') == -1) {
+            parent.document.title="HRegionWALFileSystem (Apache HBase 3.0.0-SNAPSHOT API)";
+        }
+    }
+    catch(err) {
+    }
+//-->
+var methods = {"i0":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+</script>
+<noscript>
+<div>JavaScript is disabled on your browser.</div>
+</noscript>
+<!-- ========= START OF TOP NAVBAR ======= -->
+<div class="topNav"><a name="navbar.top">
+<!--   -->
+</a>
+<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
+<a name="navbar.top.firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="../../../../../overview-summary.html">Overview</a></li>
+<li><a href="package-summary.html">Package</a></li>
+<li class="navBarCell1Rev">Class</li>
+<li><a href="class-use/HRegionWALFileSystem.html">Use</a></li>
+<li><a href="package-tree.html">Tree</a></li>
+<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
+<li><a href="../../../../../index-all.html">Index</a></li>
+<li><a href="../../../../../help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+</ul>
+<ul class="navList">
+<li><a href="../../../../../index.html?org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" target="_top">Frames</a></li>
+<li><a href="HRegionWALFileSystem.html" target="_top">No&nbsp;Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_top">
+<li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_top");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<div>
+<ul class="subNavList">
+<li>Summary:&nbsp;</li>
+<li>Nested&nbsp;|&nbsp;</li>
+<li><a href="#fields.inherited.from.class.org.apache.hadoop.hbase.regionserver.HRegionFileSystem">Field</a>&nbsp;|&nbsp;</li>
+<li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.summary">Method</a></li>
+</ul>
+<ul class="subNavList">
+<li>Detail:&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.detail">Method</a></li>
+</ul>
+</div>
+<a name="skip.navbar.top">
+<!--   -->
+</a></div>
+<!-- ========= END OF TOP NAVBAR ========= -->
+<!-- ======== START OF CLASS DATA ======== -->
+<div class="header">
+<div class="subTitle">org.apache.hadoop.hbase.regionserver</div>
+<h2 title="Class HRegionWALFileSystem" class="title">Class HRegionWALFileSystem</h2>
+</div>
+<div class="contentContainer">
+<ul class="inheritance">
+<li><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</a></li>
+<li>
+<ul class="inheritance">
+<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">org.apache.hadoop.hbase.regionserver.HRegionFileSystem</a></li>
+<li>
+<ul class="inheritance">
+<li>org.apache.hadoop.hbase.regionserver.HRegionWALFileSystem</li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+<div class="description">
+<ul class="blockList">
+<li class="blockList">
+<hr>
+<br>
+<pre>@InterfaceAudience.Private
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html#line.34">HRegionWALFileSystem</a>
+extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a></pre>
+<div class="block">A Wrapper for the region FileSystem operations adding WAL specific operations</div>
+</li>
+</ul>
+</div>
+<div class="summary">
+<ul class="blockList">
+<li class="blockList">
+<!-- =========== FIELD SUMMARY =========== -->
+<ul class="blockList">
+<li class="blockList"><a name="field.summary">
+<!--   -->
+</a>
+<h3>Field Summary</h3>
+<ul class="blockList">
+<li class="blockList"><a name="fields.inherited.from.class.org.apache.hadoop.hbase.regionserver.HRegionFileSystem">
+<!--   -->
+</a>
+<h3>Fields inherited from class&nbsp;org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a></h3>
+<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#conf">conf</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#fs">fs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#REGION_INFO_FILE">REGION_INFO_FILE</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#REGION_MERGES_DIR">REGION_MERGES_DIR</a>, <a href="../../../../../org/apache/hadoo [...]
+</ul>
+</li>
+</ul>
+<!-- ======== CONSTRUCTOR SUMMARY ======== -->
+<ul class="blockList">
+<li class="blockList"><a name="constructor.summary">
+<!--   -->
+</a>
+<h3>Constructor Summary</h3>
+<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
+<caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colOne" scope="col">Constructor and Description</th>
+</tr>
+<tr class="altColor">
+<td class="colOne"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html#HRegionWALFileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-">HRegionWALFileSystem</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+                    org.apache.hadoop.fs.FileSystem&nbsp;fs,
+                    org.apache.hadoop.fs.Path&nbsp;tableDir,
+                    <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo)</code>&nbsp;</td>
+</tr>
+</table>
+</li>
+</ul>
+<!-- ========== METHOD SUMMARY =========== -->
+<ul class="blockList">
+<li class="blockList"><a name="method.summary">
+<!--   -->
+</a>
+<h3>Method Summary</h3>
+<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
+<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Method and Description</th>
+</tr>
+<tr id="i0" class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html#archiveRecoveredEdits-java.lang.String-java.util.Collection-">archiveRecoveredEdits</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;familyName,
+                     <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;storeFiles)</code>
+<div class="block">Closes and archives the specified store files from the specified family.</div>
+</td>
+</tr>
+</table>
+<ul class="blockList">
+<li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.hbase.regionserver.HRegionFileSystem">
+<!--   -->
+</a>
+<h3>Methods inherited from class&nbsp;org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a></h3>
+<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#bulkLoadStoreFile-java.lang.String-org.apache.hadoop.fs.Path-long-">bulkLoadStoreFile</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#checkRegionInfoOnFilesystem--">checkRegionInfoOnFilesystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#cleanupAnySplitDetritus--">cleanupAnySplitDetritus</a>, <a href="../../../../.. [...]
+</ul>
+<ul class="blockList">
+<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
+<!--   -->
+</a>
+<h3>Methods inherited from class&nbsp;java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></h3>
+<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--" title="class or interface in java.lang">clone</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--" title="class or interface in java.lang">finalize</a>, <a href="htt [...]
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</div>
+<div class="details">
+<ul class="blockList">
+<li class="blockList">
+<!-- ========= CONSTRUCTOR DETAIL ======== -->
+<ul class="blockList">
+<li class="blockList"><a name="constructor.detail">
+<!--   -->
+</a>
+<h3>Constructor Detail</h3>
+<a name="HRegionWALFileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>HRegionWALFileSystem</h4>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html#line.36">HRegionWALFileSystem</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+                     org.apache.hadoop.fs.FileSystem&nbsp;fs,
+                     org.apache.hadoop.fs.Path&nbsp;tableDir,
+                     <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo)</pre>
+</li>
+</ul>
+</li>
+</ul>
+<!-- ============ METHOD DETAIL ========== -->
+<ul class="blockList">
+<li class="blockList"><a name="method.detail">
+<!--   -->
+</a>
+<h3>Method Detail</h3>
+<a name="archiveRecoveredEdits-java.lang.String-java.util.Collection-">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>archiveRecoveredEdits</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html#line.46">archiveRecoveredEdits</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;familyName,
+                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;storeFiles)
+                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Closes and archives the specified store files from the specified family.</div>
+<dl>
+<dt><span class="paramLabel">Parameters:</span></dt>
+<dd><code>familyName</code> - Family that contains the store files</dd>
+<dd><code>storeFiles</code> - set of store files to remove</dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if the archiving fails</dd>
+</dl>
+</li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</div>
+</div>
+<!-- ========= END OF CLASS DATA ========= -->
+<!-- ======= START OF BOTTOM NAVBAR ====== -->
+<div class="bottomNav"><a name="navbar.bottom">
+<!--   -->
+</a>
+<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
+<a name="navbar.bottom.firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="../../../../../overview-summary.html">Overview</a></li>
+<li><a href="package-summary.html">Package</a></li>
+<li class="navBarCell1Rev">Class</li>
+<li><a href="class-use/HRegionWALFileSystem.html">Use</a></li>
+<li><a href="package-tree.html">Tree</a></li>
+<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
+<li><a href="../../../../../index-all.html">Index</a></li>
+<li><a href="../../../../../help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+</ul>
+<ul class="navList">
+<li><a href="../../../../../index.html?org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" target="_top">Frames</a></li>
+<li><a href="HRegionWALFileSystem.html" target="_top">No&nbsp;Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_bottom">
+<li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_bottom");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<div>
+<ul class="subNavList">
+<li>Summary:&nbsp;</li>
+<li>Nested&nbsp;|&nbsp;</li>
+<li><a href="#fields.inherited.from.class.org.apache.hadoop.hbase.regionserver.HRegionFileSystem">Field</a>&nbsp;|&nbsp;</li>
+<li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.summary">Method</a></li>
+</ul>
+<ul class="subNavList">
+<li>Detail:&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method.detail">Method</a></li>
+</ul>
+</div>
+<a name="skip.navbar.bottom">
+<!--   -->
+</a></div>
+<!-- ======== END OF BOTTOM NAVBAR ======= -->
+<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
+</body>
+</html>
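
The page above documents a small API surface: a package-private constructor and one public method on a subclass of HRegionFileSystem. For orientation, here is a minimal source-level sketch of that surface. The signatures and javadoc are taken from the page above; the constructor delegation and the call into HFileArchiver.archiveRecoveredEdits are inferences drawn from other pages in this commit, not a copy of the committed implementation.

    package org.apache.hadoop.hbase.regionserver;

    import java.io.IOException;
    import java.util.Collection;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.backup.HFileArchiver;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.yetus.audience.InterfaceAudience;

    /** A wrapper around the region FileSystem operations, adding WAL-specific operations. */
    @InterfaceAudience.Private
    public class HRegionWALFileSystem extends HRegionFileSystem {

      HRegionWALFileSystem(Configuration conf, FileSystem fs, Path tableDir, RegionInfo regionInfo) {
        // Assumes HRegionFileSystem exposes a (conf, fs, tableDir, regionInfo)
        // constructor to the package, matching the parameters documented above.
        super(conf, fs, tableDir, regionInfo);
      }

      /**
       * Closes and archives the specified store files from the specified family.
       * @param familyName Family that contains the store files
       * @param storeFiles set of store files to remove
       * @throws IOException if the archiving fails
       */
      public void archiveRecoveredEdits(String familyName, Collection<HStoreFile> storeFiles)
          throws IOException {
        // Inferred body: the conf, fs and regionInfoForFs fields are widened to
        // package-private in this commit, and HFileArchiver.archiveRecoveredEdits
        // (documented later in this diff) takes exactly these arguments.
        HFileArchiver.archiveRecoveredEdits(this.conf, this.fs, this.regionInfoForFs,
          Bytes.toBytes(familyName), storeFiles);
      }
    }
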
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
index 7c7595d..8e1e9fc 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
@@ -4227,7 +4227,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionFileSystem.html b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionFileSystem.html
index f2d4296..f903061 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionFileSystem.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionFileSystem.html
@@ -170,6 +170,21 @@
 <!--   -->
 </a>
 <h3>Uses of <a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a> in <a href="../../../../../../org/apache/hadoop/hbase/regionserver/package-summary.html">org.apache.hadoop.hbase.regionserver</a></h3>
+<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing subclasses, and an explanation">
+<caption><span>Subclasses of <a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a> in <a href="../../../../../../org/apache/hadoop/hbase/regionserver/package-summary.html">org.apache.hadoop.hbase.regionserver</a></span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Class and Description</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colFirst"><code>class&nbsp;</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a></span></code>
+<div class="block">A Wrapper for the region FileSystem operations adding WAL specific operations</div>
+</td>
+</tr>
+</tbody>
+</table>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
 <caption><span>Fields in <a href="../../../../../../org/apache/hadoop/hbase/regionserver/package-summary.html">org.apache.hadoop.hbase.regionserver</a> declared as <a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a></span><span class="tabEnd">&nbsp;</span></caption>
 <tr>
@@ -220,10 +235,6 @@
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getRegionFileSystem--">getRegionFileSystem</a></span>()</code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>(package private) <a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a></code></td>
-<td class="colLast"><span class="typeNameLabel">HRegion.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html#getRegionWALFileSystem--">getRegionWALFileSystem</a></span>()</code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
 <td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a></code></td>
 <td class="colLast"><span class="typeNameLabel">HRegionFileSystem.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#openRegionFromFileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-boolean-">openRegionFromFileSystem</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                         org.apache.hadoop.fs.FileSystem&nbsp;fs,
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/class-use/FailedArchiveException.html b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionWALFileSystem.html
similarity index 59%
copy from devapidocs/org/apache/hadoop/hbase/backup/class-use/FailedArchiveException.html
copy to devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionWALFileSystem.html
index ba5ce85..10580db 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/class-use/FailedArchiveException.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionWALFileSystem.html
@@ -4,7 +4,7 @@
 <head>
 <!-- Generated by javadoc -->
 <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>Uses of Class org.apache.hadoop.hbase.backup.FailedArchiveException (Apache HBase 3.0.0-SNAPSHOT API)</title>
+<title>Uses of Class org.apache.hadoop.hbase.regionserver.HRegionWALFileSystem (Apache HBase 3.0.0-SNAPSHOT API)</title>
 <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
 <script type="text/javascript" src="../../../../../../script.js"></script>
 </head>
@@ -12,7 +12,7 @@
 <script type="text/javascript"><!--
     try {
         if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="Uses of Class org.apache.hadoop.hbase.backup.FailedArchiveException (Apache HBase 3.0.0-SNAPSHOT API)";
+            parent.document.title="Uses of Class org.apache.hadoop.hbase.regionserver.HRegionWALFileSystem (Apache HBase 3.0.0-SNAPSHOT API)";
         }
     }
     catch(err) {
@@ -33,7 +33,7 @@
 <ul class="navList" title="Navigation">
 <li><a href="../../../../../../overview-summary.html">Overview</a></li>
 <li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/backup/FailedArchiveException.html" title="class in org.apache.hadoop.hbase.backup">Class</a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">Class</a></li>
 <li class="navBarCell1Rev">Use</li>
 <li><a href="../../../../../../overview-tree.html">Tree</a></li>
 <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
@@ -47,8 +47,8 @@
 <li>Next</li>
 </ul>
 <ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/backup/class-use/FailedArchiveException.html" target="_top">Frames</a></li>
-<li><a href="FailedArchiveException.html" target="_top">No&nbsp;Frames</a></li>
+<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/regionserver/class-use/HRegionWALFileSystem.html" target="_top">Frames</a></li>
+<li><a href="HRegionWALFileSystem.html" target="_top">No&nbsp;Frames</a></li>
 </ul>
 <ul class="navList" id="allclasses_navbar_top">
 <li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
@@ -70,20 +70,20 @@
 </a></div>
 <!-- ========= END OF TOP NAVBAR ========= -->
 <div class="header">
-<h2 title="Uses of Class org.apache.hadoop.hbase.backup.FailedArchiveException" class="title">Uses of Class<br>org.apache.hadoop.hbase.backup.FailedArchiveException</h2>
+<h2 title="Uses of Class org.apache.hadoop.hbase.regionserver.HRegionWALFileSystem" class="title">Uses of Class<br>org.apache.hadoop.hbase.regionserver.HRegionWALFileSystem</h2>
 </div>
 <div class="classUseContainer">
 <ul class="blockList">
 <li class="blockList">
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
-<caption><span>Packages that use <a href="../../../../../../org/apache/hadoop/hbase/backup/FailedArchiveException.html" title="class in org.apache.hadoop.hbase.backup">FailedArchiveException</a></span><span class="tabEnd">&nbsp;</span></caption>
+<caption><span>Packages that use <a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a></span><span class="tabEnd">&nbsp;</span></caption>
 <tr>
 <th class="colFirst" scope="col">Package</th>
 <th class="colLast" scope="col">Description</th>
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.backup">org.apache.hadoop.hbase.backup</a></td>
+<td class="colFirst"><a href="#org.apache.hadoop.hbase.regionserver">org.apache.hadoop.hbase.regionserver</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
 </tbody>
@@ -91,27 +91,20 @@
 </li>
 <li class="blockList">
 <ul class="blockList">
-<li class="blockList"><a name="org.apache.hadoop.hbase.backup">
+<li class="blockList"><a name="org.apache.hadoop.hbase.regionserver">
 <!--   -->
 </a>
-<h3>Uses of <a href="../../../../../../org/apache/hadoop/hbase/backup/FailedArchiveException.html" title="class in org.apache.hadoop.hbase.backup">FailedArchiveException</a> in <a href="../../../../../../org/apache/hadoop/hbase/backup/package-summary.html">org.apache.hadoop.hbase.backup</a></h3>
+<h3>Uses of <a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a> in <a href="../../../../../../org/apache/hadoop/hbase/regionserver/package-summary.html">org.apache.hadoop.hbase.regionserver</a></h3>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../org/apache/hadoop/hbase/backup/package-summary.html">org.apache.hadoop.hbase.backup</a> that throw <a href="../../../../../../org/apache/hadoop/hbase/backup/FailedArchiveException.html" title="class in org.apache.hadoop.hbase.backup">FailedArchiveException</a></span><span class="tabEnd">&nbsp;</span></caption>
+<caption><span>Methods in <a href="../../../../../../org/apache/hadoop/hbase/regionserver/package-summary.html">org.apache.hadoop.hbase.regionserver</a> that return <a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a></span><span class="tabEnd">&nbsp;</span></caption>
 <tr>
 <th class="colFirst" scope="col">Modifier and Type</th>
 <th class="colLast" scope="col">Method and Description</th>
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code>static void</code></td>
-<td class="colLast"><span class="typeNameLabel">HFileArchiver.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveStoreFiles-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.fs.Path-byte:A-java.util.Collection-">archiveStoreFiles</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
-                 org.apache.hadoop.fs.FileSystem&nbsp;fs,
-                 <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
-                 org.apache.hadoop.fs.Path&nbsp;tableDir,
-                 byte[]&nbsp;family,
-                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles)</code>
-<div class="block">Remove the store files, either by archiving them or outright deletion</div>
-</td>
+<td class="colFirst"><code>(package private) <a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a></code></td>
+<td class="colLast"><span class="typeNameLabel">HRegion.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html#getRegionWALFileSystem--">getRegionWALFileSystem</a></span>()</code>&nbsp;</td>
 </tr>
 </tbody>
 </table>
@@ -131,7 +124,7 @@
 <ul class="navList" title="Navigation">
 <li><a href="../../../../../../overview-summary.html">Overview</a></li>
 <li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/backup/FailedArchiveException.html" title="class in org.apache.hadoop.hbase.backup">Class</a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">Class</a></li>
 <li class="navBarCell1Rev">Use</li>
 <li><a href="../../../../../../overview-tree.html">Tree</a></li>
 <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
@@ -145,8 +138,8 @@
 <li>Next</li>
 </ul>
 <ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/backup/class-use/FailedArchiveException.html" target="_top">Frames</a></li>
-<li><a href="FailedArchiveException.html" target="_top">No&nbsp;Frames</a></li>
+<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/regionserver/class-use/HRegionWALFileSystem.html" target="_top">Frames</a></li>
+<li><a href="HRegionWALFileSystem.html" target="_top">No&nbsp;Frames</a></li>
 </ul>
 <ul class="navList" id="allclasses_navbar_bottom">
 <li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HStoreFile.html b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HStoreFile.html
index b8903ff..03caa51 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HStoreFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HStoreFile.html
@@ -149,6 +149,24 @@
 </tr>
 <tbody>
 <tr class="altColor">
+<td class="colFirst"><code>private static void</code></td>
+<td class="colLast"><span class="typeNameLabel">HFileArchiver.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archive-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-byte:A-java.util.Collection-org.apache.hadoop.fs.Path-">archive</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+       <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
+       byte[]&nbsp;family,
+       <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles,
+       org.apache.hadoop.fs.Path&nbsp;storeArchiveDir)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static void</code></td>
+<td class="colLast"><span class="typeNameLabel">HFileArchiver.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveRecoveredEdits-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-byte:A-java.util.Collection-">archiveRecoveredEdits</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+                     org.apache.hadoop.fs.FileSystem&nbsp;fs,
+                     <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
+                     byte[]&nbsp;family,
+                     <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;replayedEdits)</code>
+<div class="block">Archive recovered edits using existing logic for archiving store files.</div>
+</td>
+</tr>
+<tr class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">HFileArchiver.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/backup/HFileArchiver.html#archiveStoreFiles-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.fs.Path-byte:A-java.util.Collection-">archiveStoreFiles</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                  org.apache.hadoop.fs.FileSystem&nbsp;fs,
@@ -960,26 +978,33 @@
 </td>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><span class="typeNameLabel">HRegionWALFileSystem.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html#archiveRecoveredEdits-java.lang.String-java.util.Collection-">archiveRecoveredEdits</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;familyName,
+                     <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;storeFiles)</code>
+<div class="block">Closes and archives the specified store files from the specified family.</div>
+</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#clearCompactedfiles-java.util.List-">clearCompactedfiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.ha [...]
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">AbstractMultiFileWriter.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#commitWriters-long-boolean-java.util.Collection-">commitWriters</a></span>(long&nbsp;maxSeqId,
              boolean&nbsp;majorCompaction,
              <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;storeFiles)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#completeCompaction-java.util.Collection-">completeCompaction</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class  [...]
 <div class="block">Update counts.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.CompactionOrFlushMergeCopy.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.CompactionOrFlushMergeCopy.html#deleteResults-java.util.Collection-">deleteResults</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../ [...]
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#doCompaction-org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl-java.util.Collection-org.apache.hadoop.hbase.security.User-long-java.util.List-">doCompaction</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache [...]
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToCompact,
@@ -987,63 +1012,63 @@
             long&nbsp;compactionStartTime,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;newFiles)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#ensureEdgeStripeMetadata-java.util.ArrayList-boolean-">ensureEdgeStripeMetadata</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../../org/apache/hadoop/ [...]
                         boolean&nbsp;isFirst)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#findExpiredFiles-org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList-long-java.util.List-java.util.Collection-">findExpiredFiles</a></span>(org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionser [...]
                 long&nbsp;maxTs,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesCompacting,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;expiredStoreFiles)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#findExpiredFiles-org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList-long-java.util.List-java.util.Collection-">findExpiredFiles</a></span>(org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionser [...]
                 long&nbsp;maxTs,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesCompacting,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;expiredStoreFiles)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#findExpiredFiles-org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList-long-java.util.List-java.util.Collection-">findExpiredFiles</a></span>(org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionser [...]
                 long&nbsp;maxTs,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesCompacting,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;expiredStoreFiles)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalInt.html?is-external=true" title="class or interface in java.util">OptionalInt</a></code></td>
 <td class="colLast"><span class="typeNameLabel">StoreUtils.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreUtils.html#getDeterministicRandomSeed-java.util.Collection-">getDeterministicRandomSeed</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore [...]
 <div class="block">Creates a deterministic hash code for store file collection.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreUtils.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreUtils.html#getLargestFile-java.util.Collection-">getLargestFile</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class  [...]
 <div class="block">Gets the largest file (with reader) out of the list of files.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static long</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreUtils.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreUtils.html#getLowestTimestamp-java.util.Collection-">getLowestTimestamp</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title [...]
 <div class="block">Gets lowest timestamp from candidate StoreFiles</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a></code></td>
 <td class="colLast"><span class="typeNameLabel">StoreUtils.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreUtils.html#getMaxMemStoreTSInList-java.util.Collection-">getMaxMemStoreTSInList</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.htm [...]
 <div class="block">Return the largest memstoreTS found across all storefiles in the given list.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a></code></td>
 <td class="colLast"><span class="typeNameLabel">StoreUtils.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreUtils.html#getMaxSequenceIdInList-java.util.Collection-">getMaxSequenceIdInList</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.htm [...]
 <div class="block">Return the highest sequence ID found across all storefiles in the given list.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getScanners-java.util.List-boolean-boolean-boolean-boolean-org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher-byte:A-byte:A-long-boolean-">getScanners</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt; [...]
            boolean&nbsp;cacheBlocks,
@@ -1059,7 +1084,7 @@
  (that happens further down the line).</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getScanners-java.util.List-boolean-boolean-boolean-org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher-byte:A-boolean-byte:A-boolean-long-boolean-">getScanners</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List [...]
            boolean&nbsp;cacheBlocks,
@@ -1076,7 +1101,7 @@
  (that happens further down the line).</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileScanner</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreFileScanner.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html#getScannersForCompaction-java.util.Collection-boolean-long-">getScannersForCompaction</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbas [...]
                         boolean&nbsp;canUseDropBehind,
@@ -1084,7 +1109,7 @@
 <div class="block">Get scanners for compaction.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileScanner</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreFileScanner.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html#getScannersForStoreFiles-java.util.Collection-boolean-boolean-boolean-boolean-long-">getScannersForStoreFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../. [...]
                         boolean&nbsp;cacheBlocks,
@@ -1095,7 +1120,7 @@
 <div class="block">Return an array of scanners corresponding to the given set of store files.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileScanner</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreFileScanner.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html#getScannersForStoreFiles-java.util.Collection-boolean-boolean-boolean-boolean-org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher-long-">getScannersForStoreFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or  [...]
                         boolean&nbsp;cacheBlocks,
@@ -1108,95 +1133,95 @@
  ScanQueryMatcher for each store file scanner for further optimization</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;byte[]&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreUtils.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreUtils.html#getSplitPoint-java.util.Collection-org.apache.hadoop.hbase.CellComparator-">getSplitPoint</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/region [...]
              <a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator)</code>
 <div class="block">Gets the mid point of the largest file passed in as split point.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStorefilesSize-java.util.Collection-java.util.function.Predicate-">getStorefilesSize</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HSt [...]
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Predicate.html?is-external=true" title="class or interface in java.util.function">Predicate</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;predicate)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStorefilesSize-java.util.Collection-java.util.function.Predicate-">getStorefilesSize</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HSt [...]
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Predicate.html?is-external=true" title="class or interface in java.util.function">Predicate</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;predicate)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getTotalSize-java.util.Collection-">getTotalSize</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apach [...]
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getTotalUncompressedBytes-java.util.List-">getTotalUncompressedBytes</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in o [...]
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileManager.html#getUnneededFiles-long-java.util.List-">getUnneededFiles</a></span>(long&nbsp;maxTs,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesCompacting)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">DefaultStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html#getUnneededFiles-long-java.util.List-">getUnneededFiles</a></span>(long&nbsp;maxTs,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesCompacting)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#getUnneededFiles-long-java.util.List-">getUnneededFiles</a></span>(long&nbsp;maxTs,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesCompacting)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreUtils.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreUtils.html#hasReferences-java.util.Collection-">hasReferences</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in [...]
 <div class="block">Determines whether any files in the collection are references.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#insertFileIntoStripe-java.util.ArrayList-org.apache.hadoop.hbase.regionserver.HStoreFile-">insertFileIntoStripe</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../. [...]
                     <a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;sf)</code>
 <div class="block">Inserts a file in the correct place (by seqnum) in a stripe copy.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileManager.html#insertNewFiles-java.util.Collection-">insertNewFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" t [...]
 <div class="block">Adds new files, either for from MemStore flush or bulk insert, into the structure.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">DefaultStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html#insertNewFiles-java.util.Collection-">insertNewFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HSto [...]
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#insertNewFiles-java.util.Collection-">insertNewFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore [...]
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileManager.html#loadFiles-java.util.List-">loadFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.ha [...]
 <div class="block">Loads the initial store files into empty StoreFileManager.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">DefaultStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html#loadFiles-java.util.List-">loadFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in [...]
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#loadFiles-java.util.List-">loadFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in o [...]
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#loadUnclassifiedStoreFiles-java.util.List-">loadUnclassifiedStoreFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/ [...]
 <div class="block">Loads initial store files that were picked up from some physical location pertaining to
  this store (presumably).</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#logCompactionEndMessage-org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl-java.util.List-long-long-">logCompactionEndMessage</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compa [...]
                        <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs,
@@ -1205,43 +1230,43 @@
 <div class="block">Log a very elaborate compaction completion message.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#markCompactedAway-java.util.Collection-">markCompactedAway</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/ [...]
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.CompactionOrFlushMergeCopy.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.CompactionOrFlushMergeCopy.html#mergeResults-java.util.Collection-java.util.Collection-">mergeResults</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt [...]
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;results)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.CompactionOrFlushMergeCopy.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.CompactionOrFlushMergeCopy.html#mergeResults-java.util.Collection-java.util.Collection-">mergeResults</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt [...]
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;results)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">DateTieredStoreEngine.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.html#needsCompaction-java.util.List-">needsCompaction</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title=" [...]
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">DefaultStoreEngine.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/DefaultStoreEngine.html#needsCompaction-java.util.List-">needsCompaction</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class  [...]
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreEngine.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreEngine.html#needsCompaction-java.util.List-">needsCompaction</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in [...]
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>abstract boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreEngine.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html#needsCompaction-java.util.List-">needsCompaction</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache. [...]
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#notifyChangedReadersObservers-java.util.List-">notifyChangedReadersObservers</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="cl [...]
 <div class="block">Notify all observers that set of Readers has changed.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">RegionCoprocessorHost.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html#postCompactSelection-org.apache.hadoop.hbase.regionserver.HStore-java.util.List-org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker-org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest-org.apache.hadoop.hbase.security.User-">postCompactSelection</a></ [...]
                     <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;selected,
@@ -1252,7 +1277,7 @@
  candidates.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">RegionCoprocessorHost.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html#preCompactSelection-org.apache.hadoop.hbase.regionserver.HStore-java.util.List-org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker-org.apache.hadoop.hbase.security.User-">preCompactSelection</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver [...]
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;candidates,
@@ -1262,90 +1287,90 @@
  available candidates.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">DateTieredStoreEngine.DateTieredCompactionContext.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.DateTieredCompactionContext.html#preSelect-java.util.List-">preSelect</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop [...]
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">DefaultStoreEngine.DefaultCompactionContext.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/DefaultStoreEngine.DefaultCompactionContext.html#preSelect-java.util.List-">preSelect</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regio [...]
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreEngine.StripeCompaction.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreEngine.StripeCompaction.html#preSelect-java.util.List-">preSelect</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile [...]
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.CompactionOrFlushMergeCopy.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.CompactionOrFlushMergeCopy.html#processNewCandidateStripes-java.util.TreeMap-">processNewCandidateStripes</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/TreeMap.html?is-external=true" title="class or interface in java.util">TreeMap</a>&lt;b [...]
 <div class="block">See <a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#addCompactionResults-java.util.Collection-java.util.Collection-"><code>StripeStoreFileManager.addCompactionResults(Collection, Collection)</code></a> - updates the stripe list with
  new candidate stripes/removes old stripes; produces new set of stripe end rows.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#removeCompactedfiles-java.util.Collection-">removeCompactedfiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="cl [...]
 <div class="block">Archives and removes the compacted files</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileManager.html#removeCompactedFiles-java.util.Collection-">removeCompactedFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore [...]
 <div class="block">Remove the compacted files</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">DefaultStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html#removeCompactedFiles-java.util.Collection-">removeCompactedFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regio [...]
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#removeCompactedFiles-java.util.Collection-">removeCompactedFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regions [...]
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">HRegionFileSystem.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#removeStoreFiles-java.lang.String-java.util.Collection-">removeStoreFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;familyName,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;storeFiles)</code>
 <div class="block">Closes and archives the specified store files from the specified family.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#replaceStoreFiles-java.util.Collection-java.util.Collection-">replaceStoreFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile. [...]
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;result)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#replaceStoreFiles-java.util.Collection-java.util.Collection-">replaceStoreFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile. [...]
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;result)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">DateTieredStoreEngine.DateTieredCompactionContext.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.DateTieredCompactionContext.html#select-java.util.List-boolean-boolean-boolean-">select</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../.. [...]
       boolean&nbsp;isUserCompaction,
       boolean&nbsp;mayUseOffPeak,
       boolean&nbsp;forceMajor)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">DefaultStoreEngine.DefaultCompactionContext.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/DefaultStoreEngine.DefaultCompactionContext.html#select-java.util.List-boolean-boolean-boolean-">select</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/ [...]
       boolean&nbsp;isUserCompaction,
       boolean&nbsp;mayUseOffPeak,
       boolean&nbsp;forceMajor)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreEngine.StripeCompaction.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreEngine.StripeCompaction.html#select-java.util.List-boolean-boolean-boolean-">select</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regio [...]
       boolean&nbsp;isUserCompaction,
       boolean&nbsp;mayUseOffPeak,
       boolean&nbsp;forceMajor)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private byte[]</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreFileWriter.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html#toCompactionEventTrackerBytes-java.util.Collection-">toCompactionEventTrackerBytes</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/reg [...]
 <div class="block">Used when write <a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html#COMPACTION_EVENT_KEY"><code>COMPACTION_EVENT_KEY</code></a> to new file's file info.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileManager.html#updateCandidateFilesForRowKeyBefore-java.util.Iterator-org.apache.hadoop.hbase.KeyValue-org.apache.hadoop.hbase.Cell-">updateCandidateFilesForRowKeyBefore</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util"> [...]
                                    <a href="../../../../../../org/apache/hadoop/hbase/KeyValue.html" title="class in org.apache.hadoop.hbase">KeyValue</a>&nbsp;targetKey,
@@ -1353,13 +1378,13 @@
 <div class="block">Updates the candidate list for finding row key before.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">DefaultStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html#updateCandidateFilesForRowKeyBefore-java.util.Iterator-org.apache.hadoop.hbase.KeyValue-org.apache.hadoop.hbase.Cell-">updateCandidateFilesForRowKeyBefore</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface  [...]
                                    <a href="../../../../../../org/apache/hadoop/hbase/KeyValue.html" title="class in org.apache.hadoop.hbase">KeyValue</a>&nbsp;targetKey,
                                    <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;candidate)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">StripeStoreFileManager.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html#updateCandidateFilesForRowKeyBefore-java.util.Iterator-org.apache.hadoop.hbase.KeyValue-org.apache.hadoop.hbase.Cell-">updateCandidateFilesForRowKeyBefore</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in [...]
                                    <a href="../../../../../../org/apache/hadoop/hbase/KeyValue.html" title="class in org.apache.hadoop.hbase">KeyValue</a>&nbsp;targetKey,
@@ -1369,19 +1394,19 @@
 for details on these methods.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">StoreScanner.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreScanner.html#updateReaders-java.util.List-java.util.List-">updateReaders</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class i [...]
              <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;memStoreScanners)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">ChangedReadersObserver.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html#updateReaders-java.util.List-java.util.List-">updateReaders</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile. [...]
              <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;memStoreScanners)</code>
 <div class="block">Notify observers.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#updateSpaceQuotaAfterFileReplacement-org.apache.hadoop.hbase.quotas.RegionSizeStore-org.apache.hadoop.hbase.client.RegionInfo-java.util.Collection-java.util.Collection-">updateSpaceQuotaAfterFileReplacement</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in o [...]
                                     <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
@@ -1391,7 +1416,7 @@
  and adding in the size for new files.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#updateSpaceQuotaAfterFileReplacement-org.apache.hadoop.hbase.quotas.RegionSizeStore-org.apache.hadoop.hbase.client.RegionInfo-java.util.Collection-java.util.Collection-">updateSpaceQuotaAfterFileReplacement</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in o [...]
                                     <a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
@@ -1401,25 +1426,25 @@
  and adding in the size for new files.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#updateStorefiles-java.util.List-long-">updateStorefiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.had [...]
                 long&nbsp;snapshotId)</code>
 <div class="block">Change storeFiles adding into place the Reader produced by this new flush.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter.Builder</a></code></td>
 <td class="colLast"><span class="typeNameLabel">StoreFileWriter.Builder.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html#withCompactedFilesSupplier-java.util.function.Supplier-">withCompactedFilesSupplier</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Supplier.html?is-external=true" title="class or interface in java.util.function">Supplier</a>&lt;<a href="https://docs.o [...]
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#writeCompactionWalRecord-java.util.Collection-java.util.Collection-">writeCompactionWalRecord</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserv [...]
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;newFiles)</code>
 <div class="block">Writes the compaction WAL record.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#writeCompactionWalRecord-java.util.Collection-java.util.Collection-">writeCompactionWalRecord</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserv [...]
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;newFiles)</code>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html
index 5bc5b60..afe2701 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-frame.html
@@ -146,6 +146,7 @@
 <li><a href="HRegionServer.PeriodicMemStoreFlusher.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HRegionServer.PeriodicMemStoreFlusher</a></li>
 <li><a href="HRegionServer.SystemExitWhenAbortTimeout.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HRegionServer.SystemExitWhenAbortTimeout</a></li>
 <li><a href="HRegionServerCommandLine.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HRegionServerCommandLine</a></li>
+<li><a href="HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HRegionWALFileSystem</a></li>
 <li><a href="HStore.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HStore</a></li>
 <li><a href="HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">HStoreFile</a></li>
 <li><a href="ImmutableMemStoreLAB.html" title="class in org.apache.hadoop.hbase.regionserver" target="classFrame">ImmutableMemStoreLAB</a></li>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
index e53cd24..ecc4a93 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-summary.html
@@ -884,31 +884,37 @@
 </td>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a></td>
+<td class="colLast">
+<div class="block">A Wrapper for the region FileSystem operations adding WAL specific operations</div>
+</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a></td>
 <td class="colLast">
 <div class="block">A Store holds a column family in a Region.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a></td>
 <td class="colLast">
 <div class="block">A Store data file.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html" title="class in org.apache.hadoop.hbase.regionserver">ImmutableMemStoreLAB</a></td>
 <td class="colLast">
 <div class="block">A MemStoreLAB implementation which wraps N MemStoreLABs.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ImmutableSegment.html" title="class in org.apache.hadoop.hbase.regionserver">ImmutableSegment</a></td>
 <td class="colLast">
 <div class="block">ImmutableSegment is an abstract class that extends the API supported by a <a href="../../../../../org/apache/hadoop/hbase/regionserver/Segment.html" title="class in org.apache.hadoop.hbase.regionserver"><code>Segment</code></a>,
  and is not needed for a <a href="../../../../../org/apache/hadoop/hbase/regionserver/MutableSegment.html" title="class in org.apache.hadoop.hbase.regionserver"><code>MutableSegment</code></a>.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.html" title="class in org.apache.hadoop.hbase.regionserver">IncreasingToUpperBoundRegionSplitPolicy</a></td>
 <td class="colLast">
 <div class="block">Split size is the number of regions that are on this server that all are
@@ -916,14 +922,14 @@
  region split size, whichever is smaller.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/InternalScan.html" title="class in org.apache.hadoop.hbase.regionserver">InternalScan</a></td>
 <td class="colLast">
 <div class="block">Special scanner, currently used for increment operations to
  allow additional server-side arguments for Scan operations.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.html" title="class in org.apache.hadoop.hbase.regionserver">KeyPrefixRegionSplitPolicy</a></td>
 <td class="colLast">
 <div class="block">A custom RegionSplitPolicy implementing a SplitPolicy that groups
@@ -932,17 +938,17 @@
  This ensures that a region is not split "inside" a prefix of a row key.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueHeap.html" title="class in org.apache.hadoop.hbase.regionserver">KeyValueHeap</a></td>
 <td class="colLast">
 <div class="block">Implements a heap merge across any number of KeyValueScanners.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueHeap.KVScannerComparator.html" title="class in org.apache.hadoop.hbase.regionserver">KeyValueHeap.KVScannerComparator</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/LeaseManager.html" title="class in org.apache.hadoop.hbase.regionserver">LeaseManager</a></td>
 <td class="colLast">
 <div class="block">Leases
@@ -951,46 +957,46 @@
  clients that occasionally send heartbeats.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/LeaseManager.Lease.html" title="class in org.apache.hadoop.hbase.regionserver">LeaseManager.Lease</a></td>
 <td class="colLast">
 <div class="block">This class tracks a single Lease.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/LogRoller.html" title="class in org.apache.hadoop.hbase.regionserver">LogRoller</a></td>
 <td class="colLast">
 <div class="block">Runs periodically to determine if the WAL should be rolled.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MasterFifoRpcSchedulerFactory.html" title="class in org.apache.hadoop.hbase.regionserver">MasterFifoRpcSchedulerFactory</a></td>
 <td class="colLast">
 <div class="block">Factory to use when you want to use the <a href="../../../../../org/apache/hadoop/hbase/ipc/MasterFifoRpcScheduler.html" title="class in org.apache.hadoop.hbase.ipc"><code>MasterFifoRpcScheduler</code></a></div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreCompactionStrategy</a></td>
 <td class="colLast">
 <div class="block">MemStoreCompactionStrategy is the root of a class hierarchy which defines the strategy for
  choosing the next action to apply in an (in-memory) memstore compaction.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactor.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreCompactor</a></td>
 <td class="colLast">
 <div class="block">The ongoing MemStore Compaction manager, dispatches a solo running compaction and interrupts
  the compaction if requested.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreCompactorSegmentsIterator</a></td>
 <td class="colLast">
 <div class="block">The MemStoreCompactorSegmentsIterator extends MemStoreSegmentsIterator
  and performs the scan for compaction operation meaning it is based on SQM</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreFlusher.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreFlusher</a></td>
 <td class="colLast">
 <div class="block">Thread that flushes cache on request
@@ -1000,26 +1006,26 @@
  sleep time which is invariant.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushRegionEntry.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreFlusher.FlushRegionEntry</a></td>
 <td class="colLast">
 <div class="block">Datastructure used in the flush queue.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreLABImpl</a></td>
 <td class="colLast">
 <div class="block">A memstore-local allocation buffer.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreMergerSegmentsIterator</a></td>
 <td class="colLast">
 <div class="block">The MemStoreMergerSegmentsIterator extends MemStoreSegmentsIterator
  and performs the scan for simple merge operation meaning it is NOT based on SQM</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSegmentsIterator.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSegmentsIterator</a></td>
 <td class="colLast">
 <div class="block">The MemStoreSegmentsIterator is designed to perform one iteration over given list of segments
@@ -1028,43 +1034,43 @@
  in each period of time</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a></td>
 <td class="colLast">
 <div class="block">Data structure of three longs.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSnapshot.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSnapshot</a></td>
 <td class="colLast">
 <div class="block">Holds details of the snapshot taken on a MemStore.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsHeapMemoryManager.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsHeapMemoryManager</a></td>
 <td class="colLast">
 <div class="block">This class is for maintaining the various regionserver's heap memory manager statistics and
  publishing them through the metrics interfaces.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsHeapMemoryManagerSourceImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsHeapMemoryManagerSourceImpl</a></td>
 <td class="colLast">
 <div class="block">Hadoop2 implementation of MetricsHeapMemoryManagerSource.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegion.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegion</a></td>
 <td class="colLast">
 <div class="block">This is the glue between the HRegion and whatever hadoop shim layer
  is loaded (hbase-hadoop1-compat or hbase-hadoop2-compat).</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionAggregateSourceImpl</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServer</a></td>
 <td class="colLast">
 <div class="block">
@@ -1072,134 +1078,134 @@
  and publishing them through the metrics interfaces.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerQuotaSourceImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServerQuotaSourceImpl</a></td>
 <td class="colLast">
 <div class="block">Implementation of <a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerQuotaSource.html" title="interface in org.apache.hadoop.hbase.regionserver"><code>MetricsRegionServerQuotaSource</code></a>.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServerSourceFactoryImpl</a></td>
 <td class="colLast">
 <div class="block">Factory to create MetricsRegionServerSource when given a  MetricsRegionServerWrapper</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServerSourceImpl</a></td>
 <td class="colLast">
 <div class="block">Hadoop2 implementation of MetricsRegionServerSource.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServerWrapperImpl</a></td>
 <td class="colLast">
 <div class="block">Impl for exposing HRegionServer Information through Hadoop's metrics 2 system.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionSourceImpl</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionWrapperImpl</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTable.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsTable</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsTableAggregateSourceImpl</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTableLatenciesImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsTableLatenciesImpl</a></td>
 <td class="colLast">
 <div class="block">Implementation of <a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTableLatencies.html" title="interface in org.apache.hadoop.hbase.regionserver"><code>MetricsTableLatencies</code></a> to track latencies for one table in a
  RegionServer.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTableLatenciesImpl.TableHistograms.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsTableLatenciesImpl.TableHistograms</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTableQueryMeterImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsTableQueryMeterImpl</a></td>
 <td class="colLast">
 <div class="block">Implementation of <a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTableQueryMeter.html" title="interface in org.apache.hadoop.hbase.regionserver"><code>MetricsTableQueryMeter</code></a> to track query per second for each table in
  a RegionServer.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTableQueryMeterImpl.TableMeters.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsTableQueryMeterImpl.TableMeters</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsTableSourceImpl</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsTableWrapperAggregateImpl</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.MetricsTableValues.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsTableWrapperAggregateImpl.MetricsTableValues</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsUserAggregateFactory.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsUserAggregateFactory</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsUserAggregateImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsUserAggregateImpl</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsUserAggregateSourceImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsUserAggregateSourceImpl</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsUserSourceImpl.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsUserSourceImpl</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MiniBatchOperationInProgress.html" title="class in org.apache.hadoop.hbase.regionserver">MiniBatchOperationInProgress</a>&lt;T&gt;</td>
 <td class="colLast">
 <div class="block">Wraps together the mutations which are applied as a batch to the region and their operation
  status and WALEdits.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MobReferenceOnlyFilter.html" title="class in org.apache.hadoop.hbase.regionserver">MobReferenceOnlyFilter</a></td>
 <td class="colLast">
 <div class="block">A filter that returns the cells which have mob reference tags.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MobStoreScanner.html" title="class in org.apache.hadoop.hbase.regionserver">MobStoreScanner</a></td>
 <td class="colLast">
 <div class="block">Scanner scans both the memstore and the MOB Store.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.html" title="class in org.apache.hadoop.hbase.regionserver">MultiVersionConcurrencyControl</a></td>
 <td class="colLast">
 <div class="block">Manages the read/write consistency.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.WriteEntry.html" title="class in org.apache.hadoop.hbase.regionserver">MultiVersionConcurrencyControl.WriteEntry</a></td>
 <td class="colLast">
 <div class="block">Write number and whether write has completed given out at start of a write transaction.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/MutableSegment.html" title="class in org.apache.hadoop.hbase.regionserver">MutableSegment</a></td>
 <td class="colLast">
 <div class="block">A mutable segment in memstore, specifically the active segment.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.html" title="class in org.apache.hadoop.hbase.regionserver">NoLimitScannerContext</a></td>
 <td class="colLast">
 <div class="block">This is a special <a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.html" title="class in org.apache.hadoop.hbase.regionserver"><code>ScannerContext</code></a> subclass that is designed to be used globally when
@@ -1207,207 +1213,207 @@
  or <a href="../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html#next-java.util.List-"><code>InternalScanner.next(java.util.List)</code></a>.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.html" title="class in org.apache.hadoop.hbase.regionserver">NonLazyKeyValueScanner</a></td>
 <td class="colLast">
 <div class="block">A "non-lazy" scanner which always does a real seek operation.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/NonReversedNonLazyKeyValueScanner.html" title="class in org.apache.hadoop.hbase.regionserver">NonReversedNonLazyKeyValueScanner</a></td>
 <td class="colLast">
 <div class="block">A "non-reversed &amp; non-lazy" scanner which does not support backward scanning
  and always does a real seek operation.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/NonThreadSafeMemStoreSizing.html" title="class in org.apache.hadoop.hbase.regionserver">NonThreadSafeMemStoreSizing</a></td>
 <td class="colLast">
 <div class="block">Accounting of current heap and data sizes.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/NoTagByteBufferChunkKeyValue.html" title="class in org.apache.hadoop.hbase.regionserver">NoTagByteBufferChunkKeyValue</a></td>
 <td class="colLast">
 <div class="block">ByteBuffer based cell which has the chunkid at the 0th offset and with no tags</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/OffheapChunk.html" title="class in org.apache.hadoop.hbase.regionserver">OffheapChunk</a></td>
 <td class="colLast">
 <div class="block">An off heap chunk implementation.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/OnheapChunk.html" title="class in org.apache.hadoop.hbase.regionserver">OnheapChunk</a></td>
 <td class="colLast">
 <div class="block">An on heap chunk implementation.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/OperationStatus.html" title="class in org.apache.hadoop.hbase.regionserver">OperationStatus</a></td>
 <td class="colLast">
 <div class="block">This class stores the Operation status code and the exception message
  that occurs in case of failure of operations like put, delete, etc.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html" title="class in org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost</a></td>
 <td class="colLast">
 <div class="block">Implements the coprocessor environment and runtime support for coprocessors
  loaded within a <a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.html" title="interface in org.apache.hadoop.hbase.regionserver"><code>Region</code></a>.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html" title="class in org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost.RegionEnvironment</a></td>
 <td class="colLast">
 <div class="block">Encapsulation of the environment of each coprocessor</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors.html" title="class in org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors</a></td>
 <td class="colLast">
 <div class="block">Special version of RegionEnvironment that exposes RegionServerServices for Core
  Coprocessors only.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html" title="class in org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost.TableCoprocessorAttribute</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerAccounting.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerAccounting</a></td>
 <td class="colLast">
 <div class="block">RegionServerAccounting keeps record of some basic real time information about
  the Region Server.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerCoprocessorHost</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.RegionServerEnvironment.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerCoprocessorHost.RegionServerEnvironment</a></td>
 <td class="colLast">
 <div class="block">Coprocessor environment extension providing access to region server
  related services.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.RegionServerEnvironmentForCoreCoprocessors.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerCoprocessorHost.RegionServerEnvironmentForCoreCoprocessors</a></td>
 <td class="colLast">
 <div class="block">Special version of RegionServerEnvironment that exposes RegionServerServices for Core
  Coprocessors only.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerServices.PostOpenDeployContext</a></td>
 <td class="colLast">
 <div class="block">Context for postOpenDeployTasks().</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerServices.RegionStateTransitionContext</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerTableMetrics.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerTableMetrics</a></td>
 <td class="colLast">
 <div class="block">Captures operation metrics by table.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServicesForStores.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServicesForStores</a></td>
 <td class="colLast">
 <div class="block">Services a Store needs from a Region.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html" title="class in org.apache.hadoop.hbase.regionserver">RegionSplitPolicy</a></td>
 <td class="colLast">
 <div class="block">A split policy determines when a Region should be split.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RemoteProcedureResultReporter.html" title="class in org.apache.hadoop.hbase.regionserver">RemoteProcedureResultReporter</a></td>
 <td class="colLast">
 <div class="block">A thread which calls <code>reportProcedureDone</code> to tell master the result of a remote procedure.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.html" title="class in org.apache.hadoop.hbase.regionserver">ReversedKeyValueHeap</a></td>
 <td class="colLast">
 <div class="block">ReversedKeyValueHeap is used for supporting reversed scanning.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.ReversedKVScannerComparator.html" title="class in org.apache.hadoop.hbase.regionserver">ReversedKeyValueHeap.ReversedKVScannerComparator</a></td>
 <td class="colLast">
 <div class="block">In ReversedKVScannerComparator, we compare the row of scanners' peek values
  first, sort bigger one before the smaller one.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ReversedMobStoreScanner.html" title="class in org.apache.hadoop.hbase.regionserver">ReversedMobStoreScanner</a></td>
 <td class="colLast">
 <div class="block">ReversedMobStoreScanner extends from ReversedStoreScanner, and is used to support reversed
  scanning in both the memstore and the MOB store.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ReversedRegionScannerImpl.html" title="class in org.apache.hadoop.hbase.regionserver">ReversedRegionScannerImpl</a></td>
 <td class="colLast">
 <div class="block">ReversibleRegionScannerImpl extends from RegionScannerImpl, and is used to
  support reversed scanning.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.html" title="class in org.apache.hadoop.hbase.regionserver">ReversedStoreScanner</a></td>
 <td class="colLast">
 <div class="block">ReversedStoreScanner extends from StoreScanner, and is used to support
  reversed scanning.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSDumpServlet.html" title="class in org.apache.hadoop.hbase.regionserver">RSDumpServlet</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices</a></td>
 <td class="colLast">
 <div class="block">Implements the regionserver RPC services.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices.RegionScannerCloseCallBack</a></td>
 <td class="colLast">
 <div class="block">An Rpc callback for closing a RegionScanner.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices.RegionScannerHolder</a></td>
 <td class="colLast">
 <div class="block">Holder class which holds the RegionScanner, nextCallSeq and RpcCallbacks together.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices.RegionScannersCloseCallBack</a></td>
 <td class="colLast">
 <div class="block">An RpcCallBack that creates a list of scanners that needs to perform callBack operation on
  completion of multiGets.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSStatusServlet.html" title="class in org.apache.hadoop.hbase.regionserver">RSStatusServlet</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a></td>
 <td class="colLast">
 <div class="block">Immutable information for scans over a store.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.html" title="class in org.apache.hadoop.hbase.regionserver">ScannerContext</a></td>
 <td class="colLast">
 <div class="block">ScannerContext instances encapsulate limit tracking AND progress towards those limits during
@@ -1415,92 +1421,92 @@
  <a href="../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html#next-java.util.List-"><code>InternalScanner.next(java.util.List)</code></a>.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.Builder.html" title="class in org.apache.hadoop.hbase.regionserver">ScannerContext.Builder</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitFields.html" title="class in org.apache.hadoop.hbase.regionserver">ScannerContext.LimitFields</a></td>
 <td class="colLast">
 <div class="block">The different fields that can be used as limits in calls to
  <a href="../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html#next-java.util.List-"><code>InternalScanner.next(java.util.List)</code></a> and <a href="../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html#next-java.util.List-"><code>InternalScanner.next(java.util.List)</code></a></div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.ProgressFields.html" title="class in org.apache.hadoop.hbase.regionserver">ScannerContext.ProgressFields</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerIdGenerator.html" title="class in org.apache.hadoop.hbase.regionserver">ScannerIdGenerator</a></td>
 <td class="colLast">
 <div class="block">Generate a new style scanner id to prevent collision with previous started server or other RSs.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.html" title="class in org.apache.hadoop.hbase.regionserver">SecureBulkLoadManager</a></td>
 <td class="colLast">
 <div class="block">Bulk loads in secure mode.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.SecureBulkLoadListener.html" title="class in org.apache.hadoop.hbase.regionserver">SecureBulkLoadManager.SecureBulkLoadListener</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/Segment.html" title="class in org.apache.hadoop.hbase.regionserver">Segment</a></td>
 <td class="colLast">
 <div class="block">This is an abstraction of a segment maintained in a memstore, e.g., the active
  cell set or its snapshot.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/SegmentFactory.html" title="class in org.apache.hadoop.hbase.regionserver">SegmentFactory</a></td>
 <td class="colLast">
 <div class="block">A singleton store segment factory.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/SegmentScanner.html" title="class in org.apache.hadoop.hbase.regionserver">SegmentScanner</a></td>
 <td class="colLast">
 <div class="block">A scanner of a single memstore segment.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ServerNonceManager.html" title="class in org.apache.hadoop.hbase.regionserver">ServerNonceManager</a></td>
 <td class="colLast">
 <div class="block">Implementation of nonce manager that stores nonces in a hash map and cleans them up after
  some time; if nonce group/client ID is supplied, nonces are stored by client ID.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ServerNonceManager.OperationContext.html" title="class in org.apache.hadoop.hbase.regionserver">ServerNonceManager.OperationContext</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ShutdownHook.html" title="class in org.apache.hadoop.hbase.regionserver">ShutdownHook</a></td>
 <td class="colLast">
 <div class="block">Manage regionserver shutdown hooks.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ShutdownHook.DoNothingStoppable.html" title="class in org.apache.hadoop.hbase.regionserver">ShutdownHook.DoNothingStoppable</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ShutdownHook.DoNothingThread.html" title="class in org.apache.hadoop.hbase.regionserver">ShutdownHook.DoNothingThread</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ShutdownHook.ShutdownHookThread.html" title="class in org.apache.hadoop.hbase.regionserver">ShutdownHook.ShutdownHookThread</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/SimpleRpcSchedulerFactory.html" title="class in org.apache.hadoop.hbase.regionserver">SimpleRpcSchedulerFactory</a></td>
 <td class="colLast">
 <div class="block">Constructs a <a href="../../../../../org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.html" title="class in org.apache.hadoop.hbase.ipc"><code>SimpleRpcScheduler</code></a>.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/SnapshotSegmentScanner.html" title="class in org.apache.hadoop.hbase.regionserver">SnapshotSegmentScanner</a></td>
 <td class="colLast">
 <div class="block">A basic SegmentScanner used against an ImmutableScanner snapshot
@@ -1508,203 +1514,203 @@
  inserts happening.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.html" title="class in org.apache.hadoop.hbase.regionserver">SplitLogWorker</a></td>
 <td class="colLast">
 <div class="block">This worker is spawned in every regionserver, including master.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitRequest.html" title="class in org.apache.hadoop.hbase.regionserver">SplitRequest</a></td>
 <td class="colLast">
 <div class="block">Handles processing region splits.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitWALCallable.html" title="class in org.apache.hadoop.hbase.regionserver">SplitWALCallable</a></td>
 <td class="colLast">
 <div class="block">This callable is used to do the real split WAL task.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.html" title="class in org.apache.hadoop.hbase.regionserver">SteppingSplitPolicy</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;SF extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlusher.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFlusher</a>,CP extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionPolicy.html" title="class in org.apache.hadoop.hbase.regionserv [...]
 <td class="colLast">
 <div class="block">StoreEngine is a factory that can create the objects necessary for HStore to operate.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileComparators.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileComparators</a></td>
 <td class="colLast">
 <div class="block">Useful comparators for comparing store files.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileComparators.GetBulkTime.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileComparators.GetBulkTime</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileComparators.GetFileSize.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileComparators.GetFileSize</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileComparators.GetMaxTimestamp.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileComparators.GetMaxTimestamp</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileComparators.GetPathName.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileComparators.GetPathName</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileInfo.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileInfo</a></td>
 <td class="colLast">
 <div class="block">Describe a StoreFile (hfile, reference, link)</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileReader.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileReader</a></td>
 <td class="colLast">
 <div class="block">Reader for a StoreFile.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.html" title="class in org.apache.hadoop.hbase.regionserver">StorefileRefresherChore</a></td>
 <td class="colLast">
 <div class="block">A chore for refreshing the store files for secondary regions hosted in the region server.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileScanner</a></td>
 <td class="colLast">
 <div class="block">KeyValueScanner adaptor over the Reader.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a></td>
 <td class="colLast">
 <div class="block">A StoreFile writer.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter.Builder</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlusher.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFlusher</a></td>
 <td class="colLast">
 <div class="block">Store flusher interface.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreScanner</a></td>
 <td class="colLast">
 <div class="block">Scanner scans both the memstore and the Store.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreUtils.html" title="class in org.apache.hadoop.hbase.regionserver">StoreUtils</a></td>
 <td class="colLast">
 <div class="block">Utility functions for region server storage layer.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StripeMultiFileWriter</a></td>
 <td class="colLast">
 <div class="block">Base class for cell sink that separates the provided cells into multiple files for stripe
  compaction.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.BoundaryMultiWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StripeMultiFileWriter.BoundaryMultiWriter</a></td>
 <td class="colLast">
 <div class="block">MultiWriter that separates the cells based on fixed row-key boundaries.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.SizeMultiWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StripeMultiFileWriter.SizeMultiWriter</a></td>
 <td class="colLast">
 <div class="block">MultiWriter that separates the cells based on target cell number per file and file count.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreConfig.html" title="class in org.apache.hadoop.hbase.regionserver">StripeStoreConfig</a></td>
 <td class="colLast">
 <div class="block">Configuration class for stripe store and compactions.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StripeStoreEngine</a></td>
 <td class="colLast">
 <div class="block">The storage engine that implements the stripe-based store/compaction scheme.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html" title="class in org.apache.hadoop.hbase.regionserver">StripeStoreFileManager</a></td>
 <td class="colLast">
 <div class="block">Stripe implementation of StoreFileManager.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.KeyBeforeConcatenatedLists.html" title="class in org.apache.hadoop.hbase.regionserver">StripeStoreFileManager.KeyBeforeConcatenatedLists</a></td>
 <td class="colLast">
 <div class="block">An extension of ConcatenatedLists that has several peculiar properties.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.State.html" title="class in org.apache.hadoop.hbase.regionserver">StripeStoreFileManager.State</a></td>
 <td class="colLast">
 <div class="block">The state class.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.html" title="class in org.apache.hadoop.hbase.regionserver">StripeStoreFlusher</a></td>
 <td class="colLast">
 <div class="block">Stripe implementation of StoreFlusher.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.BoundaryStripeFlushRequest.html" title="class in org.apache.hadoop.hbase.regionserver">StripeStoreFlusher.BoundaryStripeFlushRequest</a></td>
 <td class="colLast">
 <div class="block">Stripe flush request wrapper based on boundaries.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.SizeStripeFlushRequest.html" title="class in org.apache.hadoop.hbase.regionserver">StripeStoreFlusher.SizeStripeFlushRequest</a></td>
 <td class="colLast">
 <div class="block">Stripe flush request wrapper based on size.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.StripeFlushRequest.html" title="class in org.apache.hadoop.hbase.regionserver">StripeStoreFlusher.StripeFlushRequest</a></td>
 <td class="colLast">
 <div class="block">Stripe flush request wrapper that writes a non-striped file.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/ThreadSafeMemStoreSizing.html" title="class in org.apache.hadoop.hbase.regionserver">ThreadSafeMemStoreSizing</a></td>
 <td class="colLast">
 <div class="block">Accounting of current heap and data sizes.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.html" title="class in org.apache.hadoop.hbase.regionserver">TimeRangeTracker</a></td>
 <td class="colLast">
 <div class="block">Stores minimum and maximum timestamp values, it is [minimumTimestamp, maximumTimestamp] in
  interval notation.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.NonSyncTimeRangeTracker.html" title="class in org.apache.hadoop.hbase.regionserver">TimeRangeTracker.NonSyncTimeRangeTracker</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.SyncTimeRangeTracker.html" title="class in org.apache.hadoop.hbase.regionserver">TimeRangeTracker.SyncTimeRangeTracker</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/regionserver/VersionedSegmentsList.html" title="class in org.apache.hadoop.hbase.regionserver">VersionedSegmentsList</a></td>
 <td class="colLast">
 <div class="block">A list of segment managers coupled with the version of the memstore (version at the time it was
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index d95c0e2..29b60f7 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -296,7 +296,11 @@
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.RowLockContext</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.RowLockImpl</span></a> (implements org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.RowLock.html" title="interface in org.apache.hadoop.hbase.regionserver">Region.RowLock</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.WriteState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegionFileSystem</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegionFileSystem</span></a>
+<ul>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegionWALFileSystem</span></a></li>
+</ul>
+</li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegionServer.MovedRegionInfo</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HStore</span></a> (implements org.apache.hadoop.hbase.io.<a href="../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a>, org.apache.hadoop.hbase.conf.<a href="../../../../../org/apache/hadoop/hbase/conf/PropagatingConf [...]
 <ul>
@@ -725,20 +729,20 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-use.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-use.html
index ed79ce4..48dbd37 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-use.html
@@ -1242,69 +1242,74 @@ service.</div>
 </td>
 </tr>
 <tr class="altColor">
+<td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/HRegionWALFileSystem.html#org.apache.hadoop.hbase.regionserver">HRegionWALFileSystem</a>
+<div class="block">A Wrapper for the region FileSystem operations adding WAL specific operations</div>
+</td>
+</tr>
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/HStore.html#org.apache.hadoop.hbase.regionserver">HStore</a>
 <div class="block">A Store holds a column family in a Region.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/HStoreFile.html#org.apache.hadoop.hbase.regionserver">HStoreFile</a>
 <div class="block">A Store data file.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ImmutableSegment.html#org.apache.hadoop.hbase.regionserver">ImmutableSegment</a>
 <div class="block">ImmutableSegment is an abstract class that extends the API supported by a <a href="../../../../../org/apache/hadoop/hbase/regionserver/Segment.html" title="class in org.apache.hadoop.hbase.regionserver"><code>Segment</code></a>,
  and is not needed for a <a href="../../../../../org/apache/hadoop/hbase/regionserver/MutableSegment.html" title="class in org.apache.hadoop.hbase.regionserver"><code>MutableSegment</code></a>.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/IncreasingToUpperBoundRegionSplitPolicy.html#org.apache.hadoop.hbase.regionserver">IncreasingToUpperBoundRegionSplitPolicy</a>
 <div class="block">Split size is the number of regions that are on this server that all are
  of the same table, cubed, times 2x the region flush size OR the maximum
  region split size, whichever is smaller.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/InternalScanner.html#org.apache.hadoop.hbase.regionserver">InternalScanner</a>
 <div class="block">Internal scanners differ from client-side scanners in that they operate on
  HStoreKeys and byte[] instead of RowResults.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/InvalidMutationDurabilityException.html#org.apache.hadoop.hbase.regionserver">InvalidMutationDurabilityException</a>
 <div class="block">Thrown if Mutation's <a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><code>Durability</code></a> is skip wal but table need replication.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/KeyValueHeap.html#org.apache.hadoop.hbase.regionserver">KeyValueHeap</a>
 <div class="block">Implements a heap merge across any number of KeyValueScanners.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/KeyValueHeap.KVScannerComparator.html#org.apache.hadoop.hbase.regionserver">KeyValueHeap.KVScannerComparator</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/KeyValueScanner.html#org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>
 <div class="block">Scanner that returns the next KeyValue.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/LastSequenceId.html#org.apache.hadoop.hbase.regionserver">LastSequenceId</a>
 <div class="block">Last flushed sequence Ids for the regions and their stores on region server</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/LeaseException.html#org.apache.hadoop.hbase.regionserver">LeaseException</a>
 <div class="block">Reports a problem with a lease</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/LeaseListener.html#org.apache.hadoop.hbase.regionserver">LeaseListener</a>
 <div class="block">LeaseListener is an interface meant to be implemented by users of the Leases
  class.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/LeaseManager.html#org.apache.hadoop.hbase.regionserver">LeaseManager</a>
 <div class="block">Leases
 
@@ -1312,45 +1317,45 @@ service.</div>
  clients that occasionally send heartbeats.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/LeaseManager.Lease.html#org.apache.hadoop.hbase.regionserver">LeaseManager.Lease</a>
 <div class="block">This class tracks a single Lease.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/LeaseManager.LeaseStillHeldException.html#org.apache.hadoop.hbase.regionserver">LeaseManager.LeaseStillHeldException</a>
 <div class="block">Thrown if we are asked to create a lease but lease on passed name already
  exists.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/LogRoller.html#org.apache.hadoop.hbase.regionserver">LogRoller</a>
 <div class="block">Runs periodically to determine if the WAL should be rolled.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStore.html#org.apache.hadoop.hbase.regionserver">MemStore</a>
 <div class="block">The MemStore holds in-memory modifications to the Store.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreCompactionStrategy.html#org.apache.hadoop.hbase.regionserver">MemStoreCompactionStrategy</a>
 <div class="block">MemStoreCompactionStrategy is the root of a class hierarchy which defines the strategy for
  choosing the next action to apply in an (in-memory) memstore compaction.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreCompactionStrategy.Action.html#org.apache.hadoop.hbase.regionserver">MemStoreCompactionStrategy.Action</a>
 <div class="block">Types of actions to be done on the pipeline upon MemStoreCompaction invocation.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreCompactor.html#org.apache.hadoop.hbase.regionserver">MemStoreCompactor</a>
 <div class="block">The ongoing MemStore Compaction manager, dispatches a solo running compaction and interrupts
  the compaction if requested.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreFlusher.html#org.apache.hadoop.hbase.regionserver">MemStoreFlusher</a>
 <div class="block">Thread that flushes cache on request
 
@@ -1359,23 +1364,23 @@ service.</div>
  sleep time which is invariant.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreFlusher.FlushHandler.html#org.apache.hadoop.hbase.regionserver">MemStoreFlusher.FlushHandler</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreFlusher.FlushQueueEntry.html#org.apache.hadoop.hbase.regionserver">MemStoreFlusher.FlushQueueEntry</a>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreFlusher.FlushRegionEntry.html#org.apache.hadoop.hbase.regionserver">MemStoreFlusher.FlushRegionEntry</a>
 <div class="block">Datastructure used in the flush queue.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreLAB.html#org.apache.hadoop.hbase.regionserver">MemStoreLAB</a>
 <div class="block">A memstore-local allocation buffer.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreSegmentsIterator.html#org.apache.hadoop.hbase.regionserver">MemStoreSegmentsIterator</a>
 <div class="block">The MemStoreSegmentsIterator is designed to perform one iteration over given list of segments
  For another iteration new instance of MemStoreSegmentsIterator needs to be created
@@ -1383,326 +1388,326 @@ service.</div>
  in each period of time</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreSize.html#org.apache.hadoop.hbase.regionserver">MemStoreSize</a>
 <div class="block">Data structure of three longs.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreSizing.html#org.apache.hadoop.hbase.regionserver">MemStoreSizing</a>
 <div class="block">Accounting of current heap and data sizes.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MemStoreSnapshot.html#org.apache.hadoop.hbase.regionserver">MemStoreSnapshot</a>
 <div class="block">Holds details of the snapshot taken on a MemStore.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsHeapMemoryManager.html#org.apache.hadoop.hbase.regionserver">MetricsHeapMemoryManager</a>
 <div class="block">This class is for maintaining the various regionserver's heap memory manager statistics and
  publishing them through the metrics interfaces.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsHeapMemoryManagerSource.html#org.apache.hadoop.hbase.regionserver">MetricsHeapMemoryManagerSource</a>
 <div class="block">This interface will be implemented by a MetricsSource that will export metrics from
  HeapMemoryManager in RegionServer into the hadoop metrics system.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsHeapMemoryManagerSourceImpl.html#org.apache.hadoop.hbase.regionserver">MetricsHeapMemoryManagerSourceImpl</a>
 <div class="block">Hadoop2 implementation of MetricsHeapMemoryManagerSource.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegion.html#org.apache.hadoop.hbase.regionserver">MetricsRegion</a>
 <div class="block">This is the glue between the HRegion and whatever hadoop shim layer
  is loaded (hbase-hadoop1-compat or hbase-hadoop2-compat).</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionAggregateSource.html#org.apache.hadoop.hbase.regionserver">MetricsRegionAggregateSource</a>
 <div class="block">This interface will be implemented by a MetricsSource that will export metrics from
  multiple regions into the hadoop metrics system.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionAggregateSourceImpl.html#org.apache.hadoop.hbase.regionserver">MetricsRegionAggregateSourceImpl</a>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionServer.html#org.apache.hadoop.hbase.regionserver">MetricsRegionServer</a>
 <div class="block">
  This class is for maintaining the various regionserver statistics
  and publishing them through the metrics interfaces.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionServerQuotaSource.html#org.apache.hadoop.hbase.regionserver">MetricsRegionServerQuotaSource</a>
 <div class="block">A collection of exposed metrics for space quotas from an HBase RegionServer.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionServerSource.html#org.apache.hadoop.hbase.regionserver">MetricsRegionServerSource</a>
 <div class="block">Interface for classes that expose metrics about the regionserver.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionServerSourceFactory.html#org.apache.hadoop.hbase.regionserver">MetricsRegionServerSourceFactory</a>
 <div class="block">Interface of a factory to create Metrics Sources used inside of regionservers.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html#org.apache.hadoop.hbase.regionserver">MetricsRegionServerSourceFactoryImpl.FactoryStorage</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionServerWrapper.html#org.apache.hadoop.hbase.regionserver">MetricsRegionServerWrapper</a>
 <div class="block">This is the interface that will expose RegionServer information to hadoop1/hadoop2
  implementations of the MetricsRegionServerSource.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionServerWrapperImpl.html#org.apache.hadoop.hbase.regionserver">MetricsRegionServerWrapperImpl</a>
 <div class="block">Impl for exposing HRegionServer Information through Hadoop's metrics 2 system.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionSource.html#org.apache.hadoop.hbase.regionserver">MetricsRegionSource</a>
 <div class="block">This interface will be implemented to allow single regions to push metrics into
  MetricsRegionAggregateSource that will in turn push data to the Hadoop metrics system.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionWrapper.html#org.apache.hadoop.hbase.regionserver">MetricsRegionWrapper</a>
 <div class="block">Interface of class that will wrap an HRegion and export numbers so they can be
  used in MetricsRegionSource</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsRegionWrapperImpl.html#org.apache.hadoop.hbase.regionserver">MetricsRegionWrapperImpl</a>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsTable.html#org.apache.hadoop.hbase.regionserver">MetricsTable</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsTableAggregateSource.html#org.apache.hadoop.hbase.regionserver">MetricsTableAggregateSource</a>
 <div class="block">This interface will be implemented by a MetricsSource that will export metrics from
  multiple regions of a table into the hadoop metrics system.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsTableAggregateSourceImpl.html#org.apache.hadoop.hbase.regionserver">MetricsTableAggregateSourceImpl</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsTableLatencies.html#org.apache.hadoop.hbase.regionserver">MetricsTableLatencies</a>
 <div class="block">Latency metrics for a specific table in a RegionServer.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsTableLatenciesImpl.TableHistograms.html#org.apache.hadoop.hbase.regionserver">MetricsTableLatenciesImpl.TableHistograms</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsTableQueryMeter.html#org.apache.hadoop.hbase.regionserver">MetricsTableQueryMeter</a>
 <div class="block">Query Per Second for each table in a RegionServer.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsTableQueryMeterImpl.TableMeters.html#org.apache.hadoop.hbase.regionserver">MetricsTableQueryMeterImpl.TableMeters</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsTableSource.html#org.apache.hadoop.hbase.regionserver">MetricsTableSource</a>
 <div class="block">This interface will be implemented to allow region server to push table metrics into
  MetricsRegionAggregateSource that will in turn push data to the Hadoop metrics system.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsTableWrapperAggregate.html#org.apache.hadoop.hbase.regionserver">MetricsTableWrapperAggregate</a>
 <div class="block">Interface of class that will wrap a MetricsTableSource and export numbers so they can be
  used in MetricsTableSource</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsTableWrapperAggregateImpl.MetricsTableValues.html#org.apache.hadoop.hbase.regionserver">MetricsTableWrapperAggregateImpl.MetricsTableValues</a>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsUserAggregate.html#org.apache.hadoop.hbase.regionserver">MetricsUserAggregate</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsUserAggregateSource.html#org.apache.hadoop.hbase.regionserver">MetricsUserAggregateSource</a>
 <div class="block">This interface will be implemented by a MetricsSource that will export metrics from
  multiple users into the hadoop metrics system.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsUserAggregateSourceImpl.html#org.apache.hadoop.hbase.regionserver">MetricsUserAggregateSourceImpl</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MetricsUserSource.html#org.apache.hadoop.hbase.regionserver">MetricsUserSource</a>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MiniBatchOperationInProgress.html#org.apache.hadoop.hbase.regionserver">MiniBatchOperationInProgress</a>
 <div class="block">Wraps together the mutations which are applied as a batch to the region and their operation
  status and WALEdits.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MultiVersionConcurrencyControl.html#org.apache.hadoop.hbase.regionserver">MultiVersionConcurrencyControl</a>
 <div class="block">Manages the read/write consistency.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MultiVersionConcurrencyControl.WriteEntry.html#org.apache.hadoop.hbase.regionserver">MultiVersionConcurrencyControl.WriteEntry</a>
 <div class="block">Write number and whether write has completed given out at start of a write transaction.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MutableOnlineRegions.html#org.apache.hadoop.hbase.regionserver">MutableOnlineRegions</a>
 <div class="block">Interface to Map of online regions.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/MutableSegment.html#org.apache.hadoop.hbase.regionserver">MutableSegment</a>
 <div class="block">A mutable segment in memstore, specifically the active segment.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/NonLazyKeyValueScanner.html#org.apache.hadoop.hbase.regionserver">NonLazyKeyValueScanner</a>
 <div class="block">A "non-lazy" scanner which always does a real seek operation.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/NonReversedNonLazyKeyValueScanner.html#org.apache.hadoop.hbase.regionserver">NonReversedNonLazyKeyValueScanner</a>
 <div class="block">A "non-reversed &amp; non-lazy" scanner which does not support backward scanning
  and always does a real seek operation.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/NoSuchColumnFamilyException.html#org.apache.hadoop.hbase.regionserver">NoSuchColumnFamilyException</a>
 <div class="block">Thrown if request for nonexistent column family.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/OnlineRegions.html#org.apache.hadoop.hbase.regionserver">OnlineRegions</a>
 <div class="block">Provides read-only access to the Regions presently online on the
  current RegionServer</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/OperationStatus.html#org.apache.hadoop.hbase.regionserver">OperationStatus</a>
 <div class="block">This class stores the Operation status code and the exception message
  that occurs in case of failure of operations like put, delete, etc.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/Region.html#org.apache.hadoop.hbase.regionserver">Region</a>
 <div class="block">Region is a subset of HRegion with operations required for the <a href="../../../../../org/apache/hadoop/hbase/coprocessor/RegionCoprocessor.html" title="interface in org.apache.hadoop.hbase.coprocessor"><code>Coprocessors</code></a>.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/Region.Operation.html#org.apache.hadoop.hbase.regionserver">Region.Operation</a>
 <div class="block">Operation enum is used in <a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.html#startRegionOperation--"><code>Region.startRegionOperation()</code></a> and elsewhere to provide
  context for various checks.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/Region.RowLock.html#org.apache.hadoop.hbase.regionserver">Region.RowLock</a>
 <div class="block">Row lock held by a given thread.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionCoprocessorHost.html#org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost</a>
 <div class="block">Implements the coprocessor environment and runtime support for coprocessors
  loaded within a <a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.html" title="interface in org.apache.hadoop.hbase.regionserver"><code>Region</code></a>.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionCoprocessorHost.RegionEnvironment.html#org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost.RegionEnvironment</a>
 <div class="block">Encapsulation of the environment of each coprocessor</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionCoprocessorHost.TableCoprocessorAttribute.html#org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost.TableCoprocessorAttribute</a>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionScanner.html#org.apache.hadoop.hbase.regionserver">RegionScanner</a>
 <div class="block">RegionScanner describes iterators over rows in an HRegion.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionServerAccounting.html#org.apache.hadoop.hbase.regionserver">RegionServerAccounting</a>
 <div class="block">RegionServerAccounting keeps record of some basic real time information about
  the Region Server.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionServerCoprocessorHost.html#org.apache.hadoop.hbase.regionserver">RegionServerCoprocessorHost</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionServerCoprocessorHost.RegionServerEnvironment.html#org.apache.hadoop.hbase.regionserver">RegionServerCoprocessorHost.RegionServerEnvironment</a>
 <div class="block">Coprocessor environment extension providing access to region server
  related services.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.html#org.apache.hadoop.hbase.regionserver">RegionServerServices</a>
 <div class="block">A curated subset of services provided by <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver"><code>HRegionServer</code></a>.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.PostOpenDeployContext.html#org.apache.hadoop.hbase.regionserver">RegionServerServices.PostOpenDeployContext</a>
 <div class="block">Context for postOpenDeployTasks().</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.RegionStateTransitionContext.html#org.apache.hadoop.hbase.regionserver">RegionServerServices.RegionStateTransitionContext</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionServerStoppedException.html#org.apache.hadoop.hbase.regionserver">RegionServerStoppedException</a>
 <div class="block">Thrown by the region server when it is in shutting down state.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionServerTableMetrics.html#org.apache.hadoop.hbase.regionserver">RegionServerTableMetrics</a>
 <div class="block">Captures operation metrics by table.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionServicesForStores.html#org.apache.hadoop.hbase.regionserver">RegionServicesForStores</a>
 <div class="block">Services a Store needs from a Region.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RegionSplitPolicy.html#org.apache.hadoop.hbase.regionserver">RegionSplitPolicy</a>
 <div class="block">A split policy determines when a Region should be split.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RemoteProcedureResultReporter.html#org.apache.hadoop.hbase.regionserver">RemoteProcedureResultReporter</a>
 <div class="block">A thread which calls <code>reportProcedureDone</code> to tell master the result of a remote procedure.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ReplicationService.html#org.apache.hadoop.hbase.regionserver">ReplicationService</a>
 <div class="block">Gateway to Cluster Replication.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ReplicationSinkService.html#org.apache.hadoop.hbase.regionserver">ReplicationSinkService</a>
 <div class="block">A sink for a replication stream has to expose this service.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ReplicationSourceService.html#org.apache.hadoop.hbase.regionserver">ReplicationSourceService</a>
 <div class="block">A source for a replication stream has to expose this service.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ReversedStoreScanner.html#org.apache.hadoop.hbase.regionserver">ReversedStoreScanner</a>
 <div class="block">ReversedStoreScanner extends from StoreScanner, and is used to support
  reversed scanning.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RowProcessor.html#org.apache.hadoop.hbase.regionserver">RowProcessor</a>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;
 <div class="block"><span class="deprecationComment">As of release 2.0.0, this will be removed in HBase 3.0.0. For customization, use
@@ -1710,241 +1715,241 @@ service.</div>
 </div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RpcSchedulerFactory.html#org.apache.hadoop.hbase.regionserver">RpcSchedulerFactory</a>
 <div class="block">A factory class that constructs an <a href="../../../../../org/apache/hadoop/hbase/ipc/RpcScheduler.html" title="class in org.apache.hadoop.hbase.ipc"><code>RpcScheduler</code></a>.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RSRpcServices.html#org.apache.hadoop.hbase.regionserver">RSRpcServices</a>
 <div class="block">Implements the regionserver RPC services.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RSRpcServices.LogDelegate.html#org.apache.hadoop.hbase.regionserver">RSRpcServices.LogDelegate</a>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RSRpcServices.RegionScannerHolder.html#org.apache.hadoop.hbase.regionserver">RSRpcServices.RegionScannerHolder</a>
 <div class="block">Holder class which holds the RegionScanner, nextCallSeq and RpcCallbacks together.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/RSRpcServices.RegionScannersCloseCallBack.html#org.apache.hadoop.hbase.regionserver">RSRpcServices.RegionScannersCloseCallBack</a>
 <div class="block">An RpcCallBack that creates a list of scanners that needs to perform callBack operation on
  completion of multiGets.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ScanInfo.html#org.apache.hadoop.hbase.regionserver">ScanInfo</a>
 <div class="block">Immutable information for scans over a store.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.html#org.apache.hadoop.hbase.regionserver">ScannerContext</a>
 <div class="block">ScannerContext instances encapsulate limit tracking AND progress towards those limits during
  invocations of <a href="../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html#next-java.util.List-"><code>InternalScanner.next(java.util.List)</code></a> and
  <a href="../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html#next-java.util.List-"><code>InternalScanner.next(java.util.List)</code></a>.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.Builder.html#org.apache.hadoop.hbase.regionserver">ScannerContext.Builder</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.LimitFields.html#org.apache.hadoop.hbase.regionserver">ScannerContext.LimitFields</a>
 <div class="block">The different fields that can be used as limits in calls to
  <a href="../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html#next-java.util.List-"><code>InternalScanner.next(java.util.List)</code></a> and <a href="../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html#next-java.util.List-"><code>InternalScanner.next(java.util.List)</code></a></div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.LimitScope.html#org.apache.hadoop.hbase.regionserver">ScannerContext.LimitScope</a>
 <div class="block">The various scopes where a limit can be enforced.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.NextState.html#org.apache.hadoop.hbase.regionserver">ScannerContext.NextState</a>
 <div class="block">The possible states a scanner may be in following a call to <a href="../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html#next-java.util.List-"><code>InternalScanner.next(List)</code></a></div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ScannerContext.ProgressFields.html#org.apache.hadoop.hbase.regionserver">ScannerContext.ProgressFields</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ScannerIdGenerator.html#org.apache.hadoop.hbase.regionserver">ScannerIdGenerator</a>
 <div class="block">Generate a new style scanner id to prevent collision with previous started server or other RSs.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ScanOptions.html#org.apache.hadoop.hbase.regionserver">ScanOptions</a>
 <div class="block">This class gives you the ability to change the max versions and TTL options before opening a
  scanner for a Store.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ScanType.html#org.apache.hadoop.hbase.regionserver">ScanType</a>
 <div class="block">Enum to distinguish general scan types.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/SecureBulkLoadManager.html#org.apache.hadoop.hbase.regionserver">SecureBulkLoadManager</a>
 <div class="block">Bulk loads in secure mode.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/Segment.html#org.apache.hadoop.hbase.regionserver">Segment</a>
 <div class="block">This is an abstraction of a segment maintained in a memstore, e.g., the active
  cell set or its snapshot.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/SegmentFactory.html#org.apache.hadoop.hbase.regionserver">SegmentFactory</a>
 <div class="block">A singleton store segment factory.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ServerNonceManager.html#org.apache.hadoop.hbase.regionserver">ServerNonceManager</a>
 <div class="block">Implementation of nonce manager that stores nonces in a hash map and cleans them up after
  some time; if nonce group/client ID is supplied, nonces are stored by client ID.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ServerNonceManager.OperationContext.html#org.apache.hadoop.hbase.regionserver">ServerNonceManager.OperationContext</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/Shipper.html#org.apache.hadoop.hbase.regionserver">Shipper</a>
 <div class="block">This interface denotes a scanner as one which can ship cells.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/ShipperListener.html#org.apache.hadoop.hbase.regionserver">ShipperListener</a>
 <div class="block">Implementors of this interface are the ones who needs to do some action when the
  <a href="../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html#shipped--"><code>Shipper.shipped()</code></a> is called</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/SplitLogWorker.html#org.apache.hadoop.hbase.regionserver">SplitLogWorker</a>
 <div class="block">This worker is spawned in every regionserver, including master.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/SplitLogWorker.TaskExecutor.html#org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor</a>
 <div class="block">Objects implementing this interface actually do the task that has been
  acquired by a <a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.html" title="class in org.apache.hadoop.hbase.regionserver"><code>SplitLogWorker</code></a>.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/SplitLogWorker.TaskExecutor.Status.html#org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/Store.html#org.apache.hadoop.hbase.regionserver">Store</a>
 <div class="block">Interface for objects that hold a column family in a Region.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreConfigInformation.html#org.apache.hadoop.hbase.regionserver">StoreConfigInformation</a>
 <div class="block">A more restricted interface for HStore.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreEngine.html#org.apache.hadoop.hbase.regionserver">StoreEngine</a>
 <div class="block">StoreEngine is a factory that can create the objects necessary for HStore to operate.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html#org.apache.hadoop.hbase.regionserver">StoreFile</a>
 <div class="block">An interface to describe a store data file.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreFileInfo.html#org.apache.hadoop.hbase.regionserver">StoreFileInfo</a>
 <div class="block">Describe a StoreFile (hfile, reference, link)</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreFileManager.html#org.apache.hadoop.hbase.regionserver">StoreFileManager</a>
 <div class="block">Manages the store files and basic metadata about that that determines the logical structure
  (e.g.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreFileReader.html#org.apache.hadoop.hbase.regionserver">StoreFileReader</a>
 <div class="block">Reader for a StoreFile.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StorefileRefresherChore.html#org.apache.hadoop.hbase.regionserver">StorefileRefresherChore</a>
 <div class="block">A chore for refreshing the store files for secondary regions hosted in the region server.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreFileScanner.html#org.apache.hadoop.hbase.regionserver">StoreFileScanner</a>
 <div class="block">KeyValueScanner adaptor over the Reader.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html#org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>
 <div class="block">A StoreFile writer.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.Builder.html#org.apache.hadoop.hbase.regionserver">StoreFileWriter.Builder</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreFlushContext.html#org.apache.hadoop.hbase.regionserver">StoreFlushContext</a>
 <div class="block">A package protected interface for a store flushing.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreFlusher.html#org.apache.hadoop.hbase.regionserver">StoreFlusher</a>
 <div class="block">Store flusher interface.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StoreScanner.html#org.apache.hadoop.hbase.regionserver">StoreScanner</a>
 <div class="block">Scanner scans both the memstore and the Store.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StripeMultiFileWriter.html#org.apache.hadoop.hbase.regionserver">StripeMultiFileWriter</a>
 <div class="block">Base class for cell sink that separates the provided cells into multiple files for stripe
  compaction.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StripeStoreConfig.html#org.apache.hadoop.hbase.regionserver">StripeStoreConfig</a>
 <div class="block">Configuration class for stripe store and compactions.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StripeStoreFileManager.html#org.apache.hadoop.hbase.regionserver">StripeStoreFileManager</a>
 <div class="block">Stripe implementation of StoreFileManager.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StripeStoreFileManager.State.html#org.apache.hadoop.hbase.regionserver">StripeStoreFileManager.State</a>
 <div class="block">The state class.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/StripeStoreFlusher.StripeFlushRequest.html#org.apache.hadoop.hbase.regionserver">StripeStoreFlusher.StripeFlushRequest</a>
 <div class="block">Stripe flush request wrapper that writes a non-striped file.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/TimeRangeTracker.html#org.apache.hadoop.hbase.regionserver">TimeRangeTracker</a>
 <div class="block">Stores minimum and maximum timestamp values, it is [minimumTimestamp, maximumTimestamp] in
  interval notation.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/TimeRangeTracker.Type.html#org.apache.hadoop.hbase.regionserver">TimeRangeTracker.Type</a>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/VersionedSegmentsList.html#org.apache.hadoop.hbase.regionserver">VersionedSegmentsList</a>
 <div class="block">A list of segment managers coupled with the version of the memstore (version at the time it was
  created).</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/regionserver/class-use/WrongRegionException.html#org.apache.hadoop.hbase.regionserver">WrongRegionException</a>
 <div class="block">Thrown when a request contains a key which is not part of this region</div>
 </td>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index b195b74..74ae4b0 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,9 +130,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index eb39c79..ef06c00 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -248,8 +248,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.RollRequestReason.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">WALActionsListener.RollRequestReason</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrResult.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">ProtobufLogReader.WALHdrResult</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.RollRequestReason.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">WALActionsListener.RollRequestReason</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
index 800a66a..c9ac434 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
@@ -192,8 +192,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.WorkerState.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">ReplicationSourceShipper.WorkerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceFactoryImpl.SourceHolder.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">MetricsReplicationSourceFactoryImpl.SourceHolder</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.WorkerState.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">ReplicationSourceShipper.WorkerState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index 94bf898..754b69d 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -162,11 +162,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index 4b0ca3e..bd591e2 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -192,9 +192,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslUtil.QualityOfProtection</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
index 6b23a96..60c52af 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
@@ -211,9 +211,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ImplType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ImplType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftMetrics.ThriftServerType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ImplType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ImplType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HFileArchiveUtil.html b/devapidocs/org/apache/hadoop/hbase/util/HFileArchiveUtil.html
index 63bf058..a101a90 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HFileArchiveUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HFileArchiveUtil.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9};
+var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -208,19 +208,27 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr id="i7" class="rowColor">
 <td class="colFirst"><code>static org.apache.hadoop.fs.Path</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HFileArchiveUtil.html#getStoreArchivePathForRootDir-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-byte:A-">getStoreArchivePathForRootDir</a></span>(org.apache.hadoop.fs.Path&nbsp;rootDir,
+                             <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;region,
+                             byte[]&nbsp;family)</code>
+<div class="block">Gets the archive directory under specified root dir.</div>
+</td>
+</tr>
+<tr id="i8" class="altColor">
+<td class="colFirst"><code>static org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HFileArchiveUtil.html#getTableArchivePath-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.TableName-">getTableArchivePath</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                    <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</code>
 <div class="block">Get the path to the table archive directory based on the configured archive directory.</div>
 </td>
 </tr>
-<tr id="i8" class="altColor">
+<tr id="i9" class="rowColor">
 <td class="colFirst"><code>static org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HFileArchiveUtil.html#getTableArchivePath-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.TableName-">getTableArchivePath</a></span>(org.apache.hadoop.fs.Path&nbsp;rootdir,
                    <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</code>
 <div class="block">Get the path to the table archive directory based on the configured archive directory.</div>
 </td>
 </tr>
-<tr id="i9" class="rowColor">
+<tr id="i10" class="altColor">
 <td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HFileArchiveUtil.html#getTableName-org.apache.hadoop.fs.Path-">getTableName</a></span>(org.apache.hadoop.fs.Path&nbsp;archivePath)</code>&nbsp;</td>
 </tr>
@@ -339,13 +347,38 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </dl>
 </li>
 </ul>
+<a name="getStoreArchivePathForRootDir-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-byte:A-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>getStoreArchivePathForRootDir</h4>
+<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.100">getStoreArchivePathForRootDir</a>(org.apache.hadoop.fs.Path&nbsp;rootDir,
+                                                                      <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;region,
+                                                                      byte[]&nbsp;family)</pre>
+<div class="block">Gets the archive directory under specified root dir. One scenario where this is useful is
+ when WAL and root dir are configured under different file systems,
+ i.e. root dir on S3 and WALs on HDFS.
+ This is mostly useful for archiving recovered edits, when
+ <b>hbase.region.archive.recovered.edits</b> is enabled.</div>
+<dl>
+<dt><span class="paramLabel">Parameters:</span></dt>
+<dd><code>rootDir</code> - <code>Path</code> the root dir under which the archive path should be created.</dd>
+<dd><code>region</code> - parent region information under which the store currently lives</dd>
+<dd><code>family</code> - name of the family in the store</dd>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd><code>Path</code> to the WAL FS directory to archive the given store
+         or <tt>null</tt> if it should not be archived</dd>
+</dl>
+</li>
+</ul>
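A minimal usage sketch for the getStoreArchivePathForRootDir method documented above. This is illustrative only and not part of the generated Javadoc: the wrapper class, method name and the "cf" column family are placeholders, and it assumes a Configuration and RegionInfo are already available.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.FSUtils;
    import org.apache.hadoop.hbase.util.HFileArchiveUtil;

    public class ArchivePathExample {
      // Resolves where a store's files would be archived under the configured root dir,
      // e.g. when the root dir lives on S3 while the WALs stay on HDFS.
      static Path storeArchiveDir(Configuration conf, RegionInfo region) throws IOException {
        Path rootDir = FSUtils.getRootDir(conf);
        byte[] family = Bytes.toBytes("cf"); // placeholder family name
        // Per the javadoc above, a null return means the store should not be archived.
        return HFileArchiveUtil.getStoreArchivePathForRootDir(rootDir, region, family);
      }
    }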
 <a name="getRegionArchiveDir-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.TableName-org.apache.hadoop.fs.Path-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionArchiveDir</h4>
-<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.95">getRegionArchiveDir</a>(org.apache.hadoop.fs.Path&nbsp;rootDir,
+<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.112">getRegionArchiveDir</a>(org.apache.hadoop.fs.Path&nbsp;rootDir,
                                                             <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                                             org.apache.hadoop.fs.Path&nbsp;regiondir)</pre>
 <div class="block">Get the archive directory for a given region under the specified table</div>
@@ -365,7 +398,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionArchiveDir</h4>
-<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.114">getRegionArchiveDir</a>(org.apache.hadoop.fs.Path&nbsp;rootDir,
+<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.131">getRegionArchiveDir</a>(org.apache.hadoop.fs.Path&nbsp;rootDir,
                                                             <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                                             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedRegionName)</pre>
 <div class="block">Get the archive directory for a given region under the specified table</div>
@@ -386,7 +419,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableArchivePath</h4>
-<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.132">getTableArchivePath</a>(org.apache.hadoop.fs.Path&nbsp;rootdir,
+<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.149">getTableArchivePath</a>(org.apache.hadoop.fs.Path&nbsp;rootdir,
                                                             <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</pre>
 <div class="block">Get the path to the table archive directory based on the configured archive directory.
  <p>
@@ -409,7 +442,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableArchivePath</h4>
-<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.144">getTableArchivePath</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.161">getTableArchivePath</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                             <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)
                                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get the path to the table archive directory based on the configured archive directory.
@@ -432,7 +465,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getArchivePath</h4>
-<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.158">getArchivePath</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
+<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.175">getArchivePath</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
                                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get the full path to the archive directory on the configured
  <a href="../../../../../org/apache/hadoop/hbase/master/MasterFileSystem.html" title="class in org.apache.hadoop.hbase.master"><code>MasterFileSystem</code></a></div>
@@ -453,7 +486,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getArchivePath</h4>
-<pre>private static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.169">getArchivePath</a>(org.apache.hadoop.fs.Path&nbsp;rootdir)</pre>
+<pre>private static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.186">getArchivePath</a>(org.apache.hadoop.fs.Path&nbsp;rootdir)</pre>
 <div class="block">Get the full path to the archive directory on the configured
  <a href="../../../../../org/apache/hadoop/hbase/master/MasterFileSystem.html" title="class in org.apache.hadoop.hbase.master"><code>MasterFileSystem</code></a></div>
 <dl>
@@ -471,7 +504,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getTableName</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.176">getTableName</a>(org.apache.hadoop.fs.Path&nbsp;archivePath)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HFileArchiveUtil.html#line.193">getTableName</a>(org.apache.hadoop.fs.Path&nbsp;archivePath)</pre>
 </li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index f55b3a8..64acdf0 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -559,14 +559,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLockWithObjectPool.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLockWithObjectPool.ReferenceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HbckErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HbckErrorReporter.ERROR_CODE</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLockWithObjectPool.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLockWithObjectPool.ReferenceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index bdae3d8..8c4c41b 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -192,8 +192,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/RegionGroupingProvider.Strategies.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">RegionGroupingProvider.Strategies</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index c089bf2..527520c 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -1992,7 +1992,11 @@
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.RowLockContext</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.RowLockImpl</span></a> (implements org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/Region.RowLock.html" title="interface in org.apache.hadoop.hbase.regionserver">Region.RowLock</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.WriteState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegionFileSystem</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegionFileSystem</span></a>
+<ul>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegionWALFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegionWALFileSystem</span></a></li>
+</ul>
+</li>
 <li type="circle">org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase"><span class="typeNameLink">HRegionInfo</span></a> (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop. [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/HbckRegionInfo.MetaEntry.html" title="class in org.apache.hadoop.hbase.util"><span class="typeNameLink">HbckRegionInfo.MetaEntry</span></a></li>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 02fc4ef..c9cf102 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -18,9 +18,9 @@
 <span class="sourceLineNo">010</span>  justification="Intentional; to be modified in test")<a name="line.10"></a>
 <span class="sourceLineNo">011</span>public class Version {<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String version = new String("3.0.0-SNAPSHOT");<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String revision = "b1df7df0e0e09f1844a0ff9c0fc1c2cac8654a8e";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String revision = "b08697ae4a347f34273253e33ba91bb6b7ade5e0";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String user = "jenkins";<a name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String date = "Sat Nov 23 14:36:42 UTC 2019";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String date = "Sun Nov 24 14:38:14 UTC 2019";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>  public static final String url = "git://jenkins-websites-he-de.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a name="line.16"></a>
 <span class="sourceLineNo">017</span>  public static final String srcChecksum = "(stdin)=";<a name="line.17"></a>
 <span class="sourceLineNo">018</span>}<a name="line.18"></a>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html
index 0343488..a9dcefd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.File.html
@@ -46,733 +46,769 @@
 <span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import org.apache.hadoop.fs.Path;<a name="line.39"></a>
 <span class="sourceLineNo">040</span>import org.apache.hadoop.fs.PathFilter;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.regionserver.HStoreFile;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.Logger;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.slf4j.LoggerFactory;<a name="line.51"></a>
-<span class="sourceLineNo">052</span><a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.53"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.HConstants;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.regionserver.HStoreFile;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.slf4j.Logger;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.slf4j.LoggerFactory;<a name="line.53"></a>
 <span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>/**<a name="line.55"></a>
-<span class="sourceLineNo">056</span> * Utility class to handle the removal of HFiles (or the respective {@link HStoreFile StoreFiles})<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * for a HRegion from the {@link FileSystem}. The hfiles will be archived or deleted, depending on<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * the state of the system.<a name="line.58"></a>
-<span class="sourceLineNo">059</span> */<a name="line.59"></a>
-<span class="sourceLineNo">060</span>@InterfaceAudience.Private<a name="line.60"></a>
-<span class="sourceLineNo">061</span>public class HFileArchiver {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class);<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  private static final String SEPARATOR = ".";<a name="line.63"></a>
-<span class="sourceLineNo">064</span><a name="line.64"></a>
-<span class="sourceLineNo">065</span>  /** Number of retries in case of fs operation failure */<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  private static final int DEFAULT_RETRIES_NUMBER = 3;<a name="line.66"></a>
-<span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  private static final Function&lt;File, Path&gt; FUNC_FILE_TO_PATH =<a name="line.68"></a>
-<span class="sourceLineNo">069</span>      new Function&lt;File, Path&gt;() {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>        @Override<a name="line.70"></a>
-<span class="sourceLineNo">071</span>        public Path apply(File file) {<a name="line.71"></a>
-<span class="sourceLineNo">072</span>          return file == null ? null : file.getPath();<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        }<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      };<a name="line.74"></a>
-<span class="sourceLineNo">075</span><a name="line.75"></a>
-<span class="sourceLineNo">076</span>  private static ThreadPoolExecutor archiveExecutor;<a name="line.76"></a>
+<span class="sourceLineNo">055</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>/**<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * Utility class to handle the removal of HFiles (or the respective {@link HStoreFile StoreFiles})<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * for a HRegion from the {@link FileSystem}. The hfiles will be archived or deleted, depending on<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * the state of the system.<a name="line.60"></a>
+<span class="sourceLineNo">061</span> */<a name="line.61"></a>
+<span class="sourceLineNo">062</span>@InterfaceAudience.Private<a name="line.62"></a>
+<span class="sourceLineNo">063</span>public class HFileArchiver {<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class);<a name="line.64"></a>
+<span class="sourceLineNo">065</span>  private static final String SEPARATOR = ".";<a name="line.65"></a>
+<span class="sourceLineNo">066</span><a name="line.66"></a>
+<span class="sourceLineNo">067</span>  /** Number of retries in case of fs operation failure */<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  private static final int DEFAULT_RETRIES_NUMBER = 3;<a name="line.68"></a>
+<span class="sourceLineNo">069</span><a name="line.69"></a>
+<span class="sourceLineNo">070</span>  private static final Function&lt;File, Path&gt; FUNC_FILE_TO_PATH =<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      new Function&lt;File, Path&gt;() {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>        @Override<a name="line.72"></a>
+<span class="sourceLineNo">073</span>        public Path apply(File file) {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>          return file == null ? null : file.getPath();<a name="line.74"></a>
+<span class="sourceLineNo">075</span>        }<a name="line.75"></a>
+<span class="sourceLineNo">076</span>      };<a name="line.76"></a>
 <span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  private HFileArchiver() {<a name="line.78"></a>
-<span class="sourceLineNo">079</span>    // hidden ctor since this is just a util<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span>  /**<a name="line.82"></a>
-<span class="sourceLineNo">083</span>   * @return True if the Region exits in the filesystem.<a name="line.83"></a>
-<span class="sourceLineNo">084</span>   */<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public static boolean exists(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.85"></a>
-<span class="sourceLineNo">086</span>      throws IOException {<a name="line.86"></a>
-<span class="sourceLineNo">087</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    Path regionDir = FSUtils.getRegionDirFromRootDir(rootDir, info);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    return fs.exists(regionDir);<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  }<a name="line.90"></a>
-<span class="sourceLineNo">091</span><a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /**<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * Cleans up all the files for a HRegion by archiving the HFiles to the archive directory<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   * @param conf the configuration to use<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   * @param fs the file system object<a name="line.95"></a>
-<span class="sourceLineNo">096</span>   * @param info RegionInfo for region to be deleted<a name="line.96"></a>
-<span class="sourceLineNo">097</span>   */<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static void archiveRegion(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      throws IOException {<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    archiveRegion(fs, rootDir, FSUtils.getTableDir(rootDir, info.getTable()),<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      FSUtils.getRegionDirFromRootDir(rootDir, info));<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  }<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  /**<a name="line.105"></a>
-<span class="sourceLineNo">106</span>   * Remove an entire region from the table directory via archiving the region's hfiles.<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   *          the archive path)<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * @param regionDir {@link Path} to where a region is being stored (for building the archive path)<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the region was successfully deleted. &lt;tt&gt;false&lt;/tt&gt; if the filesystem<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   *         operations could not complete.<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   * @throws IOException if the request cannot be completed<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   */<a name="line.115"></a>
-<span class="sourceLineNo">116</span>  public static boolean archiveRegion(FileSystem fs, Path rootdir, Path tableDir, Path regionDir)<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      throws IOException {<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    // otherwise, we archive the files<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    // make sure we can archive<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    if (tableDir == null || regionDir == null) {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>      LOG.error("No archive directory could be found because tabledir (" + tableDir<a name="line.121"></a>
-<span class="sourceLineNo">122</span>          + ") or regiondir (" + regionDir + "was null. Deleting files instead.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      if (regionDir != null) {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        deleteRegionWithoutArchiving(fs, regionDir);<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      }<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      // we should have archived, but failed to. Doesn't matter if we deleted<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      // the archived files correctly or not.<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      return false;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    }<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>    LOG.debug("ARCHIVING {}", regionDir);<a name="line.131"></a>
+<span class="sourceLineNo">078</span>  private static ThreadPoolExecutor archiveExecutor;<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>  private HFileArchiver() {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    // hidden ctor since this is just a util<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * @return True if the Region exits in the filesystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  public static boolean exists(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      throws IOException {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    Path regionDir = FSUtils.getRegionDirFromRootDir(rootDir, info);<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    return fs.exists(regionDir);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>  }<a name="line.92"></a>
+<span class="sourceLineNo">093</span><a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /**<a name="line.94"></a>
+<span class="sourceLineNo">095</span>   * Cleans up all the files for a HRegion by archiving the HFiles to the archive directory<a name="line.95"></a>
+<span class="sourceLineNo">096</span>   * @param conf the configuration to use<a name="line.96"></a>
+<span class="sourceLineNo">097</span>   * @param fs the file system object<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   * @param info RegionInfo for region to be deleted<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   */<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  public static void archiveRegion(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.100"></a>
+<span class="sourceLineNo">101</span>      throws IOException {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    archiveRegion(fs, rootDir, FSUtils.getTableDir(rootDir, info.getTable()),<a name="line.103"></a>
+<span class="sourceLineNo">104</span>      FSUtils.getRegionDirFromRootDir(rootDir, info));<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  /**<a name="line.107"></a>
+<span class="sourceLineNo">108</span>   * Remove an entire region from the table directory via archiving the region's hfiles.<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   *          the archive path)<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * @param regionDir {@link Path} to where a region is being stored (for building the archive path)<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the region was successfully deleted. &lt;tt&gt;false&lt;/tt&gt; if the filesystem<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   *         operations could not complete.<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   * @throws IOException if the request cannot be completed<a name="line.116"></a>
+<span class="sourceLineNo">117</span>   */<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  public static boolean archiveRegion(FileSystem fs, Path rootdir, Path tableDir, Path regionDir)<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      throws IOException {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    // otherwise, we archive the files<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    // make sure we can archive<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    if (tableDir == null || regionDir == null) {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      LOG.error("No archive directory could be found because tabledir (" + tableDir<a name="line.123"></a>
+<span class="sourceLineNo">124</span>          + ") or regiondir (" + regionDir + "was null. Deleting files instead.");<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      if (regionDir != null) {<a name="line.125"></a>
+<span class="sourceLineNo">126</span>        deleteRegionWithoutArchiving(fs, regionDir);<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      }<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      // we should have archived, but failed to. Doesn't matter if we deleted<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      // the archived files correctly or not.<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      return false;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    }<a name="line.131"></a>
 <span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>    // make sure the regiondir lives under the tabledir<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    Preconditions.checkArgument(regionDir.toString().startsWith(tableDir.toString()));<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    Path regionArchiveDir = HFileArchiveUtil.getRegionArchiveDir(rootdir,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        FSUtils.getTableName(tableDir),<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        regionDir.getName());<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    // otherwise, we attempt to archive the store files<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    // build collection of just the store directories to archive<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    Collection&lt;File&gt; toArchive = new ArrayList&lt;&gt;();<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    final PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    PathFilter nonHidden = new PathFilter() {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      @Override<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      public boolean accept(Path file) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        return dirFilter.accept(file) &amp;&amp; !file.getName().startsWith(".");<a name="line.148"></a>
-<span class="sourceLineNo">149</span>      }<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    };<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    FileStatus[] storeDirs = FSUtils.listStatus(fs, regionDir, nonHidden);<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    // if there no files, we can just delete the directory and return;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    if (storeDirs == null) {<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      LOG.debug("Directory {} empty.", regionDir);<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    }<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>    // convert the files in the region to a File<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    Stream.of(storeDirs).map(getAsFile).forEachOrdered(toArchive::add);<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    LOG.debug("Archiving " + toArchive);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, regionArchiveDir, toArchive,<a name="line.161"></a>
-<span class="sourceLineNo">162</span>        EnvironmentEdgeManager.currentTime());<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    if (!failedArchive.isEmpty()) {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      throw new FailedArchiveException(<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        "Failed to archive/delete all the files for region:" + regionDir.getName() + " into " +<a name="line.165"></a>
-<span class="sourceLineNo">166</span>          regionArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.166"></a>
-<span class="sourceLineNo">167</span>        failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    // if that was successful, then we delete the region<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Archive the specified regions in parallel.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf the configuration to use<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @param rootDir {@link Path} to the root directory where hbase files are stored (for building<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   *                            the archive path)<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   *                             path)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * @param regionDirList {@link Path} to where regions are being stored (for building the archive<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   *                                  path)<a name="line.182"></a>
-<span class="sourceLineNo">183</span>   * @throws IOException if the request cannot be completed<a name="line.183"></a>
-<span class="sourceLineNo">184</span>   */<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  public static void archiveRegions(Configuration conf, FileSystem fs, Path rootDir, Path tableDir,<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    List&lt;Path&gt; regionDirList) throws IOException {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    List&lt;Future&lt;Void&gt;&gt; futures = new ArrayList&lt;&gt;(regionDirList.size());<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    for (Path regionDir: regionDirList) {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      Future&lt;Void&gt; future = getArchiveExecutor(conf).submit(() -&gt; {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        archiveRegion(fs, rootDir, tableDir, regionDir);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        return null;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>      });<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      futures.add(future);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    }<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    try {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      for (Future&lt;Void&gt; future: futures) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        future.get();<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    } catch (InterruptedException e) {<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      throw new InterruptedIOException(e.getMessage());<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    } catch (ExecutionException e) {<a name="line.201"></a>
-<span class="sourceLineNo">202</span>      throw new IOException(e.getCause());<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    }<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  }<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  private static synchronized ThreadPoolExecutor getArchiveExecutor(final Configuration conf) {<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    if (archiveExecutor == null) {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int maxThreads = conf.getInt("hbase.hfilearchiver.thread.pool.max", 8);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      archiveExecutor = Threads.getBoundedCachedThreadPool(maxThreads, 30L, TimeUnit.SECONDS,<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        getThreadFactory());<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      // Shutdown this ThreadPool in a shutdown hook<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      Runtime.getRuntime().addShutdownHook(new Thread(() -&gt; archiveExecutor.shutdown()));<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    return archiveExecutor;<a name="line.215"></a>
-<span class="sourceLineNo">216</span>  }<a name="line.216"></a>
-<span class="sourceLineNo">217</span><a name="line.217"></a>
-<span class="sourceLineNo">218</span>  // We need this method instead of Threads.getNamedThreadFactory() to pass some tests.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  // The difference from Threads.getNamedThreadFactory() is that it doesn't fix ThreadGroup for<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  // new threads. If we use Threads.getNamedThreadFactory(), we will face ThreadGroup related<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  // issues in some tests.<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static ThreadFactory getThreadFactory() {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>    return new ThreadFactory() {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      final AtomicInteger threadNumber = new AtomicInteger(1);<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>      @Override<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      public Thread newThread(Runnable r) {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>        final String name = "HFileArchiver-" + threadNumber.getAndIncrement();<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        Thread t = new Thread(r, name);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>        t.setDaemon(true);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        return t;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      }<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    };<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  }<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>   * Remove from the specified region the store files of the specified column family,<a name="line.237"></a>
-<span class="sourceLineNo">238</span>   * either by archiving them or outright deletion<a name="line.238"></a>
-<span class="sourceLineNo">239</span>   * @param fs the filesystem where the store files live<a name="line.239"></a>
-<span class="sourceLineNo">240</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.240"></a>
-<span class="sourceLineNo">241</span>   * @param parent Parent region hosting the store files<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * @param family the family hosting the store files<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  public static void archiveFamily(FileSystem fs, Configuration conf,<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      RegionInfo parent, Path tableDir, byte[] family) throws IOException {<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    Path familyDir = new Path(tableDir, new Path(parent.getEncodedName(), Bytes.toString(family)));<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    archiveFamilyByFamilyDir(fs, conf, parent, familyDir, family);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  }<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>  /**<a name="line.252"></a>
-<span class="sourceLineNo">253</span>   * Removes from the specified region the store files of the specified column family,<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * either by archiving them or outright deletion<a name="line.254"></a>
-<span class="sourceLineNo">255</span>   * @param fs the filesystem where the store files live<a name="line.255"></a>
-<span class="sourceLineNo">256</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.256"></a>
-<span class="sourceLineNo">257</span>   * @param parent Parent region hosting the store files<a name="line.257"></a>
-<span class="sourceLineNo">258</span>   * @param familyDir {@link Path} to where the family is being stored<a name="line.258"></a>
-<span class="sourceLineNo">259</span>   * @param family the family hosting the store files<a name="line.259"></a>
-<span class="sourceLineNo">260</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.260"></a>
-<span class="sourceLineNo">261</span>   */<a name="line.261"></a>
-<span class="sourceLineNo">262</span>  public static void archiveFamilyByFamilyDir(FileSystem fs, Configuration conf,<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      RegionInfo parent, Path familyDir, byte[] family) throws IOException {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    FileStatus[] storeFiles = FSUtils.listStatus(fs, familyDir);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    if (storeFiles == null) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      LOG.debug("No files to dispose of in {}, family={}", parent.getRegionNameAsString(),<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          Bytes.toString(family));<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    Collection&lt;File&gt; toArchive = Stream.of(storeFiles).map(getAsFile).collect(Collectors.toList());<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, parent, family);<a name="line.273"></a>
-<span class="sourceLineNo">274</span><a name="line.274"></a>
-<span class="sourceLineNo">275</span>    // do the actual archive<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, storeArchiveDir, toArchive,<a name="line.276"></a>
-<span class="sourceLineNo">277</span>        EnvironmentEdgeManager.currentTime());<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    if (!failedArchive.isEmpty()){<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          + Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.281"></a>
-<span class="sourceLineNo">282</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
-<span class="sourceLineNo">285</span><a name="line.285"></a>
-<span class="sourceLineNo">286</span>  /**<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   * Remove the store files, either by archiving them or outright deletion<a name="line.287"></a>
-<span class="sourceLineNo">288</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.288"></a>
-<span class="sourceLineNo">289</span>   * @param fs the filesystem where the store files live<a name="line.289"></a>
-<span class="sourceLineNo">290</span>   * @param regionInfo {@link RegionInfo} of the region hosting the store files<a name="line.290"></a>
-<span class="sourceLineNo">291</span>   * @param family the family hosting the store files<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   * @param compactedFiles files to be disposed of. No further reading of these files should be<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   *          attempted; otherwise likely to cause an {@link IOException}<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   */<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  public static void archiveStoreFiles(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      Path tableDir, byte[] family, Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      throws IOException, FailedArchiveException {<a name="line.298"></a>
-<span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    // sometimes in testing, we don't have rss, so we need to check for that<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    if (fs == null) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      LOG.warn("Passed filesystem is null, so just deleting files without archiving for {}," +<a name="line.302"></a>
-<span class="sourceLineNo">303</span>              "family={}", Bytes.toString(regionInfo.getRegionName()), Bytes.toString(family));<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      deleteStoreFilesWithoutArchiving(compactedFiles);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      return;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>    // short circuit if we don't have any files to delete<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    if (compactedFiles.isEmpty()) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>      LOG.debug("No files to dispose of, done!");<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      return;<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span><a name="line.313"></a>
-<span class="sourceLineNo">314</span>    // build the archive path<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    if (regionInfo == null || family == null) throw new IOException(<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        "Need to have a region and a family to archive from.");<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.318"></a>
-<span class="sourceLineNo">319</span><a name="line.319"></a>
-<span class="sourceLineNo">320</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.322"></a>
-<span class="sourceLineNo">323</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    }<a name="line.324"></a>
-<span class="sourceLineNo">325</span><a name="line.325"></a>
-<span class="sourceLineNo">326</span>    // otherwise we attempt to archive the store files<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    LOG.debug("Archiving compacted files.");<a name="line.327"></a>
-<span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>    // Wrap the storefile into a File<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    StoreToFile getStorePath = new StoreToFile(fs);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    Collection&lt;File&gt; storeFiles =<a name="line.331"></a>
-<span class="sourceLineNo">332</span>      compactedFiles.stream().map(getStorePath).collect(Collectors.toList());<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>    // do the actual archive<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    List&lt;File&gt; failedArchive =<a name="line.335"></a>
-<span class="sourceLineNo">336</span>      resolveAndArchive(fs, storeArchiveDir, storeFiles, EnvironmentEdgeManager.currentTime());<a name="line.336"></a>
-<span class="sourceLineNo">337</span><a name="line.337"></a>
-<span class="sourceLineNo">338</span>    if (!failedArchive.isEmpty()){<a name="line.338"></a>
-<span class="sourceLineNo">339</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.339"></a>
-<span class="sourceLineNo">340</span>          + Bytes.toString(regionInfo.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.340"></a>
-<span class="sourceLineNo">341</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.341"></a>
-<span class="sourceLineNo">342</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    }<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  /**<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   * Archive the store file<a name="line.347"></a>
-<span class="sourceLineNo">348</span>   * @param fs the filesystem where the store files live<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   * @param regionInfo region hosting the store files<a name="line.349"></a>
-<span class="sourceLineNo">350</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   * @param family the family hosting the store files<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param storeFile file to be archived<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
-<span class="sourceLineNo">356</span>  public static void archiveStoreFile(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      Path tableDir, byte[] family, Path storeFile) throws IOException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    }<a name="line.363"></a>
+<span class="sourceLineNo">133</span>    LOG.debug("ARCHIVING {}", regionDir);<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>    // make sure the regiondir lives under the tabledir<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    Preconditions.checkArgument(regionDir.toString().startsWith(tableDir.toString()));<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    Path regionArchiveDir = HFileArchiveUtil.getRegionArchiveDir(rootdir,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        FSUtils.getTableName(tableDir),<a name="line.138"></a>
+<span class="sourceLineNo">139</span>        regionDir.getName());<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    // otherwise, we attempt to archive the store files<a name="line.142"></a>
+<span class="sourceLineNo">143</span><a name="line.143"></a>
+<span class="sourceLineNo">144</span>    // build collection of just the store directories to archive<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    Collection&lt;File&gt; toArchive = new ArrayList&lt;&gt;();<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    final PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    PathFilter nonHidden = new PathFilter() {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      @Override<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      public boolean accept(Path file) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        return dirFilter.accept(file) &amp;&amp; !file.getName().startsWith(".");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      }<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    };<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    FileStatus[] storeDirs = FSUtils.listStatus(fs, regionDir, nonHidden);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    // if there no files, we can just delete the directory and return;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    if (storeDirs == null) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      LOG.debug("Directory {} empty.", regionDir);<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    }<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>    // convert the files in the region to a File<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    Stream.of(storeDirs).map(getAsFile).forEachOrdered(toArchive::add);<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    LOG.debug("Archiving " + toArchive);<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, regionArchiveDir, toArchive,<a name="line.163"></a>
+<span class="sourceLineNo">164</span>        EnvironmentEdgeManager.currentTime());<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    if (!failedArchive.isEmpty()) {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      throw new FailedArchiveException(<a name="line.166"></a>
+<span class="sourceLineNo">167</span>        "Failed to archive/delete all the files for region:" + regionDir.getName() + " into " +<a name="line.167"></a>
+<span class="sourceLineNo">168</span>          regionArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.168"></a>
+<span class="sourceLineNo">169</span>        failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    }<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    // if that was successful, then we delete the region<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Archive the specified regions in parallel.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf the configuration to use<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @param rootDir {@link Path} to the root directory where hbase files are stored (for building<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   *                            the archive path)<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive<a name="line.181"></a>
+<span class="sourceLineNo">182</span>   *                             path)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   * @param regionDirList {@link Path} to where regions are being stored (for building the archive<a name="line.183"></a>
+<span class="sourceLineNo">184</span>   *                                  path)<a name="line.184"></a>
+<span class="sourceLineNo">185</span>   * @throws IOException if the request cannot be completed<a name="line.185"></a>
+<span class="sourceLineNo">186</span>   */<a name="line.186"></a>
+<span class="sourceLineNo">187</span>  public static void archiveRegions(Configuration conf, FileSystem fs, Path rootDir, Path tableDir,<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    List&lt;Path&gt; regionDirList) throws IOException {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    List&lt;Future&lt;Void&gt;&gt; futures = new ArrayList&lt;&gt;(regionDirList.size());<a name="line.189"></a>
+<span class="sourceLineNo">190</span>    for (Path regionDir: regionDirList) {<a name="line.190"></a>
+<span class="sourceLineNo">191</span>      Future&lt;Void&gt; future = getArchiveExecutor(conf).submit(() -&gt; {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>        archiveRegion(fs, rootDir, tableDir, regionDir);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>        return null;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      });<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      futures.add(future);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    try {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      for (Future&lt;Void&gt; future: futures) {<a name="line.198"></a>
+<span class="sourceLineNo">199</span>        future.get();<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      }<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    } catch (InterruptedException e) {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>      throw new InterruptedIOException(e.getMessage());<a name="line.202"></a>
+<span class="sourceLineNo">203</span>    } catch (ExecutionException e) {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      throw new IOException(e.getCause());<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    }<a name="line.205"></a>
+<span class="sourceLineNo">206</span>  }<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>  private static synchronized ThreadPoolExecutor getArchiveExecutor(final Configuration conf) {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    if (archiveExecutor == null) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      int maxThreads = conf.getInt("hbase.hfilearchiver.thread.pool.max", 8);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      archiveExecutor = Threads.getBoundedCachedThreadPool(maxThreads, 30L, TimeUnit.SECONDS,<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        getThreadFactory());<a name="line.212"></a>
+<span class="sourceLineNo">213</span><a name="line.213"></a>
+<span class="sourceLineNo">214</span>      // Shutdown this ThreadPool in a shutdown hook<a name="line.214"></a>
+<span class="sourceLineNo">215</span>      Runtime.getRuntime().addShutdownHook(new Thread(() -&gt; archiveExecutor.shutdown()));<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    }<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    return archiveExecutor;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  }<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>  // We need this method instead of Threads.getNamedThreadFactory() to pass some tests.<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  // The difference from Threads.getNamedThreadFactory() is that it doesn't fix ThreadGroup for<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  // new threads. If we use Threads.getNamedThreadFactory(), we will face ThreadGroup related<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  // issues in some tests.<a name="line.223"></a>
+<span class="sourceLineNo">224</span>  private static ThreadFactory getThreadFactory() {<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    return new ThreadFactory() {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>      final AtomicInteger threadNumber = new AtomicInteger(1);<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>      @Override<a name="line.228"></a>
+<span class="sourceLineNo">229</span>      public Thread newThread(Runnable r) {<a name="line.229"></a>
+<span class="sourceLineNo">230</span>        final String name = "HFileArchiver-" + threadNumber.getAndIncrement();<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        Thread t = new Thread(r, name);<a name="line.231"></a>
+<span class="sourceLineNo">232</span>        t.setDaemon(true);<a name="line.232"></a>
+<span class="sourceLineNo">233</span>        return t;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      }<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    };<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  }<a name="line.236"></a>
+<span class="sourceLineNo">237</span><a name="line.237"></a>
+<span class="sourceLineNo">238</span>  /**<a name="line.238"></a>
+<span class="sourceLineNo">239</span>   * Remove from the specified region the store files of the specified column family,<a name="line.239"></a>
+<span class="sourceLineNo">240</span>   * either by archiving them or outright deletion<a name="line.240"></a>
+<span class="sourceLineNo">241</span>   * @param fs the filesystem where the store files live<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   * @param parent Parent region hosting the store files<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * @param family the family hosting the store files<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.246"></a>
+<span class="sourceLineNo">247</span>   */<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  public static void archiveFamily(FileSystem fs, Configuration conf,<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      RegionInfo parent, Path tableDir, byte[] family) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    Path familyDir = new Path(tableDir, new Path(parent.getEncodedName(), Bytes.toString(family)));<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    archiveFamilyByFamilyDir(fs, conf, parent, familyDir, family);<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  }<a name="line.252"></a>
+<span class="sourceLineNo">253</span><a name="line.253"></a>
+<span class="sourceLineNo">254</span>  /**<a name="line.254"></a>
+<span class="sourceLineNo">255</span>   * Removes from the specified region the store files of the specified column family,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>   * either by archiving them or outright deletion<a name="line.256"></a>
+<span class="sourceLineNo">257</span>   * @param fs the filesystem where the store files live<a name="line.257"></a>
+<span class="sourceLineNo">258</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.258"></a>
+<span class="sourceLineNo">259</span>   * @param parent Parent region hosting the store files<a name="line.259"></a>
+<span class="sourceLineNo">260</span>   * @param familyDir {@link Path} to where the family is being stored<a name="line.260"></a>
+<span class="sourceLineNo">261</span>   * @param family the family hosting the store files<a name="line.261"></a>
+<span class="sourceLineNo">262</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   */<a name="line.263"></a>
+<span class="sourceLineNo">264</span>  public static void archiveFamilyByFamilyDir(FileSystem fs, Configuration conf,<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      RegionInfo parent, Path familyDir, byte[] family) throws IOException {<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    FileStatus[] storeFiles = FSUtils.listStatus(fs, familyDir);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    if (storeFiles == null) {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      LOG.debug("No files to dispose of in {}, family={}", parent.getRegionNameAsString(),<a name="line.268"></a>
+<span class="sourceLineNo">269</span>          Bytes.toString(family));<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      return;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    Collection&lt;File&gt; toArchive = Stream.of(storeFiles).map(getAsFile).collect(Collectors.toList());<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, parent, family);<a name="line.275"></a>
+<span class="sourceLineNo">276</span><a name="line.276"></a>
+<span class="sourceLineNo">277</span>    // do the actual archive<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, storeArchiveDir, toArchive,<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        EnvironmentEdgeManager.currentTime());<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    if (!failedArchive.isEmpty()){<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          + Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.282"></a>
+<span class="sourceLineNo">283</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    }<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>  /**<a name="line.288"></a>
+<span class="sourceLineNo">289</span>   * Remove the store files, either by archiving them or outright deletion<a name="line.289"></a>
+<span class="sourceLineNo">290</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.290"></a>
+<span class="sourceLineNo">291</span>   * @param fs the filesystem where the store files live<a name="line.291"></a>
+<span class="sourceLineNo">292</span>   * @param regionInfo {@link RegionInfo} of the region hosting the store files<a name="line.292"></a>
+<span class="sourceLineNo">293</span>   * @param family the family hosting the store files<a name="line.293"></a>
+<span class="sourceLineNo">294</span>   * @param compactedFiles files to be disposed of. No further reading of these files should be<a name="line.294"></a>
+<span class="sourceLineNo">295</span>   *          attempted; otherwise likely to cause an {@link IOException}<a name="line.295"></a>
+<span class="sourceLineNo">296</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.296"></a>
+<span class="sourceLineNo">297</span>   */<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  public static void archiveStoreFiles(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      Path tableDir, byte[] family, Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      throws IOException {<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    archive(fs, regionInfo, family, compactedFiles, storeArchiveDir);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  }<a name="line.303"></a>
+<span class="sourceLineNo">304</span><a name="line.304"></a>
+<span class="sourceLineNo">305</span>  /**<a name="line.305"></a>
+<span class="sourceLineNo">306</span>   * Archive recovered edits using existing logic for archiving store files. This is currently only<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * relevant when &lt;b&gt;hbase.region.archive.recovered.edits&lt;/b&gt; is true, as recovered edits shouldn't<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   * be kept after replay. In theory, we could use very same method available for archiving<a name="line.308"></a>
+<span class="sourceLineNo">309</span>   * store files, but supporting WAL dir and store files on different FileSystems added the need for<a name="line.309"></a>
+<span class="sourceLineNo">310</span>   * extra validation of the passed FileSystem instance and the path where the archiving edits<a name="line.310"></a>
+<span class="sourceLineNo">311</span>   * should be placed.<a name="line.311"></a>
+<span class="sourceLineNo">312</span>   * @param conf {@link Configuration} to determine the archive directory.<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * @param fs the filesystem used for storing WAL files.<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * @param regionInfo {@link RegionInfo} a pseudo region representation for the archiving logic.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * @param family a pseudo familiy representation for the archiving logic.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   * @param replayedEdits the recovered edits to be archived.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   * @throws IOException if files can't be achived due to some internal error.<a name="line.317"></a>
+<span class="sourceLineNo">318</span>   */<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  public static void archiveRecoveredEdits(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    byte[] family, Collection&lt;HStoreFile&gt; replayedEdits)<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    throws IOException {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    String workingDir = conf.get(CommonFSUtils.HBASE_WAL_DIR, conf.get(HConstants.HBASE_DIR));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    //extra sanity checks for the right FS<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    Path path = new Path(workingDir);<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    if(path.isAbsoluteAndSchemeAuthorityNull()){<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      //no schema specified on wal dir value, so it's on same FS as StoreFiles<a name="line.326"></a>
+<span class="sourceLineNo">327</span>      path = new Path(conf.get(HConstants.HBASE_DIR));<a name="line.327"></a>
+<span class="sourceLineNo">328</span>    }<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    if(path.toUri().getScheme()!=null &amp;&amp; !path.toUri().getScheme().equals(fs.getScheme())){<a name="line.329"></a>
+<span class="sourceLineNo">330</span>      throw new IOException("Wrong file system! Should be " + path.toUri().getScheme() +<a name="line.330"></a>
+<span class="sourceLineNo">331</span>        ", but got " +  fs.getScheme());<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    }<a name="line.332"></a>
+<span class="sourceLineNo">333</span>    path = HFileArchiveUtil.getStoreArchivePathForRootDir(path, regionInfo, family);<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    archive(fs, regionInfo, family, replayedEdits, path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>  }<a name="line.335"></a>
+<span class="sourceLineNo">336</span><a name="line.336"></a>
+<span class="sourceLineNo">337</span>  private static void archive(FileSystem fs, RegionInfo regionInfo, byte[] family,<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    Collection&lt;HStoreFile&gt; compactedFiles, Path storeArchiveDir) throws IOException {<a name="line.338"></a>
+<span class="sourceLineNo">339</span>    // sometimes in testing, we don't have rss, so we need to check for that<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    if (fs == null) {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      LOG.warn("Passed filesystem is null, so just deleting files without archiving for {}," +<a name="line.341"></a>
+<span class="sourceLineNo">342</span>              "family={}", Bytes.toString(regionInfo.getRegionName()), Bytes.toString(family));<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      deleteStoreFilesWithoutArchiving(compactedFiles);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>      return;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>    // short circuit if we don't have any files to delete<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    if (compactedFiles.isEmpty()) {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      LOG.debug("No files to dispose of, done!");<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      return;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>    }<a name="line.351"></a>
+<span class="sourceLineNo">352</span><a name="line.352"></a>
+<span class="sourceLineNo">353</span>    // build the archive path<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    if (regionInfo == null || family == null) throw new IOException(<a name="line.354"></a>
+<span class="sourceLineNo">355</span>        "Need to have a region and a family to archive from.");<a name="line.355"></a>
+<span class="sourceLineNo">356</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.358"></a>
+<span class="sourceLineNo">359</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>    // otherwise we attempt to archive the store files<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    LOG.debug("Archiving compacted files.");<a name="line.363"></a>
 <span class="sourceLineNo">364</span><a name="line.364"></a>
-<span class="sourceLineNo">365</span>    // do the actual archive<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    long start = EnvironmentEdgeManager.currentTime();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    File file = new FileablePath(fs, storeFile);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    if (!resolveAndArchiveFile(storeArchiveDir, file, Long.toString(start))) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      throw new IOException("Failed to archive/delete the file for region:"<a name="line.369"></a>
-<span class="sourceLineNo">370</span>          + regionInfo.getRegionNameAsString() + ", family:" + Bytes.toString(family)<a name="line.370"></a>
-<span class="sourceLineNo">371</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.");<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    }<a name="line.372"></a>
-<span class="sourceLineNo">373</span>  }<a name="line.373"></a>
-<span class="sourceLineNo">374</span><a name="line.374"></a>
-<span class="sourceLineNo">375</span>  /**<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   * Resolve any conflict with an existing archive file via timestamp-append<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * renaming of the existing file and then archive the passed in files.<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * @param fs {@link FileSystem} on which to archive the files<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   * @param baseArchiveDir base archive directory to store the files. If any of<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   *          the files to archive are directories, will append the name of the<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   *          directory to the base archive directory name, creating a parallel<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   *          structure.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * @param toArchive files/directories that need to be archvied<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param start time the archiving started - used for resolving archive<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   *          conflicts.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * @return the list of failed to archive files.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @throws IOException if an unexpected file operation exception occurred<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  private static List&lt;File&gt; resolveAndArchive(FileSystem fs, Path baseArchiveDir,<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      Collection&lt;File&gt; toArchive, long start) throws IOException {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    // short circuit if no files to move<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    if (toArchive.isEmpty()) {<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      return Collections.emptyList();<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
-<span class="sourceLineNo">395</span><a name="line.395"></a>
-<span class="sourceLineNo">396</span>    LOG.trace("Moving files to the archive directory {}", baseArchiveDir);<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>    // make sure the archive directory exists<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    if (!fs.exists(baseArchiveDir)) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>      if (!fs.mkdirs(baseArchiveDir)) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>        throw new IOException("Failed to create the archive directory:" + baseArchiveDir<a name="line.401"></a>
-<span class="sourceLineNo">402</span>            + ", quitting archive attempt.");<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      }<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      LOG.trace("Created archive directory {}", baseArchiveDir);<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    }<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    List&lt;File&gt; failures = new ArrayList&lt;&gt;();<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    String startTime = Long.toString(start);<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    for (File file : toArchive) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      // if its a file archive it<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      try {<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        LOG.trace("Archiving {}", file);<a name="line.412"></a>
-<span class="sourceLineNo">413</span>        if (file.isFile()) {<a name="line.413"></a>
-<span class="sourceLineNo">414</span>          // attempt to archive the file<a name="line.414"></a>
-<span class="sourceLineNo">415</span>          if (!resolveAndArchiveFile(baseArchiveDir, file, startTime)) {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            LOG.warn("Couldn't archive " + file + " into backup directory: " + baseArchiveDir);<a name="line.416"></a>
-<span class="sourceLineNo">417</span>            failures.add(file);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>          }<a name="line.418"></a>
-<span class="sourceLineNo">419</span>        } else {<a name="line.419"></a>
-<span class="sourceLineNo">420</span>          // otherwise its a directory and we need to archive all files<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          LOG.trace("{} is a directory, archiving children files", file);<a name="line.421"></a>
-<span class="sourceLineNo">422</span>          // so we add the directory name to the one base archive<a name="line.422"></a>
-<span class="sourceLineNo">423</span>          Path parentArchiveDir = new Path(baseArchiveDir, file.getName());<a name="line.423"></a>
-<span class="sourceLineNo">424</span>          // and then get all the files from that directory and attempt to<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          // archive those too<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          Collection&lt;File&gt; children = file.getChildren();<a name="line.426"></a>
-<span class="sourceLineNo">427</span>          failures.addAll(resolveAndArchive(fs, parentArchiveDir, children, start));<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        }<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      } catch (IOException e) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>        LOG.warn("Failed to archive {}", file, e);<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        failures.add(file);<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      }<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    return failures;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  }<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>  /**<a name="line.437"></a>
-<span class="sourceLineNo">438</span>   * Attempt to archive the passed in file to the archive directory.<a name="line.438"></a>
-<span class="sourceLineNo">439</span>   * &lt;p&gt;<a name="line.439"></a>
-<span class="sourceLineNo">440</span>   * If the same file already exists in the archive, it is moved to a timestamped directory under<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   * the archive directory and the new file is put in its place.<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * @param archiveDir {@link Path} to the directory that stores the archives of the hfiles<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   * @param currentFile {@link Path} to the original HFile that will be archived<a name="line.443"></a>
-<span class="sourceLineNo">444</span>   * @param archiveStartTime time the archiving started, to resolve naming conflicts<a name="line.444"></a>
-<span class="sourceLineNo">445</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the file is successfully archived. &lt;tt&gt;false&lt;/tt&gt; if there was a<a name="line.445"></a>
-<span class="sourceLineNo">446</span>   *         problem, but the operation still completed.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>   * @throws IOException on failure to complete {@link FileSystem} operations.<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   */<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  private static boolean resolveAndArchiveFile(Path archiveDir, File currentFile,<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      String archiveStartTime) throws IOException {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>    // build path as it should be in the archive<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    String filename = currentFile.getName();<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    Path archiveFile = new Path(archiveDir, filename);<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    FileSystem fs = currentFile.getFileSystem();<a name="line.454"></a>
-<span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>    // if the file already exists in the archive, move that one to a timestamped backup. This is a<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    // really, really unlikely situtation, where we get the same name for the existing file, but<a name="line.457"></a>
-<span class="sourceLineNo">458</span>    // is included just for that 1 in trillion chance.<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    if (fs.exists(archiveFile)) {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>      LOG.debug("{} already exists in archive, moving to timestamped backup and " +<a name="line.460"></a>
-<span class="sourceLineNo">461</span>          "overwriting current.", archiveFile);<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>      // move the archive file to the stamped backup<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      Path backedupArchiveFile = new Path(archiveDir, filename + SEPARATOR + archiveStartTime);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      if (!fs.rename(archiveFile, backedupArchiveFile)) {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        LOG.error("Could not rename archive file to backup: " + backedupArchiveFile<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            + ", deleting existing file in favor of newer.");<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        // try to delete the exisiting file, if we can't rename it<a name="line.468"></a>
-<span class="sourceLineNo">469</span>        if (!fs.delete(archiveFile, false)) {<a name="line.469"></a>
-<span class="sourceLineNo">470</span>          throw new IOException("Couldn't delete existing archive file (" + archiveFile<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + ") or rename it to the backup file (" + backedupArchiveFile<a name="line.471"></a>
-<span class="sourceLineNo">472</span>              + ") to make room for similarly named file.");<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>      }<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      LOG.debug("Backed up archive file from " + archiveFile);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    }<a name="line.476"></a>
-<span class="sourceLineNo">477</span><a name="line.477"></a>
-<span class="sourceLineNo">478</span>    LOG.trace("No existing file in archive for {}, free to archive original file.", archiveFile);<a name="line.478"></a>
-<span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>    // at this point, we should have a free spot for the archive file<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    boolean success = false;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    for (int i = 0; !success &amp;&amp; i &lt; DEFAULT_RETRIES_NUMBER; ++i) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      if (i &gt; 0) {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>        // Ensure that the archive directory exists.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        // The previous "move to archive" operation has failed probably because<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        // the cleaner has removed our archive directory (HBASE-7643).<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        // (we're in a retry loop, so don't worry too much about the exception)<a name="line.487"></a>
-<span class="sourceLineNo">488</span>        try {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>          if (!fs.exists(archiveDir)) {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            if (fs.mkdirs(archiveDir)) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>              LOG.debug("Created archive directory {}", archiveDir);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>            }<a name="line.492"></a>
-<span class="sourceLineNo">493</span>          }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        } catch (IOException e) {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          LOG.warn("Failed to create directory {}", archiveDir, e);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>        }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      }<a name="line.497"></a>
+<span class="sourceLineNo">365</span>    // Wrap the storefile into a File<a name="line.365"></a>
+<span class="sourceLineNo">366</span>    StoreToFile getStorePath = new StoreToFile(fs);<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    Collection&lt;File&gt; storeFiles =<a name="line.367"></a>
+<span class="sourceLineNo">368</span>      compactedFiles.stream().map(getStorePath).collect(Collectors.toList());<a name="line.368"></a>
+<span class="sourceLineNo">369</span><a name="line.369"></a>
+<span class="sourceLineNo">370</span>    // do the actual archive<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    List&lt;File&gt; failedArchive =<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      resolveAndArchive(fs, storeArchiveDir, storeFiles, EnvironmentEdgeManager.currentTime());<a name="line.372"></a>
+<span class="sourceLineNo">373</span><a name="line.373"></a>
+<span class="sourceLineNo">374</span>    if (!failedArchive.isEmpty()){<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.375"></a>
+<span class="sourceLineNo">376</span>          + Bytes.toString(regionInfo.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.376"></a>
+<span class="sourceLineNo">377</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.377"></a>
+<span class="sourceLineNo">378</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   * Archive the store file<a name="line.383"></a>
+<span class="sourceLineNo">384</span>   * @param fs the filesystem where the store files live<a name="line.384"></a>
+<span class="sourceLineNo">385</span>   * @param regionInfo region hosting the store files<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.387"></a>
+<span class="sourceLineNo">388</span>   * @param family the family hosting the store files<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * @param storeFile file to be archived<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   */<a name="line.391"></a>
+<span class="sourceLineNo">392</span>  public static void archiveStoreFile(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      Path tableDir, byte[] family, Path storeFile) throws IOException {<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.396"></a>
+<span class="sourceLineNo">397</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.397"></a>
+<span class="sourceLineNo">398</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    }<a name="line.399"></a>
+<span class="sourceLineNo">400</span><a name="line.400"></a>
+<span class="sourceLineNo">401</span>    // do the actual archive<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    long start = EnvironmentEdgeManager.currentTime();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>    File file = new FileablePath(fs, storeFile);<a name="line.403"></a>
+<span class="sourceLineNo">404</span>    if (!resolveAndArchiveFile(storeArchiveDir, file, Long.toString(start))) {<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      throw new IOException("Failed to archive/delete the file for region:"<a name="line.405"></a>
+<span class="sourceLineNo">406</span>          + regionInfo.getRegionNameAsString() + ", family:" + Bytes.toString(family)<a name="line.406"></a>
+<span class="sourceLineNo">407</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.");<a name="line.407"></a>
+<span class="sourceLineNo">408</span>    }<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  }<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>  /**<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * Resolve any conflict with an existing archive file via timestamp-append<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   * renaming of the existing file and then archive the passed in files.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @param fs {@link FileSystem} on which to archive the files<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * @param baseArchiveDir base archive directory to store the files. If any of<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   *          the files to archive are directories, will append the name of the<a name="line.416"></a>
+<span class="sourceLineNo">417</span>   *          directory to the base archive directory name, creating a parallel<a name="line.417"></a>
+<span class="sourceLineNo">418</span>   *          structure.<a name="line.418"></a>
+<span class="sourceLineNo">419</span>   * @param toArchive files/directories that need to be archvied<a name="line.419"></a>
+<span class="sourceLineNo">420</span>   * @param start time the archiving started - used for resolving archive<a name="line.420"></a>
+<span class="sourceLineNo">421</span>   *          conflicts.<a name="line.421"></a>
+<span class="sourceLineNo">422</span>   * @return the list of failed to archive files.<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * @throws IOException if an unexpected file operation exception occurred<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   */<a name="line.424"></a>
+<span class="sourceLineNo">425</span>  private static List&lt;File&gt; resolveAndArchive(FileSystem fs, Path baseArchiveDir,<a name="line.425"></a>
+<span class="sourceLineNo">426</span>      Collection&lt;File&gt; toArchive, long start) throws IOException {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    // short circuit if no files to move<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    if (toArchive.isEmpty()) {<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      return Collections.emptyList();<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    }<a name="line.430"></a>
+<span class="sourceLineNo">431</span><a name="line.431"></a>
+<span class="sourceLineNo">432</span>    LOG.trace("Moving files to the archive directory {}", baseArchiveDir);<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    // make sure the archive directory exists<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    if (!fs.exists(baseArchiveDir)) {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      if (!fs.mkdirs(baseArchiveDir)) {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        throw new IOException("Failed to create the archive directory:" + baseArchiveDir<a name="line.437"></a>
+<span class="sourceLineNo">438</span>            + ", quitting archive attempt.");<a name="line.438"></a>
+<span class="sourceLineNo">439</span>      }<a name="line.439"></a>
+<span class="sourceLineNo">440</span>      LOG.trace("Created archive directory {}", baseArchiveDir);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>    List&lt;File&gt; failures = new ArrayList&lt;&gt;();<a name="line.443"></a>
+<span class="sourceLineNo">444</span>    String startTime = Long.toString(start);<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    for (File file : toArchive) {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      // if its a file archive it<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      try {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>        LOG.trace("Archiving {}", file);<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        if (file.isFile()) {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>          // attempt to archive the file<a name="line.450"></a>
+<span class="sourceLineNo">451</span>          if (!resolveAndArchiveFile(baseArchiveDir, file, startTime)) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>            LOG.warn("Couldn't archive " + file + " into backup directory: " + baseArchiveDir);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>            failures.add(file);<a name="line.453"></a>
+<span class="sourceLineNo">454</span>          }<a name="line.454"></a>
+<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
+<span class="sourceLineNo">456</span>          // otherwise its a directory and we need to archive all files<a name="line.456"></a>
+<span class="sourceLineNo">457</span>          LOG.trace("{} is a directory, archiving children files", file);<a name="line.457"></a>
+<span class="sourceLineNo">458</span>          // so we add the directory name to the one base archive<a name="line.458"></a>
+<span class="sourceLineNo">459</span>          Path parentArchiveDir = new Path(baseArchiveDir, file.getName());<a name="line.459"></a>
+<span class="sourceLineNo">460</span>          // and then get all the files from that directory and attempt to<a name="line.460"></a>
+<span class="sourceLineNo">461</span>          // archive those too<a name="line.461"></a>
+<span class="sourceLineNo">462</span>          Collection&lt;File&gt; children = file.getChildren();<a name="line.462"></a>
+<span class="sourceLineNo">463</span>          failures.addAll(resolveAndArchive(fs, parentArchiveDir, children, start));<a name="line.463"></a>
+<span class="sourceLineNo">464</span>        }<a name="line.464"></a>
+<span class="sourceLineNo">465</span>      } catch (IOException e) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>        LOG.warn("Failed to archive {}", file, e);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        failures.add(file);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>      }<a name="line.468"></a>
+<span class="sourceLineNo">469</span>    }<a name="line.469"></a>
+<span class="sourceLineNo">470</span>    return failures;<a name="line.470"></a>
+<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  /**<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   * Attempt to archive the passed in file to the archive directory.<a name="line.474"></a>
+<span class="sourceLineNo">475</span>   * &lt;p&gt;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>   * If the same file already exists in the archive, it is moved to a timestamped directory under<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   * the archive directory and the new file is put in its place.<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * @param archiveDir {@link Path} to the directory that stores the archives of the hfiles<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @param currentFile {@link Path} to the original HFile that will be archived<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   * @param archiveStartTime time the archiving started, to resolve naming conflicts<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the file is successfully archived. &lt;tt&gt;false&lt;/tt&gt; if there was a<a name="line.481"></a>
+<span class="sourceLineNo">482</span>   *         problem, but the operation still completed.<a name="line.482"></a>
+<span class="sourceLineNo">483</span>   * @throws IOException on failure to complete {@link FileSystem} operations.<a name="line.483"></a>
+<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
+<span class="sourceLineNo">485</span>  private static boolean resolveAndArchiveFile(Path archiveDir, File currentFile,<a name="line.485"></a>
+<span class="sourceLineNo">486</span>      String archiveStartTime) throws IOException {<a name="line.486"></a>
+<span class="sourceLineNo">487</span>    // build path as it should be in the archive<a name="line.487"></a>
+<span class="sourceLineNo">488</span>    String filename = currentFile.getName();<a name="line.488"></a>
+<span class="sourceLineNo">489</span>    Path archiveFile = new Path(archiveDir, filename);<a name="line.489"></a>
+<span class="sourceLineNo">490</span>    FileSystem fs = currentFile.getFileSystem();<a name="line.490"></a>
+<span class="sourceLineNo">491</span><a name="line.491"></a>
+<span class="sourceLineNo">492</span>    // if the file already exists in the archive, move that one to a timestamped backup. This is a<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    // really, really unlikely situtation, where we get the same name for the existing file, but<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    // is included just for that 1 in trillion chance.<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    if (fs.exists(archiveFile)) {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>      LOG.debug("{} already exists in archive, moving to timestamped backup and " +<a name="line.496"></a>
+<span class="sourceLineNo">497</span>          "overwriting current.", archiveFile);<a name="line.497"></a>
 <span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>      try {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        success = currentFile.moveAndClose(archiveFile);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      } catch (FileNotFoundException fnfe) {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        LOG.warn("Failed to archive " + currentFile +<a name="line.502"></a>
-<span class="sourceLineNo">503</span>            " because it does not exist! Skipping and continuing on.", fnfe);<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        success = true;<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      } catch (IOException e) {<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        LOG.warn("Failed to archive " + currentFile + " on try #" + i, e);<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        success = false;<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      }<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    if (!success) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      LOG.error("Failed to archive " + currentFile);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return false;<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
+<span class="sourceLineNo">499</span>      // move the archive file to the stamped backup<a name="line.499"></a>
+<span class="sourceLineNo">500</span>      Path backedupArchiveFile = new Path(archiveDir, filename + SEPARATOR + archiveStartTime);<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      if (!fs.rename(archiveFile, backedupArchiveFile)) {<a name="line.501"></a>
+<span class="sourceLineNo">502</span>        LOG.error("Could not rename archive file to backup: " + backedupArchiveFile<a name="line.502"></a>
+<span class="sourceLineNo">503</span>            + ", deleting existing file in favor of newer.");<a name="line.503"></a>
+<span class="sourceLineNo">504</span>        // try to delete the exisiting file, if we can't rename it<a name="line.504"></a>
+<span class="sourceLineNo">505</span>        if (!fs.delete(archiveFile, false)) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>          throw new IOException("Couldn't delete existing archive file (" + archiveFile<a name="line.506"></a>
+<span class="sourceLineNo">507</span>              + ") or rename it to the backup file (" + backedupArchiveFile<a name="line.507"></a>
+<span class="sourceLineNo">508</span>              + ") to make room for similarly named file.");<a name="line.508"></a>
+<span class="sourceLineNo">509</span>        }<a name="line.509"></a>
+<span class="sourceLineNo">510</span>      }<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      LOG.debug("Backed up archive file from " + archiveFile);<a name="line.511"></a>
+<span class="sourceLineNo">512</span>    }<a name="line.512"></a>
+<span class="sourceLineNo">513</span><a name="line.513"></a>
+<span class="sourceLineNo">514</span>    LOG.trace("No existing file in archive for {}, free to archive original file.", archiveFile);<a name="line.514"></a>
 <span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    LOG.debug("Archived from {} to {}", currentFile, archiveFile);<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    return true;<a name="line.517"></a>
-<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
-<span class="sourceLineNo">519</span><a name="line.519"></a>
-<span class="sourceLineNo">520</span>  /**<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * Without regard for backup, delete a region. Should be used with caution.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   * @param regionDir {@link Path} to the region to be deleted.<a name="line.522"></a>
-<span class="sourceLineNo">523</span>   * @param fs FileSystem from which to delete the region<a name="line.523"></a>
-<span class="sourceLineNo">524</span>   * @return &lt;tt&gt;true&lt;/tt&gt; on successful deletion, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * @throws IOException on filesystem operation failure<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   */<a name="line.526"></a>
-<span class="sourceLineNo">527</span>  private static boolean deleteRegionWithoutArchiving(FileSystem fs, Path regionDir)<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      throws IOException {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    if (fs.delete(regionDir, true)) {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      LOG.debug("Deleted {}", regionDir);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      return true;<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    }<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    LOG.debug("Failed to delete directory {}", regionDir);<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    return false;<a name="line.534"></a>
-<span class="sourceLineNo">535</span>  }<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>  /**<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * Just do a simple delete of the given store files<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * &lt;p&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * A best effort is made to delete each of the files, rather than bailing on the first failure.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   * &lt;p&gt;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>   * @param compactedFiles store files to delete from the file system.<a name="line.542"></a>
-<span class="sourceLineNo">543</span>   * @throws IOException if a file cannot be deleted. All files will be attempted to deleted before<a name="line.543"></a>
-<span class="sourceLineNo">544</span>   *           throwing the exception, rather than failing at the first file.<a name="line.544"></a>
-<span class="sourceLineNo">545</span>   */<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  private static void deleteStoreFilesWithoutArchiving(Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      throws IOException {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    LOG.debug("Deleting files without archiving.");<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    List&lt;IOException&gt; errors = new ArrayList&lt;&gt;(0);<a name="line.549"></a>
-<span class="sourceLineNo">550</span>    for (HStoreFile hsf : compactedFiles) {<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      try {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>        hsf.deleteStoreFile();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      } catch (IOException e) {<a name="line.553"></a>
-<span class="sourceLineNo">554</span>        LOG.error("Failed to delete {}", hsf.getPath());<a name="line.554"></a>
-<span class="sourceLineNo">555</span>        errors.add(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    }<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    if (errors.size() &gt; 0) {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      throw MultipleIOException.createIOException(errors);<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>  }<a name="line.561"></a>
-<span class="sourceLineNo">562</span><a name="line.562"></a>
-<span class="sourceLineNo">563</span>  /**<a name="line.563"></a>
-<span class="sourceLineNo">564</span>   * Adapt a type to match the {@link File} interface, which is used internally for handling<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * archival/removal of files<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param &lt;T&gt; type to adapt to the {@link File} interface<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  private static abstract class FileConverter&lt;T&gt; implements Function&lt;T, File&gt; {<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    protected final FileSystem fs;<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    public FileConverter(FileSystem fs) {<a name="line.571"></a>
-<span class="sourceLineNo">572</span>      this.fs = fs;<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    }<a name="line.573"></a>
-<span class="sourceLineNo">574</span>  }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>  /**<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   * Convert a FileStatus to something we can manage in the archiving<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   */<a name="line.578"></a>
-<span class="sourceLineNo">579</span>  private static class FileStatusConverter extends FileConverter&lt;FileStatus&gt; {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    public FileStatusConverter(FileSystem fs) {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      super(fs);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>    }<a name="line.582"></a>
-<span class="sourceLineNo">583</span><a name="line.583"></a>
-<span class="sourceLineNo">584</span>    @Override<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    public File apply(FileStatus input) {<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      return new FileablePath(fs, input.getPath());<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    }<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  }<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>  /**<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * Convert the {@link HStoreFile} into something we can manage in the archive<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   * methods<a name="line.592"></a>
-<span class="sourceLineNo">593</span>   */<a name="line.593"></a>
-<span class="sourceLineNo">594</span>  private static class StoreToFile extends FileConverter&lt;HStoreFile&gt; {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    public StoreToFile(FileSystem fs) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      super(fs);<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    }<a name="line.597"></a>
+<span class="sourceLineNo">516</span>    // at this point, we should have a free spot for the archive file<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    boolean success = false;<a name="line.517"></a>
+<span class="sourceLineNo">518</span>    for (int i = 0; !success &amp;&amp; i &lt; DEFAULT_RETRIES_NUMBER; ++i) {<a name="line.518"></a>
+<span class="sourceLineNo">519</span>      if (i &gt; 0) {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>        // Ensure that the archive directory exists.<a name="line.520"></a>
+<span class="sourceLineNo">521</span>        // The previous "move to archive" operation has failed probably because<a name="line.521"></a>
+<span class="sourceLineNo">522</span>        // the cleaner has removed our archive directory (HBASE-7643).<a name="line.522"></a>
+<span class="sourceLineNo">523</span>        // (we're in a retry loop, so don't worry too much about the exception)<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          if (!fs.exists(archiveDir)) {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>            if (fs.mkdirs(archiveDir)) {<a name="line.526"></a>
+<span class="sourceLineNo">527</span>              LOG.debug("Created archive directory {}", archiveDir);<a name="line.527"></a>
+<span class="sourceLineNo">528</span>            }<a name="line.528"></a>
+<span class="sourceLineNo">529</span>          }<a name="line.529"></a>
+<span class="sourceLineNo">530</span>        } catch (IOException e) {<a name="line.530"></a>
+<span class="sourceLineNo">531</span>          LOG.warn("Failed to create directory {}", archiveDir, e);<a name="line.531"></a>
+<span class="sourceLineNo">532</span>        }<a name="line.532"></a>
+<span class="sourceLineNo">533</span>      }<a name="line.533"></a>
+<span class="sourceLineNo">534</span><a name="line.534"></a>
+<span class="sourceLineNo">535</span>      try {<a name="line.535"></a>
+<span class="sourceLineNo">536</span>        success = currentFile.moveAndClose(archiveFile);<a name="line.536"></a>
+<span class="sourceLineNo">537</span>      } catch (FileNotFoundException fnfe) {<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        LOG.warn("Failed to archive " + currentFile +<a name="line.538"></a>
+<span class="sourceLineNo">539</span>            " because it does not exist! Skipping and continuing on.", fnfe);<a name="line.539"></a>
+<span class="sourceLineNo">540</span>        success = true;<a name="line.540"></a>
+<span class="sourceLineNo">541</span>      } catch (IOException e) {<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        LOG.warn("Failed to archive " + currentFile + " on try #" + i, e);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>        success = false;<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      }<a name="line.544"></a>
+<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
+<span class="sourceLineNo">546</span><a name="line.546"></a>
+<span class="sourceLineNo">547</span>    if (!success) {<a name="line.547"></a>
+<span class="sourceLineNo">548</span>      LOG.error("Failed to archive " + currentFile);<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      return false;<a name="line.549"></a>
+<span class="sourceLineNo">550</span>    }<a name="line.550"></a>
+<span class="sourceLineNo">551</span><a name="line.551"></a>
+<span class="sourceLineNo">552</span>    LOG.debug("Archived from {} to {}", currentFile, archiveFile);<a name="line.552"></a>
+<span class="sourceLineNo">553</span>    return true;<a name="line.553"></a>
+<span class="sourceLineNo">554</span>  }<a name="line.554"></a>
+<span class="sourceLineNo">555</span><a name="line.555"></a>
+<span class="sourceLineNo">556</span>  /**<a name="line.556"></a>
+<span class="sourceLineNo">557</span>   * Without regard for backup, delete a region. Should be used with caution.<a name="line.557"></a>
+<span class="sourceLineNo">558</span>   * @param regionDir {@link Path} to the region to be deleted.<a name="line.558"></a>
+<span class="sourceLineNo">559</span>   * @param fs FileSystem from which to delete the region<a name="line.559"></a>
+<span class="sourceLineNo">560</span>   * @return &lt;tt&gt;true&lt;/tt&gt; on successful deletion, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.560"></a>
+<span class="sourceLineNo">561</span>   * @throws IOException on filesystem operation failure<a name="line.561"></a>
+<span class="sourceLineNo">562</span>   */<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  private static boolean deleteRegionWithoutArchiving(FileSystem fs, Path regionDir)<a name="line.563"></a>
+<span class="sourceLineNo">564</span>      throws IOException {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>    if (fs.delete(regionDir, true)) {<a name="line.565"></a>
+<span class="sourceLineNo">566</span>      LOG.debug("Deleted {}", regionDir);<a name="line.566"></a>
+<span class="sourceLineNo">567</span>      return true;<a name="line.567"></a>
+<span class="sourceLineNo">568</span>    }<a name="line.568"></a>
+<span class="sourceLineNo">569</span>    LOG.debug("Failed to delete directory {}", regionDir);<a name="line.569"></a>
+<span class="sourceLineNo">570</span>    return false;<a name="line.570"></a>
+<span class="sourceLineNo">571</span>  }<a name="line.571"></a>
+<span class="sourceLineNo">572</span><a name="line.572"></a>
+<span class="sourceLineNo">573</span>  /**<a name="line.573"></a>
+<span class="sourceLineNo">574</span>   * Just do a simple delete of the given store files<a name="line.574"></a>
+<span class="sourceLineNo">575</span>   * &lt;p&gt;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>   * A best effort is made to delete each of the files, rather than bailing on the first failure.<a name="line.576"></a>
+<span class="sourceLineNo">577</span>   * &lt;p&gt;<a name="line.577"></a>
+<span class="sourceLineNo">578</span>   * @param compactedFiles store files to delete from the file system.<a name="line.578"></a>
+<span class="sourceLineNo">579</span>   * @throws IOException if a file cannot be deleted. All files will be attempted to deleted before<a name="line.579"></a>
+<span class="sourceLineNo">580</span>   *           throwing the exception, rather than failing at the first file.<a name="line.580"></a>
+<span class="sourceLineNo">581</span>   */<a name="line.581"></a>
+<span class="sourceLineNo">582</span>  private static void deleteStoreFilesWithoutArchiving(Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.582"></a>
+<span class="sourceLineNo">583</span>      throws IOException {<a name="line.583"></a>
+<span class="sourceLineNo">584</span>    LOG.debug("Deleting files without archiving.");<a name="line.584"></a>
+<span class="sourceLineNo">585</span>    List&lt;IOException&gt; errors = new ArrayList&lt;&gt;(0);<a name="line.585"></a>
+<span class="sourceLineNo">586</span>    for (HStoreFile hsf : compactedFiles) {<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
+<span class="sourceLineNo">588</span>        hsf.deleteStoreFile();<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      } catch (IOException e) {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>        LOG.error("Failed to delete {}", hsf.getPath());<a name="line.590"></a>
+<span class="sourceLineNo">591</span>        errors.add(e);<a name="line.591"></a>
+<span class="sourceLineNo">592</span>      }<a name="line.592"></a>
+<span class="sourceLineNo">593</span>    }<a name="line.593"></a>
+<span class="sourceLineNo">594</span>    if (errors.size() &gt; 0) {<a name="line.594"></a>
+<span class="sourceLineNo">595</span>      throw MultipleIOException.createIOException(errors);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>    }<a name="line.596"></a>
+<span class="sourceLineNo">597</span>  }<a name="line.597"></a>
 <span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>    @Override<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    public File apply(HStoreFile input) {<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      return new FileableStoreFile(fs, input);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    }<a name="line.602"></a>
-<span class="sourceLineNo">603</span>  }<a name="line.603"></a>
-<span class="sourceLineNo">604</span><a name="line.604"></a>
-<span class="sourceLineNo">605</span>  /**<a name="line.605"></a>
-<span class="sourceLineNo">606</span>   * Wrapper to handle file operations uniformly<a name="line.606"></a>
-<span class="sourceLineNo">607</span>   */<a name="line.607"></a>
-<span class="sourceLineNo">608</span>  private static abstract class File {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    protected final FileSystem fs;<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>    public File(FileSystem fs) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      this.fs = fs;<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>    /**<a name="line.615"></a>
-<span class="sourceLineNo">616</span>     * Delete the file<a name="line.616"></a>
-<span class="sourceLineNo">617</span>     * @throws IOException on failure<a name="line.617"></a>
-<span class="sourceLineNo">618</span>     */<a name="line.618"></a>
-<span class="sourceLineNo">619</span>    abstract void delete() throws IOException;<a name="line.619"></a>
-<span class="sourceLineNo">620</span><a name="line.620"></a>
-<span class="sourceLineNo">621</span>    /**<a name="line.621"></a>
-<span class="sourceLineNo">622</span>     * Check to see if this is a file or a directory<a name="line.622"></a>
-<span class="sourceLineNo">623</span>     * @return &lt;tt&gt;true&lt;/tt&gt; if it is a file, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.623"></a>
-<span class="sourceLineNo">624</span>     * @throws IOException on {@link FileSystem} connection error<a name="line.624"></a>
-<span class="sourceLineNo">625</span>     */<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    abstract boolean isFile() throws IOException;<a name="line.626"></a>
-<span class="sourceLineNo">627</span><a name="line.627"></a>
-<span class="sourceLineNo">628</span>    /**<a name="line.628"></a>
-<span class="sourceLineNo">629</span>     * @return if this is a directory, returns all the children in the<a name="line.629"></a>
-<span class="sourceLineNo">630</span>     *         directory, otherwise returns an empty list<a name="line.630"></a>
-<span class="sourceLineNo">631</span>     * @throws IOException<a name="line.631"></a>
-<span class="sourceLineNo">632</span>     */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    abstract Collection&lt;File&gt; getChildren() throws IOException;<a name="line.633"></a>
+<span class="sourceLineNo">599</span>  /**<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * Adapt a type to match the {@link File} interface, which is used internally for handling<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   * archival/removal of files<a name="line.601"></a>
+<span class="sourceLineNo">602</span>   * @param &lt;T&gt; type to adapt to the {@link File} interface<a name="line.602"></a>
+<span class="sourceLineNo">603</span>   */<a name="line.603"></a>
+<span class="sourceLineNo">604</span>  private static abstract class FileConverter&lt;T&gt; implements Function&lt;T, File&gt; {<a name="line.604"></a>
+<span class="sourceLineNo">605</span>    protected final FileSystem fs;<a name="line.605"></a>
+<span class="sourceLineNo">606</span><a name="line.606"></a>
+<span class="sourceLineNo">607</span>    public FileConverter(FileSystem fs) {<a name="line.607"></a>
+<span class="sourceLineNo">608</span>      this.fs = fs;<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
+<span class="sourceLineNo">610</span>  }<a name="line.610"></a>
+<span class="sourceLineNo">611</span><a name="line.611"></a>
+<span class="sourceLineNo">612</span>  /**<a name="line.612"></a>
+<span class="sourceLineNo">613</span>   * Convert a FileStatus to something we can manage in the archiving<a name="line.613"></a>
+<span class="sourceLineNo">614</span>   */<a name="line.614"></a>
+<span class="sourceLineNo">615</span>  private static class FileStatusConverter extends FileConverter&lt;FileStatus&gt; {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>    public FileStatusConverter(FileSystem fs) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      super(fs);<a name="line.617"></a>
+<span class="sourceLineNo">618</span>    }<a name="line.618"></a>
+<span class="sourceLineNo">619</span><a name="line.619"></a>
+<span class="sourceLineNo">620</span>    @Override<a name="line.620"></a>
+<span class="sourceLineNo">621</span>    public File apply(FileStatus input) {<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      return new FileablePath(fs, input.getPath());<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
+<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>  /**<a name="line.626"></a>
+<span class="sourceLineNo">627</span>   * Convert the {@link HStoreFile} into something we can manage in the archive<a name="line.627"></a>
+<span class="sourceLineNo">628</span>   * methods<a name="line.628"></a>
+<span class="sourceLineNo">629</span>   */<a name="line.629"></a>
+<span class="sourceLineNo">630</span>  private static class StoreToFile extends FileConverter&lt;HStoreFile&gt; {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>    public StoreToFile(FileSystem fs) {<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      super(fs);<a name="line.632"></a>
+<span class="sourceLineNo">633</span>    }<a name="line.633"></a>
 <span class="sourceLineNo">634</span><a name="line.634"></a>
-<span class="sourceLineNo">635</span>    /**<a name="line.635"></a>
-<span class="sourceLineNo">636</span>     * close any outside readers of the file<a name="line.636"></a>
-<span class="sourceLineNo">637</span>     * @throws IOException<a name="line.637"></a>
-<span class="sourceLineNo">638</span>     */<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    abstract void close() throws IOException;<a name="line.639"></a>
+<span class="sourceLineNo">635</span>    @Override<a name="line.635"></a>
+<span class="sourceLineNo">636</span>    public File apply(HStoreFile input) {<a name="line.636"></a>
+<span class="sourceLineNo">637</span>      return new FileableStoreFile(fs, input);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    }<a name="line.638"></a>
+<span class="sourceLineNo">639</span>  }<a name="line.639"></a>
 <span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    /**<a name="line.641"></a>
-<span class="sourceLineNo">642</span>     * @return the name of the file (not the full fs path, just the individual<a name="line.642"></a>
-<span class="sourceLineNo">643</span>     *         file name)<a name="line.643"></a>
-<span class="sourceLineNo">644</span>     */<a name="line.644"></a>
-<span class="sourceLineNo">645</span>    abstract String getName();<a name="line.645"></a>
+<span class="sourceLineNo">641</span>  /**<a name="line.641"></a>
+<span class="sourceLineNo">642</span>   * Wrapper to handle file operations uniformly<a name="line.642"></a>
+<span class="sourceLineNo">643</span>   */<a name="line.643"></a>
+<span class="sourceLineNo">644</span>  private static abstract class File {<a name="line.644"></a>
+<span class="sourceLineNo">645</span>    protected final FileSystem fs;<a name="line.645"></a>
 <span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    /**<a name="line.647"></a>
-<span class="sourceLineNo">648</span>     * @return the path to this file<a name="line.648"></a>
-<span class="sourceLineNo">649</span>     */<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    abstract Path getPath();<a name="line.650"></a>
-<span class="sourceLineNo">651</span><a name="line.651"></a>
-<span class="sourceLineNo">652</span>    /**<a name="line.652"></a>
-<span class="sourceLineNo">653</span>     * Move the file to the given destination<a name="line.653"></a>
-<span class="sourceLineNo">654</span>     * @param dest<a name="line.654"></a>
-<span class="sourceLineNo">655</span>     * @return &lt;tt&gt;true&lt;/tt&gt; on success<a name="line.655"></a>
-<span class="sourceLineNo">656</span>     * @throws IOException<a name="line.656"></a>
-<span class="sourceLineNo">657</span>     */<a name="line.657"></a>
-<span class="sourceLineNo">658</span>    public boolean moveAndClose(Path dest) throws IOException {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      this.close();<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      Path p = this.getPath();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>      return FSUtils.renameAndSetModifyTime(fs, p, dest);<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    }<a name="line.662"></a>
+<span class="sourceLineNo">647</span>    public File(FileSystem fs) {<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      this.fs = fs;<a name="line.648"></a>
+<span class="sourceLineNo">649</span>    }<a name="line.649"></a>
+<span class="sourceLineNo">650</span><a name="line.650"></a>
+<span class="sourceLineNo">651</span>    /**<a name="line.651"></a>
+<span class="sourceLineNo">652</span>     * Delete the file<a name="line.652"></a>
+<span class="sourceLineNo">653</span>     * @throws IOException on failure<a name="line.653"></a>
+<span class="sourceLineNo">654</span>     */<a name="line.654"></a>
+<span class="sourceLineNo">655</span>    abstract void delete() throws IOException;<a name="line.655"></a>
+<span class="sourceLineNo">656</span><a name="line.656"></a>
+<span class="sourceLineNo">657</span>    /**<a name="line.657"></a>
+<span class="sourceLineNo">658</span>     * Check to see if this is a file or a directory<a name="line.658"></a>
+<span class="sourceLineNo">659</span>     * @return &lt;tt&gt;true&lt;/tt&gt; if it is a file, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.659"></a>
+<span class="sourceLineNo">660</span>     * @throws IOException on {@link FileSystem} connection error<a name="line.660"></a>
+<span class="sourceLineNo">661</span>     */<a name="line.661"></a>
+<span class="sourceLineNo">662</span>    abstract boolean isFile() throws IOException;<a name="line.662"></a>
 <span class="sourceLineNo">663</span><a name="line.663"></a>
 <span class="sourceLineNo">664</span>    /**<a name="line.664"></a>
-<span class="sourceLineNo">665</span>     * @return the {@link FileSystem} on which this file resides<a name="line.665"></a>
-<span class="sourceLineNo">666</span>     */<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public FileSystem getFileSystem() {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      return this.fs;<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    }<a name="line.669"></a>
+<span class="sourceLineNo">665</span>     * @return if this is a directory, returns all the children in the<a name="line.665"></a>
+<span class="sourceLineNo">666</span>     *         directory, otherwise returns an empty list<a name="line.666"></a>
+<span class="sourceLineNo">667</span>     * @throws IOException<a name="line.667"></a>
+<span class="sourceLineNo">668</span>     */<a name="line.668"></a>
+<span class="sourceLineNo">669</span>    abstract Collection&lt;File&gt; getChildren() throws IOException;<a name="line.669"></a>
 <span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    @Override<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    public String toString() {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      return this.getClass().getSimpleName() + ", " + getPath().toString();<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    }<a name="line.674"></a>
-<span class="sourceLineNo">675</span>  }<a name="line.675"></a>
+<span class="sourceLineNo">671</span>    /**<a name="line.671"></a>
+<span class="sourceLineNo">672</span>     * close any outside readers of the file<a name="line.672"></a>
+<span class="sourceLineNo">673</span>     * @throws IOException<a name="line.673"></a>
+<span class="sourceLineNo">674</span>     */<a name="line.674"></a>
+<span class="sourceLineNo">675</span>    abstract void close() throws IOException;<a name="line.675"></a>
 <span class="sourceLineNo">676</span><a name="line.676"></a>
-<span class="sourceLineNo">677</span>  /**<a name="line.677"></a>
-<span class="sourceLineNo">678</span>   * A {@link File} that wraps a simple {@link Path} on a {@link FileSystem}.<a name="line.678"></a>
-<span class="sourceLineNo">679</span>   */<a name="line.679"></a>
-<span class="sourceLineNo">680</span>  private static class FileablePath extends File {<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    private final Path file;<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    private final FileStatusConverter getAsFile;<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>    public FileablePath(FileSystem fs, Path file) {<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      super(fs);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>      this.file = file;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>      this.getAsFile = new FileStatusConverter(fs);<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    }<a name="line.688"></a>
-<span class="sourceLineNo">689</span><a name="line.689"></a>
-<span class="sourceLineNo">690</span>    @Override<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    public void delete() throws IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      if (!fs.delete(file, true)) throw new IOException("Failed to delete:" + this.file);<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    }<a name="line.693"></a>
-<span class="sourceLineNo">694</span><a name="line.694"></a>
-<span class="sourceLineNo">695</span>    @Override<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    public String getName() {<a name="line.696"></a>
-<span class="sourceLineNo">697</span>      return file.getName();<a name="line.697"></a>
+<span class="sourceLineNo">677</span>    /**<a name="line.677"></a>
+<span class="sourceLineNo">678</span>     * @return the name of the file (not the full fs path, just the individual<a name="line.678"></a>
+<span class="sourceLineNo">679</span>     *         file name)<a name="line.679"></a>
+<span class="sourceLineNo">680</span>     */<a name="line.680"></a>
+<span class="sourceLineNo">681</span>    abstract String getName();<a name="line.681"></a>
+<span class="sourceLineNo">682</span><a name="line.682"></a>
+<span class="sourceLineNo">683</span>    /**<a name="line.683"></a>
+<span class="sourceLineNo">684</span>     * @return the path to this file<a name="line.684"></a>
+<span class="sourceLineNo">685</span>     */<a name="line.685"></a>
+<span class="sourceLineNo">686</span>    abstract Path getPath();<a name="line.686"></a>
+<span class="sourceLineNo">687</span><a name="line.687"></a>
+<span class="sourceLineNo">688</span>    /**<a name="line.688"></a>
+<span class="sourceLineNo">689</span>     * Move the file to the given destination<a name="line.689"></a>
+<span class="sourceLineNo">690</span>     * @param dest<a name="line.690"></a>
+<span class="sourceLineNo">691</span>     * @return &lt;tt&gt;true&lt;/tt&gt; on success<a name="line.691"></a>
+<span class="sourceLineNo">692</span>     * @throws IOException<a name="line.692"></a>
+<span class="sourceLineNo">693</span>     */<a name="line.693"></a>
+<span class="sourceLineNo">694</span>    public boolean moveAndClose(Path dest) throws IOException {<a name="line.694"></a>
+<span class="sourceLineNo">695</span>      this.close();<a name="line.695"></a>
+<span class="sourceLineNo">696</span>      Path p = this.getPath();<a name="line.696"></a>
+<span class="sourceLineNo">697</span>      return FSUtils.renameAndSetModifyTime(fs, p, dest);<a name="line.697"></a>
 <span class="sourceLineNo">698</span>    }<a name="line.698"></a>
 <span class="sourceLineNo">699</span><a name="line.699"></a>
-<span class="sourceLineNo">700</span>    @Override<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>      if (fs.isFile(file)) {<a name="line.702"></a>
-<span class="sourceLineNo">703</span>        return Collections.emptyList();<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      return Stream.of(fs.listStatus(file)).map(getAsFile).collect(Collectors.toList());<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    }<a name="line.706"></a>
-<span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>    @Override<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    public boolean isFile() throws IOException {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>      return fs.isFile(file);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    }<a name="line.711"></a>
+<span class="sourceLineNo">700</span>    /**<a name="line.700"></a>
+<span class="sourceLineNo">701</span>     * @return the {@link FileSystem} on which this file resides<a name="line.701"></a>
+<span class="sourceLineNo">702</span>     */<a name="line.702"></a>
+<span class="sourceLineNo">703</span>    public FileSystem getFileSystem() {<a name="line.703"></a>
+<span class="sourceLineNo">704</span>      return this.fs;<a name="line.704"></a>
+<span class="sourceLineNo">705</span>    }<a name="line.705"></a>
+<span class="sourceLineNo">706</span><a name="line.706"></a>
+<span class="sourceLineNo">707</span>    @Override<a name="line.707"></a>
+<span class="sourceLineNo">708</span>    public String toString() {<a name="line.708"></a>
+<span class="sourceLineNo">709</span>      return this.getClass().getSimpleName() + ", " + getPath().toString();<a name="line.709"></a>
+<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
+<span class="sourceLineNo">711</span>  }<a name="line.711"></a>
 <span class="sourceLineNo">712</span><a name="line.712"></a>
-<span class="sourceLineNo">713</span>    @Override<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    public void close() throws IOException {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>      // NOOP - files are implicitly closed on removal<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    }<a name="line.716"></a>
-<span class="sourceLineNo">717</span><a name="line.717"></a>
-<span class="sourceLineNo">718</span>    @Override<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    Path getPath() {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return file;<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
-<span class="sourceLineNo">723</span><a name="line.723"></a>
-<span class="sourceLineNo">724</span>  /**<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * {@link File} adapter for a {@link HStoreFile} living on a {@link FileSystem}<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * .<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   */<a name="line.727"></a>
-<span class="sourceLineNo">728</span>  private static class FileableStoreFile extends File {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    HStoreFile file;<a name="line.729"></a>
+<span class="sourceLineNo">713</span>  /**<a name="line.713"></a>
+<span class="sourceLineNo">714</span>   * A {@link File} that wraps a simple {@link Path} on a {@link FileSystem}.<a name="line.714"></a>
+<span class="sourceLineNo">715</span>   */<a name="line.715"></a>
+<span class="sourceLineNo">716</span>  private static class FileablePath extends File {<a name="line.716"></a>
+<span class="sourceLineNo">717</span>    private final Path file;<a name="line.717"></a>
+<span class="sourceLineNo">718</span>    private final FileStatusConverter getAsFile;<a name="line.718"></a>
+<span class="sourceLineNo">719</span><a name="line.719"></a>
+<span class="sourceLineNo">720</span>    public FileablePath(FileSystem fs, Path file) {<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      super(fs);<a name="line.721"></a>
+<span class="sourceLineNo">722</span>      this.file = file;<a name="line.722"></a>
+<span class="sourceLineNo">723</span>      this.getAsFile = new FileStatusConverter(fs);<a name="line.723"></a>
+<span class="sourceLineNo">724</span>    }<a name="line.724"></a>
+<span class="sourceLineNo">725</span><a name="line.725"></a>
+<span class="sourceLineNo">726</span>    @Override<a name="line.726"></a>
+<span class="sourceLineNo">727</span>    public void delete() throws IOException {<a name="line.727"></a>
+<span class="sourceLineNo">728</span>      if (!fs.delete(file, true)) throw new IOException("Failed to delete:" + this.file);<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
 <span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    public FileableStoreFile(FileSystem fs, HStoreFile store) {<a name="line.731"></a>
-<span class="sourceLineNo">732</span>      super(fs);<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      this.file = store;<a name="line.733"></a>
+<span class="sourceLineNo">731</span>    @Override<a name="line.731"></a>
+<span class="sourceLineNo">732</span>    public String getName() {<a name="line.732"></a>
+<span class="sourceLineNo">733</span>      return file.getName();<a name="line.733"></a>
 <span class="sourceLineNo">734</span>    }<a name="line.734"></a>
 <span class="sourceLineNo">735</span><a name="line.735"></a>
 <span class="sourceLineNo">736</span>    @Override<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    public void delete() throws IOException {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      file.deleteStoreFile();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>    }<a name="line.739"></a>
-<span class="sourceLineNo">740</span><a name="line.740"></a>
-<span class="sourceLineNo">741</span>    @Override<a name="line.741"></a>
-<span class="sourceLineNo">742</span>    public String getName() {<a name="line.742"></a>
-<span class="sourceLineNo">743</span>      return file.getPath().getName();<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>    @Override<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    public boolean isFile() {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      return true;<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    }<a name="line.749"></a>
-<span class="sourceLineNo">750</span><a name="line.750"></a>
-<span class="sourceLineNo">751</span>    @Override<a name="line.751"></a>
-<span class="sourceLineNo">752</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      // storefiles don't have children<a name="line.753"></a>
-<span class="sourceLineNo">754</span>      return Collections.emptyList();<a name="line.754"></a>
-<span class="sourceLineNo">755</span>    }<a name="line.755"></a>
-<span class="sourceLineNo">756</span><a name="line.756"></a>
-<span class="sourceLineNo">757</span>    @Override<a name="line.757"></a>
-<span class="sourceLineNo">758</span>    public void close() throws IOException {<a name="line.758"></a>
-<span class="sourceLineNo">759</span>      file.closeStoreFile(true);<a name="line.759"></a>
-<span class="sourceLineNo">760</span>    }<a name="line.760"></a>
-<span class="sourceLineNo">761</span><a name="line.761"></a>
-<span class="sourceLineNo">762</span>    @Override<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    Path getPath() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      return file.getPath();<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    }<a name="line.765"></a>
-<span class="sourceLineNo">766</span>  }<a name="line.766"></a>
-<span class="sourceLineNo">767</span>}<a name="line.767"></a>
+<span class="sourceLineNo">737</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.737"></a>
+<span class="sourceLineNo">738</span>      if (fs.isFile(file)) {<a name="line.738"></a>
+<span class="sourceLineNo">739</span>        return Collections.emptyList();<a name="line.739"></a>
+<span class="sourceLineNo">740</span>      }<a name="line.740"></a>
+<span class="sourceLineNo">741</span>      return Stream.of(fs.listStatus(file)).map(getAsFile).collect(Collectors.toList());<a name="line.741"></a>
+<span class="sourceLineNo">742</span>    }<a name="line.742"></a>
+<span class="sourceLineNo">743</span><a name="line.743"></a>
+<span class="sourceLineNo">744</span>    @Override<a name="line.744"></a>
+<span class="sourceLineNo">745</span>    public boolean isFile() throws IOException {<a name="line.745"></a>
+<span class="sourceLineNo">746</span>      return fs.isFile(file);<a name="line.746"></a>
+<span class="sourceLineNo">747</span>    }<a name="line.747"></a>
+<span class="sourceLineNo">748</span><a name="line.748"></a>
+<span class="sourceLineNo">749</span>    @Override<a name="line.749"></a>
+<span class="sourceLineNo">750</span>    public void close() throws IOException {<a name="line.750"></a>
+<span class="sourceLineNo">751</span>      // NOOP - files are implicitly closed on removal<a name="line.751"></a>
+<span class="sourceLineNo">752</span>    }<a name="line.752"></a>
+<span class="sourceLineNo">753</span><a name="line.753"></a>
+<span class="sourceLineNo">754</span>    @Override<a name="line.754"></a>
+<span class="sourceLineNo">755</span>    Path getPath() {<a name="line.755"></a>
+<span class="sourceLineNo">756</span>      return file;<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    }<a name="line.757"></a>
+<span class="sourceLineNo">758</span>  }<a name="line.758"></a>
+<span class="sourceLineNo">759</span><a name="line.759"></a>
+<span class="sourceLineNo">760</span>  /**<a name="line.760"></a>
+<span class="sourceLineNo">761</span>   * {@link File} adapter for a {@link HStoreFile} living on a {@link FileSystem}<a name="line.761"></a>
+<span class="sourceLineNo">762</span>   * .<a name="line.762"></a>
+<span class="sourceLineNo">763</span>   */<a name="line.763"></a>
+<span class="sourceLineNo">764</span>  private static class FileableStoreFile extends File {<a name="line.764"></a>
+<span class="sourceLineNo">765</span>    HStoreFile file;<a name="line.765"></a>
+<span class="sourceLineNo">766</span><a name="line.766"></a>
+<span class="sourceLineNo">767</span>    public FileableStoreFile(FileSystem fs, HStoreFile store) {<a name="line.767"></a>
+<span class="sourceLineNo">768</span>      super(fs);<a name="line.768"></a>
+<span class="sourceLineNo">769</span>      this.file = store;<a name="line.769"></a>
+<span class="sourceLineNo">770</span>    }<a name="line.770"></a>
+<span class="sourceLineNo">771</span><a name="line.771"></a>
+<span class="sourceLineNo">772</span>    @Override<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    public void delete() throws IOException {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>      file.deleteStoreFile();<a name="line.774"></a>
+<span class="sourceLineNo">775</span>    }<a name="line.775"></a>
+<span class="sourceLineNo">776</span><a name="line.776"></a>
+<span class="sourceLineNo">777</span>    @Override<a name="line.777"></a>
+<span class="sourceLineNo">778</span>    public String getName() {<a name="line.778"></a>
+<span class="sourceLineNo">779</span>      return file.getPath().getName();<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    }<a name="line.780"></a>
+<span class="sourceLineNo">781</span><a name="line.781"></a>
+<span class="sourceLineNo">782</span>    @Override<a name="line.782"></a>
+<span class="sourceLineNo">783</span>    public boolean isFile() {<a name="line.783"></a>
+<span class="sourceLineNo">784</span>      return true;<a name="line.784"></a>
+<span class="sourceLineNo">785</span>    }<a name="line.785"></a>
+<span class="sourceLineNo">786</span><a name="line.786"></a>
+<span class="sourceLineNo">787</span>    @Override<a name="line.787"></a>
+<span class="sourceLineNo">788</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.788"></a>
+<span class="sourceLineNo">789</span>      // storefiles don't have children<a name="line.789"></a>
+<span class="sourceLineNo">790</span>      return Collections.emptyList();<a name="line.790"></a>
+<span class="sourceLineNo">791</span>    }<a name="line.791"></a>
+<span class="sourceLineNo">792</span><a name="line.792"></a>
+<span class="sourceLineNo">793</span>    @Override<a name="line.793"></a>
+<span class="sourceLineNo">794</span>    public void close() throws IOException {<a name="line.794"></a>
+<span class="sourceLineNo">795</span>      file.closeStoreFile(true);<a name="line.795"></a>
+<span class="sourceLineNo">796</span>    }<a name="line.796"></a>
+<span class="sourceLineNo">797</span><a name="line.797"></a>
+<span class="sourceLineNo">798</span>    @Override<a name="line.798"></a>
+<span class="sourceLineNo">799</span>    Path getPath() {<a name="line.799"></a>
+<span class="sourceLineNo">800</span>      return file.getPath();<a name="line.800"></a>
+<span class="sourceLineNo">801</span>    }<a name="line.801"></a>
+<span class="sourceLineNo">802</span>  }<a name="line.802"></a>
+<span class="sourceLineNo">803</span>}<a name="line.803"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html
index 0343488..a9dcefd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileConverter.html
@@ -46,733 +46,769 @@
 <span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import org.apache.hadoop.fs.Path;<a name="line.39"></a>
 <span class="sourceLineNo">040</span>import org.apache.hadoop.fs.PathFilter;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.regionserver.HStoreFile;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.Logger;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.slf4j.LoggerFactory;<a name="line.51"></a>
-<span class="sourceLineNo">052</span><a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.53"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.HConstants;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.regionserver.HStoreFile;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.slf4j.Logger;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.slf4j.LoggerFactory;<a name="line.53"></a>
 <span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>/**<a name="line.55"></a>
-<span class="sourceLineNo">056</span> * Utility class to handle the removal of HFiles (or the respective {@link HStoreFile StoreFiles})<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * for a HRegion from the {@link FileSystem}. The hfiles will be archived or deleted, depending on<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * the state of the system.<a name="line.58"></a>
-<span class="sourceLineNo">059</span> */<a name="line.59"></a>
-<span class="sourceLineNo">060</span>@InterfaceAudience.Private<a name="line.60"></a>
-<span class="sourceLineNo">061</span>public class HFileArchiver {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class);<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  private static final String SEPARATOR = ".";<a name="line.63"></a>
-<span class="sourceLineNo">064</span><a name="line.64"></a>
-<span class="sourceLineNo">065</span>  /** Number of retries in case of fs operation failure */<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  private static final int DEFAULT_RETRIES_NUMBER = 3;<a name="line.66"></a>
-<span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  private static final Function&lt;File, Path&gt; FUNC_FILE_TO_PATH =<a name="line.68"></a>
-<span class="sourceLineNo">069</span>      new Function&lt;File, Path&gt;() {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>        @Override<a name="line.70"></a>
-<span class="sourceLineNo">071</span>        public Path apply(File file) {<a name="line.71"></a>
-<span class="sourceLineNo">072</span>          return file == null ? null : file.getPath();<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        }<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      };<a name="line.74"></a>
-<span class="sourceLineNo">075</span><a name="line.75"></a>
-<span class="sourceLineNo">076</span>  private static ThreadPoolExecutor archiveExecutor;<a name="line.76"></a>
+<span class="sourceLineNo">055</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>/**<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * Utility class to handle the removal of HFiles (or the respective {@link HStoreFile StoreFiles})<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * for a HRegion from the {@link FileSystem}. The hfiles will be archived or deleted, depending on<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * the state of the system.<a name="line.60"></a>
+<span class="sourceLineNo">061</span> */<a name="line.61"></a>
+<span class="sourceLineNo">062</span>@InterfaceAudience.Private<a name="line.62"></a>
+<span class="sourceLineNo">063</span>public class HFileArchiver {<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class);<a name="line.64"></a>
+<span class="sourceLineNo">065</span>  private static final String SEPARATOR = ".";<a name="line.65"></a>
+<span class="sourceLineNo">066</span><a name="line.66"></a>
+<span class="sourceLineNo">067</span>  /** Number of retries in case of fs operation failure */<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  private static final int DEFAULT_RETRIES_NUMBER = 3;<a name="line.68"></a>
+<span class="sourceLineNo">069</span><a name="line.69"></a>
+<span class="sourceLineNo">070</span>  private static final Function&lt;File, Path&gt; FUNC_FILE_TO_PATH =<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      new Function&lt;File, Path&gt;() {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>        @Override<a name="line.72"></a>
+<span class="sourceLineNo">073</span>        public Path apply(File file) {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>          return file == null ? null : file.getPath();<a name="line.74"></a>
+<span class="sourceLineNo">075</span>        }<a name="line.75"></a>
+<span class="sourceLineNo">076</span>      };<a name="line.76"></a>
 <span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  private HFileArchiver() {<a name="line.78"></a>
-<span class="sourceLineNo">079</span>    // hidden ctor since this is just a util<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span>  /**<a name="line.82"></a>
-<span class="sourceLineNo">083</span>   * @return True if the Region exits in the filesystem.<a name="line.83"></a>
-<span class="sourceLineNo">084</span>   */<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public static boolean exists(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.85"></a>
-<span class="sourceLineNo">086</span>      throws IOException {<a name="line.86"></a>
-<span class="sourceLineNo">087</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    Path regionDir = FSUtils.getRegionDirFromRootDir(rootDir, info);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    return fs.exists(regionDir);<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  }<a name="line.90"></a>
-<span class="sourceLineNo">091</span><a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /**<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * Cleans up all the files for a HRegion by archiving the HFiles to the archive directory<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   * @param conf the configuration to use<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   * @param fs the file system object<a name="line.95"></a>
-<span class="sourceLineNo">096</span>   * @param info RegionInfo for region to be deleted<a name="line.96"></a>
-<span class="sourceLineNo">097</span>   */<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static void archiveRegion(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      throws IOException {<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    archiveRegion(fs, rootDir, FSUtils.getTableDir(rootDir, info.getTable()),<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      FSUtils.getRegionDirFromRootDir(rootDir, info));<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  }<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  /**<a name="line.105"></a>
-<span class="sourceLineNo">106</span>   * Remove an entire region from the table directory via archiving the region's hfiles.<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   *          the archive path)<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * @param regionDir {@link Path} to where a region is being stored (for building the archive path)<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the region was successfully deleted. &lt;tt&gt;false&lt;/tt&gt; if the filesystem<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   *         operations could not complete.<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   * @throws IOException if the request cannot be completed<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   */<a name="line.115"></a>
-<span class="sourceLineNo">116</span>  public static boolean archiveRegion(FileSystem fs, Path rootdir, Path tableDir, Path regionDir)<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      throws IOException {<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    // otherwise, we archive the files<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    // make sure we can archive<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    if (tableDir == null || regionDir == null) {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>      LOG.error("No archive directory could be found because tabledir (" + tableDir<a name="line.121"></a>
-<span class="sourceLineNo">122</span>          + ") or regiondir (" + regionDir + "was null. Deleting files instead.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      if (regionDir != null) {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        deleteRegionWithoutArchiving(fs, regionDir);<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      }<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      // we should have archived, but failed to. Doesn't matter if we deleted<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      // the archived files correctly or not.<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      return false;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    }<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>    LOG.debug("ARCHIVING {}", regionDir);<a name="line.131"></a>
+<span class="sourceLineNo">078</span>  private static ThreadPoolExecutor archiveExecutor;<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>  private HFileArchiver() {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    // hidden ctor since this is just a util<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * @return True if the Region exits in the filesystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  public static boolean exists(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      throws IOException {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    Path regionDir = FSUtils.getRegionDirFromRootDir(rootDir, info);<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    return fs.exists(regionDir);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>  }<a name="line.92"></a>
+<span class="sourceLineNo">093</span><a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /**<a name="line.94"></a>
+<span class="sourceLineNo">095</span>   * Cleans up all the files for a HRegion by archiving the HFiles to the archive directory<a name="line.95"></a>
+<span class="sourceLineNo">096</span>   * @param conf the configuration to use<a name="line.96"></a>
+<span class="sourceLineNo">097</span>   * @param fs the file system object<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   * @param info RegionInfo for region to be deleted<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   */<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  public static void archiveRegion(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.100"></a>
+<span class="sourceLineNo">101</span>      throws IOException {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    archiveRegion(fs, rootDir, FSUtils.getTableDir(rootDir, info.getTable()),<a name="line.103"></a>
+<span class="sourceLineNo">104</span>      FSUtils.getRegionDirFromRootDir(rootDir, info));<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  /**<a name="line.107"></a>
+<span class="sourceLineNo">108</span>   * Remove an entire region from the table directory via archiving the region's hfiles.<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   *          the archive path)<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * @param regionDir {@link Path} to where a region is being stored (for building the archive path)<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the region was successfully deleted. &lt;tt&gt;false&lt;/tt&gt; if the filesystem<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   *         operations could not complete.<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   * @throws IOException if the request cannot be completed<a name="line.116"></a>
+<span class="sourceLineNo">117</span>   */<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  public static boolean archiveRegion(FileSystem fs, Path rootdir, Path tableDir, Path regionDir)<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      throws IOException {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    // otherwise, we archive the files<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    // make sure we can archive<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    if (tableDir == null || regionDir == null) {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      LOG.error("No archive directory could be found because tabledir (" + tableDir<a name="line.123"></a>
+<span class="sourceLineNo">124</span>          + ") or regiondir (" + regionDir + "was null. Deleting files instead.");<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      if (regionDir != null) {<a name="line.125"></a>
+<span class="sourceLineNo">126</span>        deleteRegionWithoutArchiving(fs, regionDir);<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      }<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      // we should have archived, but failed to. Doesn't matter if we deleted<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      // the archived files correctly or not.<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      return false;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    }<a name="line.131"></a>
 <span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>    // make sure the regiondir lives under the tabledir<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    Preconditions.checkArgument(regionDir.toString().startsWith(tableDir.toString()));<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    Path regionArchiveDir = HFileArchiveUtil.getRegionArchiveDir(rootdir,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        FSUtils.getTableName(tableDir),<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        regionDir.getName());<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    // otherwise, we attempt to archive the store files<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    // build collection of just the store directories to archive<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    Collection&lt;File&gt; toArchive = new ArrayList&lt;&gt;();<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    final PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    PathFilter nonHidden = new PathFilter() {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      @Override<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      public boolean accept(Path file) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        return dirFilter.accept(file) &amp;&amp; !file.getName().startsWith(".");<a name="line.148"></a>
-<span class="sourceLineNo">149</span>      }<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    };<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    FileStatus[] storeDirs = FSUtils.listStatus(fs, regionDir, nonHidden);<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    // if there no files, we can just delete the directory and return;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    if (storeDirs == null) {<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      LOG.debug("Directory {} empty.", regionDir);<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    }<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>    // convert the files in the region to a File<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    Stream.of(storeDirs).map(getAsFile).forEachOrdered(toArchive::add);<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    LOG.debug("Archiving " + toArchive);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, regionArchiveDir, toArchive,<a name="line.161"></a>
-<span class="sourceLineNo">162</span>        EnvironmentEdgeManager.currentTime());<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    if (!failedArchive.isEmpty()) {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      throw new FailedArchiveException(<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        "Failed to archive/delete all the files for region:" + regionDir.getName() + " into " +<a name="line.165"></a>
-<span class="sourceLineNo">166</span>          regionArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.166"></a>
-<span class="sourceLineNo">167</span>        failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    // if that was successful, then we delete the region<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Archive the specified regions in parallel.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf the configuration to use<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @param rootDir {@link Path} to the root directory where hbase files are stored (for building<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   *                            the archive path)<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   *                             path)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * @param regionDirList {@link Path} to where regions are being stored (for building the archive<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   *                                  path)<a name="line.182"></a>
-<span class="sourceLineNo">183</span>   * @throws IOException if the request cannot be completed<a name="line.183"></a>
-<span class="sourceLineNo">184</span>   */<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  public static void archiveRegions(Configuration conf, FileSystem fs, Path rootDir, Path tableDir,<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    List&lt;Path&gt; regionDirList) throws IOException {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    List&lt;Future&lt;Void&gt;&gt; futures = new ArrayList&lt;&gt;(regionDirList.size());<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    for (Path regionDir: regionDirList) {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      Future&lt;Void&gt; future = getArchiveExecutor(conf).submit(() -&gt; {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        archiveRegion(fs, rootDir, tableDir, regionDir);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        return null;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>      });<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      futures.add(future);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    }<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    try {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      for (Future&lt;Void&gt; future: futures) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        future.get();<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    } catch (InterruptedException e) {<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      throw new InterruptedIOException(e.getMessage());<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    } catch (ExecutionException e) {<a name="line.201"></a>
-<span class="sourceLineNo">202</span>      throw new IOException(e.getCause());<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    }<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  }<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  private static synchronized ThreadPoolExecutor getArchiveExecutor(final Configuration conf) {<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    if (archiveExecutor == null) {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int maxThreads = conf.getInt("hbase.hfilearchiver.thread.pool.max", 8);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      archiveExecutor = Threads.getBoundedCachedThreadPool(maxThreads, 30L, TimeUnit.SECONDS,<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        getThreadFactory());<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      // Shutdown this ThreadPool in a shutdown hook<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      Runtime.getRuntime().addShutdownHook(new Thread(() -&gt; archiveExecutor.shutdown()));<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    return archiveExecutor;<a name="line.215"></a>
-<span class="sourceLineNo">216</span>  }<a name="line.216"></a>
-<span class="sourceLineNo">217</span><a name="line.217"></a>
-<span class="sourceLineNo">218</span>  // We need this method instead of Threads.getNamedThreadFactory() to pass some tests.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  // The difference from Threads.getNamedThreadFactory() is that it doesn't fix ThreadGroup for<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  // new threads. If we use Threads.getNamedThreadFactory(), we will face ThreadGroup related<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  // issues in some tests.<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static ThreadFactory getThreadFactory() {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>    return new ThreadFactory() {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      final AtomicInteger threadNumber = new AtomicInteger(1);<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>      @Override<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      public Thread newThread(Runnable r) {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>        final String name = "HFileArchiver-" + threadNumber.getAndIncrement();<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        Thread t = new Thread(r, name);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>        t.setDaemon(true);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        return t;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      }<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    };<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  }<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>   * Remove from the specified region the store files of the specified column family,<a name="line.237"></a>
-<span class="sourceLineNo">238</span>   * either by archiving them or outright deletion<a name="line.238"></a>
-<span class="sourceLineNo">239</span>   * @param fs the filesystem where the store files live<a name="line.239"></a>
-<span class="sourceLineNo">240</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.240"></a>
-<span class="sourceLineNo">241</span>   * @param parent Parent region hosting the store files<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * @param family the family hosting the store files<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  public static void archiveFamily(FileSystem fs, Configuration conf,<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      RegionInfo parent, Path tableDir, byte[] family) throws IOException {<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    Path familyDir = new Path(tableDir, new Path(parent.getEncodedName(), Bytes.toString(family)));<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    archiveFamilyByFamilyDir(fs, conf, parent, familyDir, family);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  }<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>  /**<a name="line.252"></a>
-<span class="sourceLineNo">253</span>   * Removes from the specified region the store files of the specified column family,<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * either by archiving them or outright deletion<a name="line.254"></a>
-<span class="sourceLineNo">255</span>   * @param fs the filesystem where the store files live<a name="line.255"></a>
-<span class="sourceLineNo">256</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.256"></a>
-<span class="sourceLineNo">257</span>   * @param parent Parent region hosting the store files<a name="line.257"></a>
-<span class="sourceLineNo">258</span>   * @param familyDir {@link Path} to where the family is being stored<a name="line.258"></a>
-<span class="sourceLineNo">259</span>   * @param family the family hosting the store files<a name="line.259"></a>
-<span class="sourceLineNo">260</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.260"></a>
-<span class="sourceLineNo">261</span>   */<a name="line.261"></a>
-<span class="sourceLineNo">262</span>  public static void archiveFamilyByFamilyDir(FileSystem fs, Configuration conf,<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      RegionInfo parent, Path familyDir, byte[] family) throws IOException {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    FileStatus[] storeFiles = FSUtils.listStatus(fs, familyDir);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    if (storeFiles == null) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      LOG.debug("No files to dispose of in {}, family={}", parent.getRegionNameAsString(),<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          Bytes.toString(family));<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    Collection&lt;File&gt; toArchive = Stream.of(storeFiles).map(getAsFile).collect(Collectors.toList());<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, parent, family);<a name="line.273"></a>
-<span class="sourceLineNo">274</span><a name="line.274"></a>
-<span class="sourceLineNo">275</span>    // do the actual archive<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, storeArchiveDir, toArchive,<a name="line.276"></a>
-<span class="sourceLineNo">277</span>        EnvironmentEdgeManager.currentTime());<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    if (!failedArchive.isEmpty()){<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          + Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.281"></a>
-<span class="sourceLineNo">282</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
-<span class="sourceLineNo">285</span><a name="line.285"></a>
-<span class="sourceLineNo">286</span>  /**<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   * Remove the store files, either by archiving them or outright deletion<a name="line.287"></a>
-<span class="sourceLineNo">288</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.288"></a>
-<span class="sourceLineNo">289</span>   * @param fs the filesystem where the store files live<a name="line.289"></a>
-<span class="sourceLineNo">290</span>   * @param regionInfo {@link RegionInfo} of the region hosting the store files<a name="line.290"></a>
-<span class="sourceLineNo">291</span>   * @param family the family hosting the store files<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   * @param compactedFiles files to be disposed of. No further reading of these files should be<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   *          attempted; otherwise likely to cause an {@link IOException}<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   */<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  public static void archiveStoreFiles(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      Path tableDir, byte[] family, Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      throws IOException, FailedArchiveException {<a name="line.298"></a>
-<span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    // sometimes in testing, we don't have rss, so we need to check for that<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    if (fs == null) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      LOG.warn("Passed filesystem is null, so just deleting files without archiving for {}," +<a name="line.302"></a>
-<span class="sourceLineNo">303</span>              "family={}", Bytes.toString(regionInfo.getRegionName()), Bytes.toString(family));<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      deleteStoreFilesWithoutArchiving(compactedFiles);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      return;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>    // short circuit if we don't have any files to delete<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    if (compactedFiles.isEmpty()) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>      LOG.debug("No files to dispose of, done!");<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      return;<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span><a name="line.313"></a>
-<span class="sourceLineNo">314</span>    // build the archive path<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    if (regionInfo == null || family == null) throw new IOException(<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        "Need to have a region and a family to archive from.");<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.318"></a>
-<span class="sourceLineNo">319</span><a name="line.319"></a>
-<span class="sourceLineNo">320</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.322"></a>
-<span class="sourceLineNo">323</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    }<a name="line.324"></a>
-<span class="sourceLineNo">325</span><a name="line.325"></a>
-<span class="sourceLineNo">326</span>    // otherwise we attempt to archive the store files<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    LOG.debug("Archiving compacted files.");<a name="line.327"></a>
-<span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>    // Wrap the storefile into a File<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    StoreToFile getStorePath = new StoreToFile(fs);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    Collection&lt;File&gt; storeFiles =<a name="line.331"></a>
-<span class="sourceLineNo">332</span>      compactedFiles.stream().map(getStorePath).collect(Collectors.toList());<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>    // do the actual archive<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    List&lt;File&gt; failedArchive =<a name="line.335"></a>
-<span class="sourceLineNo">336</span>      resolveAndArchive(fs, storeArchiveDir, storeFiles, EnvironmentEdgeManager.currentTime());<a name="line.336"></a>
-<span class="sourceLineNo">337</span><a name="line.337"></a>
-<span class="sourceLineNo">338</span>    if (!failedArchive.isEmpty()){<a name="line.338"></a>
-<span class="sourceLineNo">339</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.339"></a>
-<span class="sourceLineNo">340</span>          + Bytes.toString(regionInfo.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.340"></a>
-<span class="sourceLineNo">341</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.341"></a>
-<span class="sourceLineNo">342</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    }<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  /**<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   * Archive the store file<a name="line.347"></a>
-<span class="sourceLineNo">348</span>   * @param fs the filesystem where the store files live<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   * @param regionInfo region hosting the store files<a name="line.349"></a>
-<span class="sourceLineNo">350</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   * @param family the family hosting the store files<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param storeFile file to be archived<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
-<span class="sourceLineNo">356</span>  public static void archiveStoreFile(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      Path tableDir, byte[] family, Path storeFile) throws IOException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    }<a name="line.363"></a>
+<span class="sourceLineNo">133</span>    LOG.debug("ARCHIVING {}", regionDir);<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>    // make sure the regiondir lives under the tabledir<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    Preconditions.checkArgument(regionDir.toString().startsWith(tableDir.toString()));<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    Path regionArchiveDir = HFileArchiveUtil.getRegionArchiveDir(rootdir,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        FSUtils.getTableName(tableDir),<a name="line.138"></a>
+<span class="sourceLineNo">139</span>        regionDir.getName());<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    // otherwise, we attempt to archive the store files<a name="line.142"></a>
+<span class="sourceLineNo">143</span><a name="line.143"></a>
+<span class="sourceLineNo">144</span>    // build collection of just the store directories to archive<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    Collection&lt;File&gt; toArchive = new ArrayList&lt;&gt;();<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    final PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    PathFilter nonHidden = new PathFilter() {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      @Override<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      public boolean accept(Path file) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        return dirFilter.accept(file) &amp;&amp; !file.getName().startsWith(".");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      }<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    };<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    FileStatus[] storeDirs = FSUtils.listStatus(fs, regionDir, nonHidden);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    // if there are no files, we can just delete the directory and return<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    if (storeDirs == null) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      LOG.debug("Directory {} empty.", regionDir);<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    }<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>    // convert the files in the region to a File<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    Stream.of(storeDirs).map(getAsFile).forEachOrdered(toArchive::add);<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    LOG.debug("Archiving {}", toArchive);<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, regionArchiveDir, toArchive,<a name="line.163"></a>
+<span class="sourceLineNo">164</span>        EnvironmentEdgeManager.currentTime());<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    if (!failedArchive.isEmpty()) {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      throw new FailedArchiveException(<a name="line.166"></a>
+<span class="sourceLineNo">167</span>        "Failed to archive/delete all the files for region:" + regionDir.getName() + " into " +<a name="line.167"></a>
+<span class="sourceLineNo">168</span>          regionArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.168"></a>
+<span class="sourceLineNo">169</span>        failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    }<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    // if that was successful, then we delete the region<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
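
For orientation, here is a minimal sketch of how a caller could line up the paths this method expects. The helper class is hypothetical, the encoded region name is a placeholder, and it assumes the four-argument archiveRegion overload whose body appears above is callable from outside this class (its declaration is not visible in this hunk):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.backup.HFileArchiver;

    public final class ArchiveRegionSketch {
      // Hypothetical helper, not part of HBase.
      static void archiveOneRegion(Configuration conf, FileSystem fs, Path tableDir,
          String encodedRegionName) throws IOException {
        // Root dir resolved from hbase.rootdir, the same fallback archiveRecoveredEdits
        // uses further down.
        Path rootDir = new Path(conf.get(HConstants.HBASE_DIR));
        // The region dir must live directly under the table dir, otherwise the
        // Preconditions.checkArgument above fails.
        Path regionDir = new Path(tableDir, encodedRegionName);
        HFileArchiver.archiveRegion(fs, rootDir, tableDir, regionDir);
      }
    }
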
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Archive the specified regions in parallel.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf the configuration to use<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @param rootDir {@link Path} to the root directory where hbase files are stored (for building<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   *                            the archive path)<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive<a name="line.181"></a>
+<span class="sourceLineNo">182</span>   *                             path)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   * @param regionDirList {@link Path}s to the region directories being archived (for building the archive<a name="line.183"></a>
+<span class="sourceLineNo">184</span>   *                                  path)<a name="line.184"></a>
+<span class="sourceLineNo">185</span>   * @throws IOException if the request cannot be completed<a name="line.185"></a>
+<span class="sourceLineNo">186</span>   */<a name="line.186"></a>
+<span class="sourceLineNo">187</span>  public static void archiveRegions(Configuration conf, FileSystem fs, Path rootDir, Path tableDir,<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    List&lt;Path&gt; regionDirList) throws IOException {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    List&lt;Future&lt;Void&gt;&gt; futures = new ArrayList&lt;&gt;(regionDirList.size());<a name="line.189"></a>
+<span class="sourceLineNo">190</span>    for (Path regionDir: regionDirList) {<a name="line.190"></a>
+<span class="sourceLineNo">191</span>      Future&lt;Void&gt; future = getArchiveExecutor(conf).submit(() -&gt; {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>        archiveRegion(fs, rootDir, tableDir, regionDir);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>        return null;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      });<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      futures.add(future);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    try {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      for (Future&lt;Void&gt; future: futures) {<a name="line.198"></a>
+<span class="sourceLineNo">199</span>        future.get();<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      }<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    } catch (InterruptedException e) {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>      throw new InterruptedIOException(e.getMessage());<a name="line.202"></a>
+<span class="sourceLineNo">203</span>    } catch (ExecutionException e) {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      throw new IOException(e.getCause());<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    }<a name="line.205"></a>
+<span class="sourceLineNo">206</span>  }<a name="line.206"></a>
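
As an illustrative sketch only (HFileArchiver is an internal HBase class; the helper below and the way the region directories are obtained are assumptions), the parallel variant can be driven like this:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.backup.HFileArchiver;

    public final class ArchiveRegionsSketch {
      // Hypothetical helper, not part of HBase.
      static void archiveAll(Configuration conf, FileSystem fs, Path tableDir,
          List<String> encodedRegionNames) throws IOException {
        Path rootDir = new Path(conf.get(HConstants.HBASE_DIR));
        List<Path> regionDirs = new ArrayList<>(encodedRegionNames.size());
        for (String name : encodedRegionNames) {
          regionDirs.add(new Path(tableDir, name));
        }
        // Each region is archived on the shared "HFileArchiver-N" daemon pool; the call
        // blocks until every submitted region has been processed or one of them fails.
        HFileArchiver.archiveRegions(conf, fs, rootDir, tableDir, regionDirs);
      }
    }

As the catch blocks above show, an interruption while waiting is rethrown as an InterruptedIOException, and a failure inside any worker surfaces as an IOException wrapping the cause.
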
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>  private static synchronized ThreadPoolExecutor getArchiveExecutor(final Configuration conf) {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    if (archiveExecutor == null) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      int maxThreads = conf.getInt("hbase.hfilearchiver.thread.pool.max", 8);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      archiveExecutor = Threads.getBoundedCachedThreadPool(maxThreads, 30L, TimeUnit.SECONDS,<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        getThreadFactory());<a name="line.212"></a>
+<span class="sourceLineNo">213</span><a name="line.213"></a>
+<span class="sourceLineNo">214</span>      // Shutdown this ThreadPool in a shutdown hook<a name="line.214"></a>
+<span class="sourceLineNo">215</span>      Runtime.getRuntime().addShutdownHook(new Thread(() -&gt; archiveExecutor.shutdown()));<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    }<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    return archiveExecutor;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  }<a name="line.218"></a>
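
The pool is sized once, from hbase.hfilearchiver.thread.pool.max (default 8), when the executor is lazily created, so the value has to be in place before the first archive request. A small configuration sketch (the helper is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public final class ArchiverPoolConfigSketch {
      static Configuration withLargerArchiverPool() {
        Configuration conf = HBaseConfiguration.create();
        // Picked up by getArchiveExecutor() the first time a region archive is submitted.
        conf.setInt("hbase.hfilearchiver.thread.pool.max", 16);
        return conf;
      }
    }
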
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>  // We need this method instead of Threads.getNamedThreadFactory() to pass some tests.<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  // The difference from Threads.getNamedThreadFactory() is that it doesn't fix ThreadGroup for<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  // new threads. If we use Threads.getNamedThreadFactory(), we will face ThreadGroup related<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  // issues in some tests.<a name="line.223"></a>
+<span class="sourceLineNo">224</span>  private static ThreadFactory getThreadFactory() {<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    return new ThreadFactory() {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>      final AtomicInteger threadNumber = new AtomicInteger(1);<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>      @Override<a name="line.228"></a>
+<span class="sourceLineNo">229</span>      public Thread newThread(Runnable r) {<a name="line.229"></a>
+<span class="sourceLineNo">230</span>        final String name = "HFileArchiver-" + threadNumber.getAndIncrement();<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        Thread t = new Thread(r, name);<a name="line.231"></a>
+<span class="sourceLineNo">232</span>        t.setDaemon(true);<a name="line.232"></a>
+<span class="sourceLineNo">233</span>        return t;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      }<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    };<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  }<a name="line.236"></a>
+<span class="sourceLineNo">237</span><a name="line.237"></a>
+<span class="sourceLineNo">238</span>  /**<a name="line.238"></a>
+<span class="sourceLineNo">239</span>   * Remove from the specified region the store files of the specified column family,<a name="line.239"></a>
+<span class="sourceLineNo">240</span>   * either by archiving them or outright deletion<a name="line.240"></a>
+<span class="sourceLineNo">241</span>   * @param fs the filesystem where the store files live<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   * @param parent Parent region hosting the store files<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * @param family the family hosting the store files<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.246"></a>
+<span class="sourceLineNo">247</span>   */<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  public static void archiveFamily(FileSystem fs, Configuration conf,<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      RegionInfo parent, Path tableDir, byte[] family) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    Path familyDir = new Path(tableDir, new Path(parent.getEncodedName(), Bytes.toString(family)));<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    archiveFamilyByFamilyDir(fs, conf, parent, familyDir, family);<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  }<a name="line.252"></a>
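
A hedged usage sketch (the helper and the family name are placeholders); it simply forwards to the method above, which builds tableDir/encodedRegionName/family and delegates to archiveFamilyByFamilyDir:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.backup.HFileArchiver;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.util.Bytes;

    public final class ArchiveFamilySketch {
      // Hypothetical helper, not part of HBase.
      static void archiveFamily(Configuration conf, FileSystem fs, RegionInfo region,
          Path tableDir, String familyName) throws IOException {
        HFileArchiver.archiveFamily(fs, conf, region, tableDir, Bytes.toBytes(familyName));
      }
    }
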
+<span class="sourceLineNo">253</span><a name="line.253"></a>
+<span class="sourceLineNo">254</span>  /**<a name="line.254"></a>
+<span class="sourceLineNo">255</span>   * Removes from the specified region the store files of the specified column family,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>   * either by archiving them or outright deletion<a name="line.256"></a>
+<span class="sourceLineNo">257</span>   * @param fs the filesystem where the store files live<a name="line.257"></a>
+<span class="sourceLineNo">258</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.258"></a>
+<span class="sourceLineNo">259</span>   * @param parent Parent region hosting the store files<a name="line.259"></a>
+<span class="sourceLineNo">260</span>   * @param familyDir {@link Path} to where the family is being stored<a name="line.260"></a>
+<span class="sourceLineNo">261</span>   * @param family the family hosting the store files<a name="line.261"></a>
+<span class="sourceLineNo">262</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   */<a name="line.263"></a>
+<span class="sourceLineNo">264</span>  public static void archiveFamilyByFamilyDir(FileSystem fs, Configuration conf,<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      RegionInfo parent, Path familyDir, byte[] family) throws IOException {<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    FileStatus[] storeFiles = FSUtils.listStatus(fs, familyDir);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    if (storeFiles == null) {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      LOG.debug("No files to dispose of in {}, family={}", parent.getRegionNameAsString(),<a name="line.268"></a>
+<span class="sourceLineNo">269</span>          Bytes.toString(family));<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      return;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    Collection&lt;File&gt; toArchive = Stream.of(storeFiles).map(getAsFile).collect(Collectors.toList());<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, parent, family);<a name="line.275"></a>
+<span class="sourceLineNo">276</span><a name="line.276"></a>
+<span class="sourceLineNo">277</span>    // do the actual archive<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, storeArchiveDir, toArchive,<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        EnvironmentEdgeManager.currentTime());<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    if (!failedArchive.isEmpty()){<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          + Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.282"></a>
+<span class="sourceLineNo">283</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    }<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  }<a name="line.286"></a>
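
When the family directory has already been resolved by the caller, the by-directory variant can be used directly. The sketch below (hypothetical helper) also shows that archiving failures arrive as FailedArchiveException, an IOException subtype whose message names the region, family and target archive directory:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.backup.FailedArchiveException;
    import org.apache.hadoop.hbase.backup.HFileArchiver;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.util.Bytes;

    public final class ArchiveFamilyDirSketch {
      // Hypothetical helper, not part of HBase.
      static void archiveFamilyDir(Configuration conf, FileSystem fs, RegionInfo region,
          Path familyDir, String familyName) throws IOException {
        try {
          HFileArchiver.archiveFamilyByFamilyDir(fs, conf, region, familyDir,
              Bytes.toBytes(familyName));
        } catch (FailedArchiveException e) {
          // Some files could neither be moved to the archive nor deleted; rethrow so the
          // caller can decide whether to retry.
          throw e;
        }
      }
    }
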
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>  /**<a name="line.288"></a>
+<span class="sourceLineNo">289</span>   * Remove the store files, either by archiving them or outright deletion<a name="line.289"></a>
+<span class="sourceLineNo">290</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.290"></a>
+<span class="sourceLineNo">291</span>   * @param fs the filesystem where the store files live<a name="line.291"></a>
+<span class="sourceLineNo">292</span>   * @param regionInfo {@link RegionInfo} of the region hosting the store files<a name="line.292"></a>
+<span class="sourceLineNo">293</span>   * @param family the family hosting the store files<a name="line.293"></a>
+<span class="sourceLineNo">294</span>   * @param compactedFiles files to be disposed of. No further reading of these files should be<a name="line.294"></a>
+<span class="sourceLineNo">295</span>   *          attempted; otherwise likely to cause an {@link IOException}<a name="line.295"></a>
+<span class="sourceLineNo">296</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.296"></a>
+<span class="sourceLineNo">297</span>   */<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  public static void archiveStoreFiles(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      Path tableDir, byte[] family, Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      throws IOException {<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    archive(fs, regionInfo, family, compactedFiles, storeArchiveDir);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  }<a name="line.303"></a>
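
A minimal sketch under the assumption that the caller already holds the compacted HStoreFile handles (inside the region server they typically come from the store's own compaction bookkeeping); the helper is hypothetical:

    import java.io.IOException;
    import java.util.Collection;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.backup.HFileArchiver;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.regionserver.HStoreFile;
    import org.apache.hadoop.hbase.util.Bytes;

    public final class ArchiveStoreFilesSketch {
      // Hypothetical helper, not part of HBase.
      static void archiveCompacted(Configuration conf, FileSystem fs, RegionInfo region,
          Path tableDir, String familyName, Collection<HStoreFile> compacted)
          throws IOException {
        // Per the javadoc above, the passed files must not be read again after this call.
        HFileArchiver.archiveStoreFiles(conf, fs, region, tableDir,
            Bytes.toBytes(familyName), compacted);
      }
    }
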
+<span class="sourceLineNo">304</span><a name="line.304"></a>
+<span class="sourceLineNo">305</span>  /**<a name="line.305"></a>
+<span class="sourceLineNo">306</span>   * Archive recovered edits using existing logic for archiving store files. This is currently only<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * relevant when &lt;b&gt;hbase.region.archive.recovered.edits&lt;/b&gt; is true, as recovered edits shouldn't<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   * be kept after replay. In theory, we could use the very same method available for archiving<a name="line.308"></a>
+<span class="sourceLineNo">309</span>   * store files, but supporting WAL dir and store files on different FileSystems added the need for<a name="line.309"></a>
+<span class="sourceLineNo">310</span>   * extra validation of the passed FileSystem instance and the path where the archived edits<a name="line.310"></a>
+<span class="sourceLineNo">311</span>   * should be placed.<a name="line.311"></a>
+<span class="sourceLineNo">312</span>   * @param conf {@link Configuration} to determine the archive directory.<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * @param fs the filesystem used for storing WAL files.<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * @param regionInfo {@link RegionInfo} a pseudo region representation for the archiving logic.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * @param family a pseudo familiy representation for the archiving logic.<a name="line.315"></a>
+<span class="sourceLineNo">315</span>   * @param family a pseudo family representation for the archiving logic.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   * @param replayedEdits the recovered edits to be archived.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   * @throws IOException if files can't be archived due to some internal error.<a name="line.317"></a>
+<span class="sourceLineNo">319</span>  public static void archiveRecoveredEdits(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    byte[] family, Collection&lt;HStoreFile&gt; replayedEdits)<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    throws IOException {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    String workingDir = conf.get(CommonFSUtils.HBASE_WAL_DIR, conf.get(HConstants.HBASE_DIR));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    //extra sanity checks for the right FS<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    Path path = new Path(workingDir);<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    if(path.isAbsoluteAndSchemeAuthorityNull()){<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    // no scheme specified on the wal dir value, so it's on the same FS as the StoreFiles<a name="line.326"></a>
+<span class="sourceLineNo">327</span>      path = new Path(conf.get(HConstants.HBASE_DIR));<a name="line.327"></a>
+<span class="sourceLineNo">328</span>    }<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    if(path.toUri().getScheme()!=null &amp;&amp; !path.toUri().getScheme().equals(fs.getScheme())){<a name="line.329"></a>
+<span class="sourceLineNo">330</span>      throw new IOException("Wrong file system! Should be " + path.toUri().getScheme() +<a name="line.330"></a>
+<span class="sourceLineNo">331</span>        ", but got " +  fs.getScheme());<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    }<a name="line.332"></a>
+<span class="sourceLineNo">333</span>    path = HFileArchiveUtil.getStoreArchivePathForRootDir(path, regionInfo, family);<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    archive(fs, regionInfo, family, replayedEdits, path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>  }<a name="line.335"></a>
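
The important constraint above is that the FileSystem passed in must match the scheme of hbase.wal.dir (falling back to hbase.rootdir when no scheme is given), otherwise the method throws. A hedged sketch that resolves the WAL filesystem the same way before calling it (the helper and its inputs are hypothetical):

    import java.io.IOException;
    import java.util.Collection;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.backup.HFileArchiver;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.regionserver.HStoreFile;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.CommonFSUtils;

    public final class ArchiveRecoveredEditsSketch {
      // Hypothetical helper, not part of HBase.
      static void archiveEdits(Configuration conf, RegionInfo region, String familyName,
          Collection<HStoreFile> replayedEdits) throws IOException {
        // Resolve the WAL filesystem the same way archiveRecoveredEdits validates it.
        Path walDir = new Path(conf.get(CommonFSUtils.HBASE_WAL_DIR,
            conf.get(HConstants.HBASE_DIR)));
        FileSystem walFs = walDir.getFileSystem(conf);
        HFileArchiver.archiveRecoveredEdits(conf, walFs, region,
            Bytes.toBytes(familyName), replayedEdits);
      }
    }
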
+<span class="sourceLineNo">336</span><a name="line.336"></a>
+<span class="sourceLineNo">337</span>  private static void archive(FileSystem fs, RegionInfo regionInfo, byte[] family,<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    Collection&lt;HStoreFile&gt; compactedFiles, Path storeArchiveDir) throws IOException {<a name="line.338"></a>
+<span class="sourceLineNo">339</span>    // sometimes in testing, we don't have rss, so we need to check for that<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    if (fs == null) {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      LOG.warn("Passed filesystem is null, so just deleting files without archiving for {}," +<a name="line.341"></a>
+<span class="sourceLineNo">342</span>              "family={}", Bytes.toString(regionInfo.getRegionName()), Bytes.toString(family));<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      deleteStoreFilesWithoutArchiving(compactedFiles);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>      return;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>    // short circuit if we don't have any files to delete<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    if (compactedFiles.isEmpty()) {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      LOG.debug("No files to dispose of, done!");<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      return;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>    }<a name="line.351"></a>
+<span class="sourceLineNo">352</span><a name="line.352"></a>
+<span class="sourceLineNo">353</span>    // build the archive path<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    if (regionInfo == null || family == null) throw new IOException(<a name="line.354"></a>
+<span class="sourceLineNo">355</span>        "Need to have a region and a family to archive from.");<a name="line.355"></a>
+<span class="sourceLineNo">356</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.358"></a>
+<span class="sourceLineNo">359</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>    // otherwise we attempt to archive the store files<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    LOG.debug("Archiving compacted files.");<a name="line.363"></a>
 <span class="sourceLineNo">364</span><a name="line.364"></a>
-<span class="sourceLineNo">365</span>    // do the actual archive<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    long start = EnvironmentEdgeManager.currentTime();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    File file = new FileablePath(fs, storeFile);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    if (!resolveAndArchiveFile(storeArchiveDir, file, Long.toString(start))) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      throw new IOException("Failed to archive/delete the file for region:"<a name="line.369"></a>
-<span class="sourceLineNo">370</span>          + regionInfo.getRegionNameAsString() + ", family:" + Bytes.toString(family)<a name="line.370"></a>
-<span class="sourceLineNo">371</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.");<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    }<a name="line.372"></a>
-<span class="sourceLineNo">373</span>  }<a name="line.373"></a>
-<span class="sourceLineNo">374</span><a name="line.374"></a>
-<span class="sourceLineNo">375</span>  /**<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   * Resolve any conflict with an existing archive file via timestamp-append<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * renaming of the existing file and then archive the passed in files.<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * @param fs {@link FileSystem} on which to archive the files<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   * @param baseArchiveDir base archive directory to store the files. If any of<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   *          the files to archive are directories, will append the name of the<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   *          directory to the base archive directory name, creating a parallel<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   *          structure.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * @param toArchive files/directories that need to be archvied<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param start time the archiving started - used for resolving archive<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   *          conflicts.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * @return the list of failed to archive files.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @throws IOException if an unexpected file operation exception occurred<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  private static List&lt;File&gt; resolveAndArchive(FileSystem fs, Path baseArchiveDir,<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      Collection&lt;File&gt; toArchive, long start) throws IOException {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    // short circuit if no files to move<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    if (toArchive.isEmpty()) {<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      return Collections.emptyList();<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
-<span class="sourceLineNo">395</span><a name="line.395"></a>
-<span class="sourceLineNo">396</span>    LOG.trace("Moving files to the archive directory {}", baseArchiveDir);<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>    // make sure the archive directory exists<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    if (!fs.exists(baseArchiveDir)) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>      if (!fs.mkdirs(baseArchiveDir)) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>        throw new IOException("Failed to create the archive directory:" + baseArchiveDir<a name="line.401"></a>
-<span class="sourceLineNo">402</span>            + ", quitting archive attempt.");<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      }<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      LOG.trace("Created archive directory {}", baseArchiveDir);<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    }<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    List&lt;File&gt; failures = new ArrayList&lt;&gt;();<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    String startTime = Long.toString(start);<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    for (File file : toArchive) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      // if its a file archive it<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      try {<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        LOG.trace("Archiving {}", file);<a name="line.412"></a>
-<span class="sourceLineNo">413</span>        if (file.isFile()) {<a name="line.413"></a>
-<span class="sourceLineNo">414</span>          // attempt to archive the file<a name="line.414"></a>
-<span class="sourceLineNo">415</span>          if (!resolveAndArchiveFile(baseArchiveDir, file, startTime)) {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            LOG.warn("Couldn't archive " + file + " into backup directory: " + baseArchiveDir);<a name="line.416"></a>
-<span class="sourceLineNo">417</span>            failures.add(file);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>          }<a name="line.418"></a>
-<span class="sourceLineNo">419</span>        } else {<a name="line.419"></a>
-<span class="sourceLineNo">420</span>          // otherwise its a directory and we need to archive all files<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          LOG.trace("{} is a directory, archiving children files", file);<a name="line.421"></a>
-<span class="sourceLineNo">422</span>          // so we add the directory name to the one base archive<a name="line.422"></a>
-<span class="sourceLineNo">423</span>          Path parentArchiveDir = new Path(baseArchiveDir, file.getName());<a name="line.423"></a>
-<span class="sourceLineNo">424</span>          // and then get all the files from that directory and attempt to<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          // archive those too<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          Collection&lt;File&gt; children = file.getChildren();<a name="line.426"></a>
-<span class="sourceLineNo">427</span>          failures.addAll(resolveAndArchive(fs, parentArchiveDir, children, start));<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        }<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      } catch (IOException e) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>        LOG.warn("Failed to archive {}", file, e);<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        failures.add(file);<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      }<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    return failures;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  }<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>  /**<a name="line.437"></a>
-<span class="sourceLineNo">438</span>   * Attempt to archive the passed in file to the archive directory.<a name="line.438"></a>
-<span class="sourceLineNo">439</span>   * &lt;p&gt;<a name="line.439"></a>
-<span class="sourceLineNo">440</span>   * If the same file already exists in the archive, it is moved to a timestamped directory under<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   * the archive directory and the new file is put in its place.<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * @param archiveDir {@link Path} to the directory that stores the archives of the hfiles<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   * @param currentFile {@link Path} to the original HFile that will be archived<a name="line.443"></a>
-<span class="sourceLineNo">444</span>   * @param archiveStartTime time the archiving started, to resolve naming conflicts<a name="line.444"></a>
-<span class="sourceLineNo">445</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the file is successfully archived. &lt;tt&gt;false&lt;/tt&gt; if there was a<a name="line.445"></a>
-<span class="sourceLineNo">446</span>   *         problem, but the operation still completed.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>   * @throws IOException on failure to complete {@link FileSystem} operations.<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   */<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  private static boolean resolveAndArchiveFile(Path archiveDir, File currentFile,<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      String archiveStartTime) throws IOException {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>    // build path as it should be in the archive<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    String filename = currentFile.getName();<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    Path archiveFile = new Path(archiveDir, filename);<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    FileSystem fs = currentFile.getFileSystem();<a name="line.454"></a>
-<span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>    // if the file already exists in the archive, move that one to a timestamped backup. This is a<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    // really, really unlikely situtation, where we get the same name for the existing file, but<a name="line.457"></a>
-<span class="sourceLineNo">458</span>    // is included just for that 1 in trillion chance.<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    if (fs.exists(archiveFile)) {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>      LOG.debug("{} already exists in archive, moving to timestamped backup and " +<a name="line.460"></a>
-<span class="sourceLineNo">461</span>          "overwriting current.", archiveFile);<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>      // move the archive file to the stamped backup<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      Path backedupArchiveFile = new Path(archiveDir, filename + SEPARATOR + archiveStartTime);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      if (!fs.rename(archiveFile, backedupArchiveFile)) {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        LOG.error("Could not rename archive file to backup: " + backedupArchiveFile<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            + ", deleting existing file in favor of newer.");<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        // try to delete the exisiting file, if we can't rename it<a name="line.468"></a>
-<span class="sourceLineNo">469</span>        if (!fs.delete(archiveFile, false)) {<a name="line.469"></a>
-<span class="sourceLineNo">470</span>          throw new IOException("Couldn't delete existing archive file (" + archiveFile<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + ") or rename it to the backup file (" + backedupArchiveFile<a name="line.471"></a>
-<span class="sourceLineNo">472</span>              + ") to make room for similarly named file.");<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>      }<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      LOG.debug("Backed up archive file from " + archiveFile);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    }<a name="line.476"></a>
-<span class="sourceLineNo">477</span><a name="line.477"></a>
-<span class="sourceLineNo">478</span>    LOG.trace("No existing file in archive for {}, free to archive original file.", archiveFile);<a name="line.478"></a>
-<span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>    // at this point, we should have a free spot for the archive file<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    boolean success = false;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    for (int i = 0; !success &amp;&amp; i &lt; DEFAULT_RETRIES_NUMBER; ++i) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      if (i &gt; 0) {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>        // Ensure that the archive directory exists.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        // The previous "move to archive" operation has failed probably because<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        // the cleaner has removed our archive directory (HBASE-7643).<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        // (we're in a retry loop, so don't worry too much about the exception)<a name="line.487"></a>
-<span class="sourceLineNo">488</span>        try {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>          if (!fs.exists(archiveDir)) {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            if (fs.mkdirs(archiveDir)) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>              LOG.debug("Created archive directory {}", archiveDir);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>            }<a name="line.492"></a>
-<span class="sourceLineNo">493</span>          }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        } catch (IOException e) {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          LOG.warn("Failed to create directory {}", archiveDir, e);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>        }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      }<a name="line.497"></a>
+<span class="sourceLineNo">365</span>    // Wrap the storefile into a File<a name="line.365"></a>
+<span class="sourceLineNo">366</span>    StoreToFile getStorePath = new StoreToFile(fs);<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    Collection&lt;File&gt; storeFiles =<a name="line.367"></a>
+<span class="sourceLineNo">368</span>      compactedFiles.stream().map(getStorePath).collect(Collectors.toList());<a name="line.368"></a>
+<span class="sourceLineNo">369</span><a name="line.369"></a>
+<span class="sourceLineNo">370</span>    // do the actual archive<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    List&lt;File&gt; failedArchive =<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      resolveAndArchive(fs, storeArchiveDir, storeFiles, EnvironmentEdgeManager.currentTime());<a name="line.372"></a>
+<span class="sourceLineNo">373</span><a name="line.373"></a>
+<span class="sourceLineNo">374</span>    if (!failedArchive.isEmpty()){<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.375"></a>
+<span class="sourceLineNo">376</span>          + Bytes.toString(regionInfo.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.376"></a>
+<span class="sourceLineNo">377</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.377"></a>
+<span class="sourceLineNo">378</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   * Archive the store file<a name="line.383"></a>
+<span class="sourceLineNo">384</span>   * @param fs the filesystem where the store files live<a name="line.384"></a>
+<span class="sourceLineNo">385</span>   * @param regionInfo region hosting the store files<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.387"></a>
+<span class="sourceLineNo">388</span>   * @param family the family hosting the store files<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * @param storeFile file to be archived<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   */<a name="line.391"></a>
+<span class="sourceLineNo">392</span>  public static void archiveStoreFile(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      Path tableDir, byte[] family, Path storeFile) throws IOException {<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.396"></a>
+<span class="sourceLineNo">397</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.397"></a>
+<span class="sourceLineNo">398</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    }<a name="line.399"></a>
+<span class="sourceLineNo">400</span><a name="line.400"></a>
+<span class="sourceLineNo">401</span>    // do the actual archive<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    long start = EnvironmentEdgeManager.currentTime();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>    File file = new FileablePath(fs, storeFile);<a name="line.403"></a>
+<span class="sourceLineNo">404</span>    if (!resolveAndArchiveFile(storeArchiveDir, file, Long.toString(start))) {<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      throw new IOException("Failed to archive/delete the file for region:"<a name="line.405"></a>
+<span class="sourceLineNo">406</span>          + regionInfo.getRegionNameAsString() + ", family:" + Bytes.toString(family)<a name="line.406"></a>
+<span class="sourceLineNo">407</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.");<a name="line.407"></a>
+<span class="sourceLineNo">408</span>    }<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  }<a name="line.409"></a>
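
A hedged sketch for the single-file case, e.g. one leftover HFile discovered by a cleanup scan. Note that, unlike archiveStoreFiles, a failure here surfaces as a plain IOException rather than a FailedArchiveException carrying the failed paths (the helper is hypothetical):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.backup.HFileArchiver;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.util.Bytes;

    public final class ArchiveSingleFileSketch {
      // Hypothetical helper, not part of HBase.
      static void archiveOne(Configuration conf, FileSystem fs, RegionInfo region,
          Path tableDir, String familyName, Path storeFile) throws IOException {
        HFileArchiver.archiveStoreFile(conf, fs, region, tableDir,
            Bytes.toBytes(familyName), storeFile);
      }
    }
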
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>  /**<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * Resolve any conflict with an existing archive file via timestamp-append<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   * renaming of the existing file and then archive the passed in files.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @param fs {@link FileSystem} on which to archive the files<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * @param baseArchiveDir base archive directory to store the files. If any of<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   *          the files to archive are directories, will append the name of the<a name="line.416"></a>
+<span class="sourceLineNo">417</span>   *          directory to the base archive directory name, creating a parallel<a name="line.417"></a>
+<span class="sourceLineNo">418</span>   *          structure.<a name="line.418"></a>
+<span class="sourceLineNo">419</span>   * @param toArchive files/directories that need to be archived<a name="line.419"></a>
+<span class="sourceLineNo">420</span>   * @param start time the archiving started - used for resolving archive<a name="line.420"></a>
+<span class="sourceLineNo">421</span>   *          conflicts.<a name="line.421"></a>
+<span class="sourceLineNo">422</span>   * @return the list of files that failed to archive.<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * @throws IOException if an unexpected file operation exception occurred<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   */<a name="line.424"></a>
+<span class="sourceLineNo">425</span>  private static List&lt;File&gt; resolveAndArchive(FileSystem fs, Path baseArchiveDir,<a name="line.425"></a>
+<span class="sourceLineNo">426</span>      Collection&lt;File&gt; toArchive, long start) throws IOException {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    // short circuit if no files to move<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    if (toArchive.isEmpty()) {<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      return Collections.emptyList();<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    }<a name="line.430"></a>
+<span class="sourceLineNo">431</span><a name="line.431"></a>
+<span class="sourceLineNo">432</span>    LOG.trace("Moving files to the archive directory {}", baseArchiveDir);<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    // make sure the archive directory exists<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    if (!fs.exists(baseArchiveDir)) {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      if (!fs.mkdirs(baseArchiveDir)) {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        throw new IOException("Failed to create the archive directory:" + baseArchiveDir<a name="line.437"></a>
+<span class="sourceLineNo">438</span>            + ", quitting archive attempt.");<a name="line.438"></a>
+<span class="sourceLineNo">439</span>      }<a name="line.439"></a>
+<span class="sourceLineNo">440</span>      LOG.trace("Created archive directory {}", baseArchiveDir);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>    List&lt;File&gt; failures = new ArrayList&lt;&gt;();<a name="line.443"></a>
+<span class="sourceLineNo">444</span>    String startTime = Long.toString(start);<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    for (File file : toArchive) {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      // if its a file archive it<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      try {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>        LOG.trace("Archiving {}", file);<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        if (file.isFile()) {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>          // attempt to archive the file<a name="line.450"></a>
+<span class="sourceLineNo">451</span>          if (!resolveAndArchiveFile(baseArchiveDir, file, startTime)) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>            LOG.warn("Couldn't archive " + file + " into backup directory: " + baseArchiveDir);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>            failures.add(file);<a name="line.453"></a>
+<span class="sourceLineNo">454</span>          }<a name="line.454"></a>
+<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
+<span class="sourceLineNo">456</span>          // otherwise its a directory and we need to archive all files<a name="line.456"></a>
+<span class="sourceLineNo">457</span>          LOG.trace("{} is a directory, archiving children files", file);<a name="line.457"></a>
+<span class="sourceLineNo">458</span>          // so we add the directory name to the one base archive<a name="line.458"></a>
+<span class="sourceLineNo">459</span>          Path parentArchiveDir = new Path(baseArchiveDir, file.getName());<a name="line.459"></a>
+<span class="sourceLineNo">460</span>          // and then get all the files from that directory and attempt to<a name="line.460"></a>
+<span class="sourceLineNo">461</span>          // archive those too<a name="line.461"></a>
+<span class="sourceLineNo">462</span>          Collection&lt;File&gt; children = file.getChildren();<a name="line.462"></a>
+<span class="sourceLineNo">463</span>          failures.addAll(resolveAndArchive(fs, parentArchiveDir, children, start));<a name="line.463"></a>
+<span class="sourceLineNo">464</span>        }<a name="line.464"></a>
+<span class="sourceLineNo">465</span>      } catch (IOException e) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>        LOG.warn("Failed to archive {}", file, e);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        failures.add(file);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>      }<a name="line.468"></a>
+<span class="sourceLineNo">469</span>    }<a name="line.469"></a>
+<span class="sourceLineNo">470</span>    return failures;<a name="line.470"></a>
+<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  /**<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   * Attempt to archive the passed in file to the archive directory.<a name="line.474"></a>
+<span class="sourceLineNo">475</span>   * &lt;p&gt;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>   * If the same file already exists in the archive, it is moved to a timestamped directory under<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   * the archive directory and the new file is put in its place.<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * @param archiveDir {@link Path} to the directory that stores the archives of the hfiles<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @param currentFile {@link Path} to the original HFile that will be archived<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   * @param archiveStartTime time the archiving started, to resolve naming conflicts<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the file is successfully archived. &lt;tt&gt;false&lt;/tt&gt; if there was a<a name="line.481"></a>
+<span class="sourceLineNo">482</span>   *         problem, but the operation still completed.<a name="line.482"></a>
+<span class="sourceLineNo">483</span>   * @throws IOException on failure to complete {@link FileSystem} operations.<a name="line.483"></a>
+<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
+<span class="sourceLineNo">485</span>  private static boolean resolveAndArchiveFile(Path archiveDir, File currentFile,<a name="line.485"></a>
+<span class="sourceLineNo">486</span>      String archiveStartTime) throws IOException {<a name="line.486"></a>
+<span class="sourceLineNo">487</span>    // build path as it should be in the archive<a name="line.487"></a>
+<span class="sourceLineNo">488</span>    String filename = currentFile.getName();<a name="line.488"></a>
+<span class="sourceLineNo">489</span>    Path archiveFile = new Path(archiveDir, filename);<a name="line.489"></a>
+<span class="sourceLineNo">490</span>    FileSystem fs = currentFile.getFileSystem();<a name="line.490"></a>
+<span class="sourceLineNo">491</span><a name="line.491"></a>
+<span class="sourceLineNo">492</span>    // if the file already exists in the archive, move that one to a timestamped backup. This is a<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    // really, really unlikely situtation, where we get the same name for the existing file, but<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    // is included just for that 1 in trillion chance.<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    if (fs.exists(archiveFile)) {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>      LOG.debug("{} already exists in archive, moving to timestamped backup and " +<a name="line.496"></a>
+<span class="sourceLineNo">497</span>          "overwriting current.", archiveFile);<a name="line.497"></a>
 <span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>      try {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        success = currentFile.moveAndClose(archiveFile);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      } catch (FileNotFoundException fnfe) {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        LOG.warn("Failed to archive " + currentFile +<a name="line.502"></a>
-<span class="sourceLineNo">503</span>            " because it does not exist! Skipping and continuing on.", fnfe);<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        success = true;<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      } catch (IOException e) {<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        LOG.warn("Failed to archive " + currentFile + " on try #" + i, e);<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        success = false;<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      }<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    if (!success) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      LOG.error("Failed to archive " + currentFile);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return false;<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
+<span class="sourceLineNo">499</span>      // move the archive file to the stamped backup<a name="line.499"></a>
+<span class="sourceLineNo">500</span>      Path backedupArchiveFile = new Path(archiveDir, filename + SEPARATOR + archiveStartTime);<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      if (!fs.rename(archiveFile, backedupArchiveFile)) {<a name="line.501"></a>
+<span class="sourceLineNo">502</span>        LOG.error("Could not rename archive file to backup: " + backedupArchiveFile<a name="line.502"></a>
+<span class="sourceLineNo">503</span>            + ", deleting existing file in favor of newer.");<a name="line.503"></a>
+<span class="sourceLineNo">504</span>        // try to delete the exisiting file, if we can't rename it<a name="line.504"></a>
+<span class="sourceLineNo">505</span>        if (!fs.delete(archiveFile, false)) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>          throw new IOException("Couldn't delete existing archive file (" + archiveFile<a name="line.506"></a>
+<span class="sourceLineNo">507</span>              + ") or rename it to the backup file (" + backedupArchiveFile<a name="line.507"></a>
+<span class="sourceLineNo">508</span>              + ") to make room for similarly named file.");<a name="line.508"></a>
+<span class="sourceLineNo">509</span>        }<a name="line.509"></a>
+<span class="sourceLineNo">510</span>      }<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      LOG.debug("Backed up archive file from " + archiveFile);<a name="line.511"></a>
+<span class="sourceLineNo">512</span>    }<a name="line.512"></a>
+<span class="sourceLineNo">513</span><a name="line.513"></a>
+<span class="sourceLineNo">514</span>    LOG.trace("No existing file in archive for {}, free to archive original file.", archiveFile);<a name="line.514"></a>
 <span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    LOG.debug("Archived from {} to {}", currentFile, archiveFile);<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    return true;<a name="line.517"></a>
-<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
-<span class="sourceLineNo">519</span><a name="line.519"></a>
-<span class="sourceLineNo">520</span>  /**<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * Without regard for backup, delete a region. Should be used with caution.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   * @param regionDir {@link Path} to the region to be deleted.<a name="line.522"></a>
-<span class="sourceLineNo">523</span>   * @param fs FileSystem from which to delete the region<a name="line.523"></a>
-<span class="sourceLineNo">524</span>   * @return &lt;tt&gt;true&lt;/tt&gt; on successful deletion, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * @throws IOException on filesystem operation failure<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   */<a name="line.526"></a>
-<span class="sourceLineNo">527</span>  private static boolean deleteRegionWithoutArchiving(FileSystem fs, Path regionDir)<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      throws IOException {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    if (fs.delete(regionDir, true)) {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      LOG.debug("Deleted {}", regionDir);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      return true;<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    }<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    LOG.debug("Failed to delete directory {}", regionDir);<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    return false;<a name="line.534"></a>
-<span class="sourceLineNo">535</span>  }<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>  /**<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * Just do a simple delete of the given store files<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * &lt;p&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * A best effort is made to delete each of the files, rather than bailing on the first failure.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   * &lt;p&gt;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>   * @param compactedFiles store files to delete from the file system.<a name="line.542"></a>
-<span class="sourceLineNo">543</span>   * @throws IOException if a file cannot be deleted. All files will be attempted to deleted before<a name="line.543"></a>
-<span class="sourceLineNo">544</span>   *           throwing the exception, rather than failing at the first file.<a name="line.544"></a>
-<span class="sourceLineNo">545</span>   */<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  private static void deleteStoreFilesWithoutArchiving(Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      throws IOException {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    LOG.debug("Deleting files without archiving.");<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    List&lt;IOException&gt; errors = new ArrayList&lt;&gt;(0);<a name="line.549"></a>
-<span class="sourceLineNo">550</span>    for (HStoreFile hsf : compactedFiles) {<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      try {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>        hsf.deleteStoreFile();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      } catch (IOException e) {<a name="line.553"></a>
-<span class="sourceLineNo">554</span>        LOG.error("Failed to delete {}", hsf.getPath());<a name="line.554"></a>
-<span class="sourceLineNo">555</span>        errors.add(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    }<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    if (errors.size() &gt; 0) {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      throw MultipleIOException.createIOException(errors);<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>  }<a name="line.561"></a>
-<span class="sourceLineNo">562</span><a name="line.562"></a>
-<span class="sourceLineNo">563</span>  /**<a name="line.563"></a>
-<span class="sourceLineNo">564</span>   * Adapt a type to match the {@link File} interface, which is used internally for handling<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * archival/removal of files<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param &lt;T&gt; type to adapt to the {@link File} interface<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  private static abstract class FileConverter&lt;T&gt; implements Function&lt;T, File&gt; {<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    protected final FileSystem fs;<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    public FileConverter(FileSystem fs) {<a name="line.571"></a>
-<span class="sourceLineNo">572</span>      this.fs = fs;<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    }<a name="line.573"></a>
-<span class="sourceLineNo">574</span>  }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>  /**<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   * Convert a FileStatus to something we can manage in the archiving<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   */<a name="line.578"></a>
-<span class="sourceLineNo">579</span>  private static class FileStatusConverter extends FileConverter&lt;FileStatus&gt; {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    public FileStatusConverter(FileSystem fs) {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      super(fs);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>    }<a name="line.582"></a>
-<span class="sourceLineNo">583</span><a name="line.583"></a>
-<span class="sourceLineNo">584</span>    @Override<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    public File apply(FileStatus input) {<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      return new FileablePath(fs, input.getPath());<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    }<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  }<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>  /**<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * Convert the {@link HStoreFile} into something we can manage in the archive<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   * methods<a name="line.592"></a>
-<span class="sourceLineNo">593</span>   */<a name="line.593"></a>
-<span class="sourceLineNo">594</span>  private static class StoreToFile extends FileConverter&lt;HStoreFile&gt; {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    public StoreToFile(FileSystem fs) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      super(fs);<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    }<a name="line.597"></a>
+<span class="sourceLineNo">516</span>    // at this point, we should have a free spot for the archive file<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    boolean success = false;<a name="line.517"></a>
+<span class="sourceLineNo">518</span>    for (int i = 0; !success &amp;&amp; i &lt; DEFAULT_RETRIES_NUMBER; ++i) {<a name="line.518"></a>
+<span class="sourceLineNo">519</span>      if (i &gt; 0) {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>        // Ensure that the archive directory exists.<a name="line.520"></a>
+<span class="sourceLineNo">521</span>        // The previous "move to archive" operation has failed probably because<a name="line.521"></a>
+<span class="sourceLineNo">522</span>        // the cleaner has removed our archive directory (HBASE-7643).<a name="line.522"></a>
+<span class="sourceLineNo">523</span>        // (we're in a retry loop, so don't worry too much about the exception)<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          if (!fs.exists(archiveDir)) {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>            if (fs.mkdirs(archiveDir)) {<a name="line.526"></a>
+<span class="sourceLineNo">527</span>              LOG.debug("Created archive directory {}", archiveDir);<a name="line.527"></a>
+<span class="sourceLineNo">528</span>            }<a name="line.528"></a>
+<span class="sourceLineNo">529</span>          }<a name="line.529"></a>
+<span class="sourceLineNo">530</span>        } catch (IOException e) {<a name="line.530"></a>
+<span class="sourceLineNo">531</span>          LOG.warn("Failed to create directory {}", archiveDir, e);<a name="line.531"></a>
+<span class="sourceLineNo">532</span>        }<a name="line.532"></a>
+<span class="sourceLineNo">533</span>      }<a name="line.533"></a>
+<span class="sourceLineNo">534</span><a name="line.534"></a>
+<span class="sourceLineNo">535</span>      try {<a name="line.535"></a>
+<span class="sourceLineNo">536</span>        success = currentFile.moveAndClose(archiveFile);<a name="line.536"></a>
+<span class="sourceLineNo">537</span>      } catch (FileNotFoundException fnfe) {<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        LOG.warn("Failed to archive " + currentFile +<a name="line.538"></a>
+<span class="sourceLineNo">539</span>            " because it does not exist! Skipping and continuing on.", fnfe);<a name="line.539"></a>
+<span class="sourceLineNo">540</span>        success = true;<a name="line.540"></a>
+<span class="sourceLineNo">541</span>      } catch (IOException e) {<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        LOG.warn("Failed to archive " + currentFile + " on try #" + i, e);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>        success = false;<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      }<a name="line.544"></a>
+<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
+<span class="sourceLineNo">546</span><a name="line.546"></a>
+<span class="sourceLineNo">547</span>    if (!success) {<a name="line.547"></a>
+<span class="sourceLineNo">548</span>      LOG.error("Failed to archive " + currentFile);<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      return false;<a name="line.549"></a>
+<span class="sourceLineNo">550</span>    }<a name="line.550"></a>
+<span class="sourceLineNo">551</span><a name="line.551"></a>
+<span class="sourceLineNo">552</span>    LOG.debug("Archived from {} to {}", currentFile, archiveFile);<a name="line.552"></a>
+<span class="sourceLineNo">553</span>    return true;<a name="line.553"></a>
+<span class="sourceLineNo">554</span>  }<a name="line.554"></a>
+<span class="sourceLineNo">555</span><a name="line.555"></a>
+<span class="sourceLineNo">556</span>  /**<a name="line.556"></a>
+<span class="sourceLineNo">557</span>   * Without regard for backup, delete a region. Should be used with caution.<a name="line.557"></a>
+<span class="sourceLineNo">558</span>   * @param regionDir {@link Path} to the region to be deleted.<a name="line.558"></a>
+<span class="sourceLineNo">559</span>   * @param fs FileSystem from which to delete the region<a name="line.559"></a>
+<span class="sourceLineNo">560</span>   * @return &lt;tt&gt;true&lt;/tt&gt; on successful deletion, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.560"></a>
+<span class="sourceLineNo">561</span>   * @throws IOException on filesystem operation failure<a name="line.561"></a>
+<span class="sourceLineNo">562</span>   */<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  private static boolean deleteRegionWithoutArchiving(FileSystem fs, Path regionDir)<a name="line.563"></a>
+<span class="sourceLineNo">564</span>      throws IOException {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>    if (fs.delete(regionDir, true)) {<a name="line.565"></a>
+<span class="sourceLineNo">566</span>      LOG.debug("Deleted {}", regionDir);<a name="line.566"></a>
+<span class="sourceLineNo">567</span>      return true;<a name="line.567"></a>
+<span class="sourceLineNo">568</span>    }<a name="line.568"></a>
+<span class="sourceLineNo">569</span>    LOG.debug("Failed to delete directory {}", regionDir);<a name="line.569"></a>
+<span class="sourceLineNo">570</span>    return false;<a name="line.570"></a>
+<span class="sourceLineNo">571</span>  }<a name="line.571"></a>
+<span class="sourceLineNo">572</span><a name="line.572"></a>
+<span class="sourceLineNo">573</span>  /**<a name="line.573"></a>
+<span class="sourceLineNo">574</span>   * Just do a simple delete of the given store files<a name="line.574"></a>
+<span class="sourceLineNo">575</span>   * &lt;p&gt;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>   * A best effort is made to delete each of the files, rather than bailing on the first failure.<a name="line.576"></a>
+<span class="sourceLineNo">577</span>   * &lt;p&gt;<a name="line.577"></a>
+<span class="sourceLineNo">578</span>   * @param compactedFiles store files to delete from the file system.<a name="line.578"></a>
+<span class="sourceLineNo">579</span>   * @throws IOException if a file cannot be deleted. All files will be attempted to deleted before<a name="line.579"></a>
+<span class="sourceLineNo">580</span>   *           throwing the exception, rather than failing at the first file.<a name="line.580"></a>
+<span class="sourceLineNo">581</span>   */<a name="line.581"></a>
+<span class="sourceLineNo">582</span>  private static void deleteStoreFilesWithoutArchiving(Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.582"></a>
+<span class="sourceLineNo">583</span>      throws IOException {<a name="line.583"></a>
+<span class="sourceLineNo">584</span>    LOG.debug("Deleting files without archiving.");<a name="line.584"></a>
+<span class="sourceLineNo">585</span>    List&lt;IOException&gt; errors = new ArrayList&lt;&gt;(0);<a name="line.585"></a>
+<span class="sourceLineNo">586</span>    for (HStoreFile hsf : compactedFiles) {<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
+<span class="sourceLineNo">588</span>        hsf.deleteStoreFile();<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      } catch (IOException e) {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>        LOG.error("Failed to delete {}", hsf.getPath());<a name="line.590"></a>
+<span class="sourceLineNo">591</span>        errors.add(e);<a name="line.591"></a>
+<span class="sourceLineNo">592</span>      }<a name="line.592"></a>
+<span class="sourceLineNo">593</span>    }<a name="line.593"></a>
+<span class="sourceLineNo">594</span>    if (errors.size() &gt; 0) {<a name="line.594"></a>
+<span class="sourceLineNo">595</span>      throw MultipleIOException.createIOException(errors);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>    }<a name="line.596"></a>
+<span class="sourceLineNo">597</span>  }<a name="line.597"></a>
 <span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>    @Override<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    public File apply(HStoreFile input) {<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      return new FileableStoreFile(fs, input);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    }<a name="line.602"></a>
-<span class="sourceLineNo">603</span>  }<a name="line.603"></a>
-<span class="sourceLineNo">604</span><a name="line.604"></a>
-<span class="sourceLineNo">605</span>  /**<a name="line.605"></a>
-<span class="sourceLineNo">606</span>   * Wrapper to handle file operations uniformly<a name="line.606"></a>
-<span class="sourceLineNo">607</span>   */<a name="line.607"></a>
-<span class="sourceLineNo">608</span>  private static abstract class File {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    protected final FileSystem fs;<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>    public File(FileSystem fs) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      this.fs = fs;<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>    /**<a name="line.615"></a>
-<span class="sourceLineNo">616</span>     * Delete the file<a name="line.616"></a>
-<span class="sourceLineNo">617</span>     * @throws IOException on failure<a name="line.617"></a>
-<span class="sourceLineNo">618</span>     */<a name="line.618"></a>
-<span class="sourceLineNo">619</span>    abstract void delete() throws IOException;<a name="line.619"></a>
-<span class="sourceLineNo">620</span><a name="line.620"></a>
-<span class="sourceLineNo">621</span>    /**<a name="line.621"></a>
-<span class="sourceLineNo">622</span>     * Check to see if this is a file or a directory<a name="line.622"></a>
-<span class="sourceLineNo">623</span>     * @return &lt;tt&gt;true&lt;/tt&gt; if it is a file, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.623"></a>
-<span class="sourceLineNo">624</span>     * @throws IOException on {@link FileSystem} connection error<a name="line.624"></a>
-<span class="sourceLineNo">625</span>     */<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    abstract boolean isFile() throws IOException;<a name="line.626"></a>
-<span class="sourceLineNo">627</span><a name="line.627"></a>
-<span class="sourceLineNo">628</span>    /**<a name="line.628"></a>
-<span class="sourceLineNo">629</span>     * @return if this is a directory, returns all the children in the<a name="line.629"></a>
-<span class="sourceLineNo">630</span>     *         directory, otherwise returns an empty list<a name="line.630"></a>
-<span class="sourceLineNo">631</span>     * @throws IOException<a name="line.631"></a>
-<span class="sourceLineNo">632</span>     */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    abstract Collection&lt;File&gt; getChildren() throws IOException;<a name="line.633"></a>
+<span class="sourceLineNo">599</span>  /**<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * Adapt a type to match the {@link File} interface, which is used internally for handling<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   * archival/removal of files<a name="line.601"></a>
+<span class="sourceLineNo">602</span>   * @param &lt;T&gt; type to adapt to the {@link File} interface<a name="line.602"></a>
+<span class="sourceLineNo">603</span>   */<a name="line.603"></a>
+<span class="sourceLineNo">604</span>  private static abstract class FileConverter&lt;T&gt; implements Function&lt;T, File&gt; {<a name="line.604"></a>
+<span class="sourceLineNo">605</span>    protected final FileSystem fs;<a name="line.605"></a>
+<span class="sourceLineNo">606</span><a name="line.606"></a>
+<span class="sourceLineNo">607</span>    public FileConverter(FileSystem fs) {<a name="line.607"></a>
+<span class="sourceLineNo">608</span>      this.fs = fs;<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
+<span class="sourceLineNo">610</span>  }<a name="line.610"></a>
+<span class="sourceLineNo">611</span><a name="line.611"></a>
+<span class="sourceLineNo">612</span>  /**<a name="line.612"></a>
+<span class="sourceLineNo">613</span>   * Convert a FileStatus to something we can manage in the archiving<a name="line.613"></a>
+<span class="sourceLineNo">614</span>   */<a name="line.614"></a>
+<span class="sourceLineNo">615</span>  private static class FileStatusConverter extends FileConverter&lt;FileStatus&gt; {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>    public FileStatusConverter(FileSystem fs) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      super(fs);<a name="line.617"></a>
+<span class="sourceLineNo">618</span>    }<a name="line.618"></a>
+<span class="sourceLineNo">619</span><a name="line.619"></a>
+<span class="sourceLineNo">620</span>    @Override<a name="line.620"></a>
+<span class="sourceLineNo">621</span>    public File apply(FileStatus input) {<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      return new FileablePath(fs, input.getPath());<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
+<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>  /**<a name="line.626"></a>
+<span class="sourceLineNo">627</span>   * Convert the {@link HStoreFile} into something we can manage in the archive<a name="line.627"></a>
+<span class="sourceLineNo">628</span>   * methods<a name="line.628"></a>
+<span class="sourceLineNo">629</span>   */<a name="line.629"></a>
+<span class="sourceLineNo">630</span>  private static class StoreToFile extends FileConverter&lt;HStoreFile&gt; {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>    public StoreToFile(FileSystem fs) {<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      super(fs);<a name="line.632"></a>
+<span class="sourceLineNo">633</span>    }<a name="line.633"></a>
 <span class="sourceLineNo">634</span><a name="line.634"></a>
-<span class="sourceLineNo">635</span>    /**<a name="line.635"></a>
-<span class="sourceLineNo">636</span>     * close any outside readers of the file<a name="line.636"></a>
-<span class="sourceLineNo">637</span>     * @throws IOException<a name="line.637"></a>
-<span class="sourceLineNo">638</span>     */<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    abstract void close() throws IOException;<a name="line.639"></a>
+<span class="sourceLineNo">635</span>    @Override<a name="line.635"></a>
+<span class="sourceLineNo">636</span>    public File apply(HStoreFile input) {<a name="line.636"></a>
+<span class="sourceLineNo">637</span>      return new FileableStoreFile(fs, input);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    }<a name="line.638"></a>
+<span class="sourceLineNo">639</span>  }<a name="line.639"></a>
 <span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    /**<a name="line.641"></a>
-<span class="sourceLineNo">642</span>     * @return the name of the file (not the full fs path, just the individual<a name="line.642"></a>
-<span class="sourceLineNo">643</span>     *         file name)<a name="line.643"></a>
-<span class="sourceLineNo">644</span>     */<a name="line.644"></a>
-<span class="sourceLineNo">645</span>    abstract String getName();<a name="line.645"></a>
+<span class="sourceLineNo">641</span>  /**<a name="line.641"></a>
+<span class="sourceLineNo">642</span>   * Wrapper to handle file operations uniformly<a name="line.642"></a>
+<span class="sourceLineNo">643</span>   */<a name="line.643"></a>
+<span class="sourceLineNo">644</span>  private static abstract class File {<a name="line.644"></a>
+<span class="sourceLineNo">645</span>    protected final FileSystem fs;<a name="line.645"></a>
 <span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    /**<a name="line.647"></a>
-<span class="sourceLineNo">648</span>     * @return the path to this file<a name="line.648"></a>
-<span class="sourceLineNo">649</span>     */<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    abstract Path getPath();<a name="line.650"></a>
-<span class="sourceLineNo">651</span><a name="line.651"></a>
-<span class="sourceLineNo">652</span>    /**<a name="line.652"></a>
-<span class="sourceLineNo">653</span>     * Move the file to the given destination<a name="line.653"></a>
-<span class="sourceLineNo">654</span>     * @param dest<a name="line.654"></a>
-<span class="sourceLineNo">655</span>     * @return &lt;tt&gt;true&lt;/tt&gt; on success<a name="line.655"></a>
-<span class="sourceLineNo">656</span>     * @throws IOException<a name="line.656"></a>
-<span class="sourceLineNo">657</span>     */<a name="line.657"></a>
-<span class="sourceLineNo">658</span>    public boolean moveAndClose(Path dest) throws IOException {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      this.close();<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      Path p = this.getPath();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>      return FSUtils.renameAndSetModifyTime(fs, p, dest);<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    }<a name="line.662"></a>
+<span class="sourceLineNo">647</span>    public File(FileSystem fs) {<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      this.fs = fs;<a name="line.648"></a>
+<span class="sourceLineNo">649</span>    }<a name="line.649"></a>
+<span class="sourceLineNo">650</span><a name="line.650"></a>
+<span class="sourceLineNo">651</span>    /**<a name="line.651"></a>
+<span class="sourceLineNo">652</span>     * Delete the file<a name="line.652"></a>
+<span class="sourceLineNo">653</span>     * @throws IOException on failure<a name="line.653"></a>
+<span class="sourceLineNo">654</span>     */<a name="line.654"></a>
+<span class="sourceLineNo">655</span>    abstract void delete() throws IOException;<a name="line.655"></a>
+<span class="sourceLineNo">656</span><a name="line.656"></a>
+<span class="sourceLineNo">657</span>    /**<a name="line.657"></a>
+<span class="sourceLineNo">658</span>     * Check to see if this is a file or a directory<a name="line.658"></a>
+<span class="sourceLineNo">659</span>     * @return &lt;tt&gt;true&lt;/tt&gt; if it is a file, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.659"></a>
+<span class="sourceLineNo">660</span>     * @throws IOException on {@link FileSystem} connection error<a name="line.660"></a>
+<span class="sourceLineNo">661</span>     */<a name="line.661"></a>
+<span class="sourceLineNo">662</span>    abstract boolean isFile() throws IOException;<a name="line.662"></a>
 <span class="sourceLineNo">663</span><a name="line.663"></a>
 <span class="sourceLineNo">664</span>    /**<a name="line.664"></a>
-<span class="sourceLineNo">665</span>     * @return the {@link FileSystem} on which this file resides<a name="line.665"></a>
-<span class="sourceLineNo">666</span>     */<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public FileSystem getFileSystem() {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      return this.fs;<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    }<a name="line.669"></a>
+<span class="sourceLineNo">665</span>     * @return if this is a directory, returns all the children in the<a name="line.665"></a>
+<span class="sourceLineNo">666</span>     *         directory, otherwise returns an empty list<a name="line.666"></a>
+<span class="sourceLineNo">667</span>     * @throws IOException<a name="line.667"></a>
+<span class="sourceLineNo">668</span>     */<a name="line.668"></a>
+<span class="sourceLineNo">669</span>    abstract Collection&lt;File&gt; getChildren() throws IOException;<a name="line.669"></a>
 <span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    @Override<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    public String toString() {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      return this.getClass().getSimpleName() + ", " + getPath().toString();<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    }<a name="line.674"></a>
-<span class="sourceLineNo">675</span>  }<a name="line.675"></a>
+<span class="sourceLineNo">671</span>    /**<a name="line.671"></a>
+<span class="sourceLineNo">672</span>     * close any outside readers of the file<a name="line.672"></a>
+<span class="sourceLineNo">673</span>     * @throws IOException<a name="line.673"></a>
+<span class="sourceLineNo">674</span>     */<a name="line.674"></a>
+<span class="sourceLineNo">675</span>    abstract void close() throws IOException;<a name="line.675"></a>
 <span class="sourceLineNo">676</span><a name="line.676"></a>
-<span class="sourceLineNo">677</span>  /**<a name="line.677"></a>
-<span class="sourceLineNo">678</span>   * A {@link File} that wraps a simple {@link Path} on a {@link FileSystem}.<a name="line.678"></a>
-<span class="sourceLineNo">679</span>   */<a name="line.679"></a>
-<span class="sourceLineNo">680</span>  private static class FileablePath extends File {<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    private final Path file;<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    private final FileStatusConverter getAsFile;<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>    public FileablePath(FileSystem fs, Path file) {<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      super(fs);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>      this.file = file;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>      this.getAsFile = new FileStatusConverter(fs);<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    }<a name="line.688"></a>
-<span class="sourceLineNo">689</span><a name="line.689"></a>
-<span class="sourceLineNo">690</span>    @Override<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    public void delete() throws IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      if (!fs.delete(file, true)) throw new IOException("Failed to delete:" + this.file);<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    }<a name="line.693"></a>
-<span class="sourceLineNo">694</span><a name="line.694"></a>
-<span class="sourceLineNo">695</span>    @Override<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    public String getName() {<a name="line.696"></a>
-<span class="sourceLineNo">697</span>      return file.getName();<a name="line.697"></a>
+<span class="sourceLineNo">677</span>    /**<a name="line.677"></a>
+<span class="sourceLineNo">678</span>     * @return the name of the file (not the full fs path, just the individual<a name="line.678"></a>
+<span class="sourceLineNo">679</span>     *         file name)<a name="line.679"></a>
+<span class="sourceLineNo">680</span>     */<a name="line.680"></a>
+<span class="sourceLineNo">681</span>    abstract String getName();<a name="line.681"></a>
+<span class="sourceLineNo">682</span><a name="line.682"></a>
+<span class="sourceLineNo">683</span>    /**<a name="line.683"></a>
+<span class="sourceLineNo">684</span>     * @return the path to this file<a name="line.684"></a>
+<span class="sourceLineNo">685</span>     */<a name="line.685"></a>
+<span class="sourceLineNo">686</span>    abstract Path getPath();<a name="line.686"></a>
+<span class="sourceLineNo">687</span><a name="line.687"></a>
+<span class="sourceLineNo">688</span>    /**<a name="line.688"></a>
+<span class="sourceLineNo">689</span>     * Move the file to the given destination<a name="line.689"></a>
+<span class="sourceLineNo">690</span>     * @param dest<a name="line.690"></a>
+<span class="sourceLineNo">691</span>     * @return &lt;tt&gt;true&lt;/tt&gt; on success<a name="line.691"></a>
+<span class="sourceLineNo">692</span>     * @throws IOException<a name="line.692"></a>
+<span class="sourceLineNo">693</span>     */<a name="line.693"></a>
+<span class="sourceLineNo">694</span>    public boolean moveAndClose(Path dest) throws IOException {<a name="line.694"></a>
+<span class="sourceLineNo">695</span>      this.close();<a name="line.695"></a>
+<span class="sourceLineNo">696</span>      Path p = this.getPath();<a name="line.696"></a>
+<span class="sourceLineNo">697</span>      return FSUtils.renameAndSetModifyTime(fs, p, dest);<a name="line.697"></a>
 <span class="sourceLineNo">698</span>    }<a name="line.698"></a>
 <span class="sourceLineNo">699</span><a name="line.699"></a>
-<span class="sourceLineNo">700</span>    @Override<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>      if (fs.isFile(file)) {<a name="line.702"></a>
-<span class="sourceLineNo">703</span>        return Collections.emptyList();<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      return Stream.of(fs.listStatus(file)).map(getAsFile).collect(Collectors.toList());<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    }<a name="line.706"></a>
-<span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>    @Override<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    public boolean isFile() throws IOException {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>      return fs.isFile(file);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    }<a name="line.711"></a>
+<span class="sourceLineNo">700</span>    /**<a name="line.700"></a>
+<span class="sourceLineNo">701</span>     * @return the {@link FileSystem} on which this file resides<a name="line.701"></a>
+<span class="sourceLineNo">702</span>     */<a name="line.702"></a>
+<span class="sourceLineNo">703</span>    public FileSystem getFileSystem() {<a name="line.703"></a>
+<span class="sourceLineNo">704</span>      return this.fs;<a name="line.704"></a>
+<span class="sourceLineNo">705</span>    }<a name="line.705"></a>
+<span class="sourceLineNo">706</span><a name="line.706"></a>
+<span class="sourceLineNo">707</span>    @Override<a name="line.707"></a>
+<span class="sourceLineNo">708</span>    public String toString() {<a name="line.708"></a>
+<span class="sourceLineNo">709</span>      return this.getClass().getSimpleName() + ", " + getPath().toString();<a name="line.709"></a>
+<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
+<span class="sourceLineNo">711</span>  }<a name="line.711"></a>
 <span class="sourceLineNo">712</span><a name="line.712"></a>
-<span class="sourceLineNo">713</span>    @Override<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    public void close() throws IOException {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>      // NOOP - files are implicitly closed on removal<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    }<a name="line.716"></a>
-<span class="sourceLineNo">717</span><a name="line.717"></a>
-<span class="sourceLineNo">718</span>    @Override<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    Path getPath() {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return file;<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
-<span class="sourceLineNo">723</span><a name="line.723"></a>
-<span class="sourceLineNo">724</span>  /**<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * {@link File} adapter for a {@link HStoreFile} living on a {@link FileSystem}<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * .<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   */<a name="line.727"></a>
-<span class="sourceLineNo">728</span>  private static class FileableStoreFile extends File {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    HStoreFile file;<a name="line.729"></a>
+<span class="sourceLineNo">713</span>  /**<a name="line.713"></a>
+<span class="sourceLineNo">714</span>   * A {@link File} that wraps a simple {@link Path} on a {@link FileSystem}.<a name="line.714"></a>
+<span class="sourceLineNo">715</span>   */<a name="line.715"></a>
+<span class="sourceLineNo">716</span>  private static class FileablePath extends File {<a name="line.716"></a>
+<span class="sourceLineNo">717</span>    private final Path file;<a name="line.717"></a>
+<span class="sourceLineNo">718</span>    private final FileStatusConverter getAsFile;<a name="line.718"></a>
+<span class="sourceLineNo">719</span><a name="line.719"></a>
+<span class="sourceLineNo">720</span>    public FileablePath(FileSystem fs, Path file) {<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      super(fs);<a name="line.721"></a>
+<span class="sourceLineNo">722</span>      this.file = file;<a name="line.722"></a>
+<span class="sourceLineNo">723</span>      this.getAsFile = new FileStatusConverter(fs);<a name="line.723"></a>
+<span class="sourceLineNo">724</span>    }<a name="line.724"></a>
+<span class="sourceLineNo">725</span><a name="line.725"></a>
+<span class="sourceLineNo">726</span>    @Override<a name="line.726"></a>
+<span class="sourceLineNo">727</span>    public void delete() throws IOException {<a name="line.727"></a>
+<span class="sourceLineNo">728</span>      if (!fs.delete(file, true)) throw new IOException("Failed to delete:" + this.file);<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
 <span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    public FileableStoreFile(FileSystem fs, HStoreFile store) {<a name="line.731"></a>
-<span class="sourceLineNo">732</span>      super(fs);<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      this.file = store;<a name="line.733"></a>
+<span class="sourceLineNo">731</span>    @Override<a name="line.731"></a>
+<span class="sourceLineNo">732</span>    public String getName() {<a name="line.732"></a>
+<span class="sourceLineNo">733</span>      return file.getName();<a name="line.733"></a>
 <span class="sourceLineNo">734</span>    }<a name="line.734"></a>
 <span class="sourceLineNo">735</span><a name="line.735"></a>
 <span class="sourceLineNo">736</span>    @Override<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    public void delete() throws IOException {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      file.deleteStoreFile();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>    }<a name="line.739"></a>
-<span class="sourceLineNo">740</span><a name="line.740"></a>
-<span class="sourceLineNo">741</span>    @Override<a name="line.741"></a>
-<span class="sourceLineNo">742</span>    public String getName() {<a name="line.742"></a>
-<span class="sourceLineNo">743</span>      return file.getPath().getName();<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>    @Override<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    public boolean isFile() {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      return true;<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    }<a name="line.749"></a>
-<span class="sourceLineNo">750</span><a name="line.750"></a>
-<span class="sourceLineNo">751</span>    @Override<a name="line.751"></a>
-<span class="sourceLineNo">752</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      // storefiles don't have children<a name="line.753"></a>
-<span class="sourceLineNo">754</span>      return Collections.emptyList();<a name="line.754"></a>
-<span class="sourceLineNo">755</span>    }<a name="line.755"></a>
-<span class="sourceLineNo">756</span><a name="line.756"></a>
-<span class="sourceLineNo">757</span>    @Override<a name="line.757"></a>
-<span class="sourceLineNo">758</span>    public void close() throws IOException {<a name="line.758"></a>
-<span class="sourceLineNo">759</span>      file.closeStoreFile(true);<a name="line.759"></a>
-<span class="sourceLineNo">760</span>    }<a name="line.760"></a>
-<span class="sourceLineNo">761</span><a name="line.761"></a>
-<span class="sourceLineNo">762</span>    @Override<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    Path getPath() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      return file.getPath();<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    }<a name="line.765"></a>
-<span class="sourceLineNo">766</span>  }<a name="line.766"></a>
-<span class="sourceLineNo">767</span>}<a name="line.767"></a>
+<span class="sourceLineNo">737</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.737"></a>
+<span class="sourceLineNo">738</span>      if (fs.isFile(file)) {<a name="line.738"></a>
+<span class="sourceLineNo">739</span>        return Collections.emptyList();<a name="line.739"></a>
+<span class="sourceLineNo">740</span>      }<a name="line.740"></a>
+<span class="sourceLineNo">741</span>      return Stream.of(fs.listStatus(file)).map(getAsFile).collect(Collectors.toList());<a name="line.741"></a>
+<span class="sourceLineNo">742</span>    }<a name="line.742"></a>
+<span class="sourceLineNo">743</span><a name="line.743"></a>
+<span class="sourceLineNo">744</span>    @Override<a name="line.744"></a>
+<span class="sourceLineNo">745</span>    public boolean isFile() throws IOException {<a name="line.745"></a>
+<span class="sourceLineNo">746</span>      return fs.isFile(file);<a name="line.746"></a>
+<span class="sourceLineNo">747</span>    }<a name="line.747"></a>
+<span class="sourceLineNo">748</span><a name="line.748"></a>
+<span class="sourceLineNo">749</span>    @Override<a name="line.749"></a>
+<span class="sourceLineNo">750</span>    public void close() throws IOException {<a name="line.750"></a>
+<span class="sourceLineNo">751</span>      // NOOP - files are implicitly closed on removal<a name="line.751"></a>
+<span class="sourceLineNo">752</span>    }<a name="line.752"></a>
+<span class="sourceLineNo">753</span><a name="line.753"></a>
+<span class="sourceLineNo">754</span>    @Override<a name="line.754"></a>
+<span class="sourceLineNo">755</span>    Path getPath() {<a name="line.755"></a>
+<span class="sourceLineNo">756</span>      return file;<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    }<a name="line.757"></a>
+<span class="sourceLineNo">758</span>  }<a name="line.758"></a>
+<span class="sourceLineNo">759</span><a name="line.759"></a>
+<span class="sourceLineNo">760</span>  /**<a name="line.760"></a>
+<span class="sourceLineNo">761</span>   * {@link File} adapter for a {@link HStoreFile} living on a {@link FileSystem}<a name="line.761"></a>
+<span class="sourceLineNo">762</span>   * .<a name="line.762"></a>
+<span class="sourceLineNo">763</span>   */<a name="line.763"></a>
+<span class="sourceLineNo">764</span>  private static class FileableStoreFile extends File {<a name="line.764"></a>
+<span class="sourceLineNo">765</span>    HStoreFile file;<a name="line.765"></a>
+<span class="sourceLineNo">766</span><a name="line.766"></a>
+<span class="sourceLineNo">767</span>    public FileableStoreFile(FileSystem fs, HStoreFile store) {<a name="line.767"></a>
+<span class="sourceLineNo">768</span>      super(fs);<a name="line.768"></a>
+<span class="sourceLineNo">769</span>      this.file = store;<a name="line.769"></a>
+<span class="sourceLineNo">770</span>    }<a name="line.770"></a>
+<span class="sourceLineNo">771</span><a name="line.771"></a>
+<span class="sourceLineNo">772</span>    @Override<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    public void delete() throws IOException {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>      file.deleteStoreFile();<a name="line.774"></a>
+<span class="sourceLineNo">775</span>    }<a name="line.775"></a>
+<span class="sourceLineNo">776</span><a name="line.776"></a>
+<span class="sourceLineNo">777</span>    @Override<a name="line.777"></a>
+<span class="sourceLineNo">778</span>    public String getName() {<a name="line.778"></a>
+<span class="sourceLineNo">779</span>      return file.getPath().getName();<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    }<a name="line.780"></a>
+<span class="sourceLineNo">781</span><a name="line.781"></a>
+<span class="sourceLineNo">782</span>    @Override<a name="line.782"></a>
+<span class="sourceLineNo">783</span>    public boolean isFile() {<a name="line.783"></a>
+<span class="sourceLineNo">784</span>      return true;<a name="line.784"></a>
+<span class="sourceLineNo">785</span>    }<a name="line.785"></a>
+<span class="sourceLineNo">786</span><a name="line.786"></a>
+<span class="sourceLineNo">787</span>    @Override<a name="line.787"></a>
+<span class="sourceLineNo">788</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.788"></a>
+<span class="sourceLineNo">789</span>      // storefiles don't have children<a name="line.789"></a>
+<span class="sourceLineNo">790</span>      return Collections.emptyList();<a name="line.790"></a>
+<span class="sourceLineNo">791</span>    }<a name="line.791"></a>
+<span class="sourceLineNo">792</span><a name="line.792"></a>
+<span class="sourceLineNo">793</span>    @Override<a name="line.793"></a>
+<span class="sourceLineNo">794</span>    public void close() throws IOException {<a name="line.794"></a>
+<span class="sourceLineNo">795</span>      file.closeStoreFile(true);<a name="line.795"></a>
+<span class="sourceLineNo">796</span>    }<a name="line.796"></a>
+<span class="sourceLineNo">797</span><a name="line.797"></a>
+<span class="sourceLineNo">798</span>    @Override<a name="line.798"></a>
+<span class="sourceLineNo">799</span>    Path getPath() {<a name="line.799"></a>
+<span class="sourceLineNo">800</span>      return file.getPath();<a name="line.800"></a>
+<span class="sourceLineNo">801</span>    }<a name="line.801"></a>
+<span class="sourceLineNo">802</span>  }<a name="line.802"></a>
+<span class="sourceLineNo">803</span>}<a name="line.803"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
index 0343488..a9dcefd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileStatusConverter.html
@@ -46,733 +46,769 @@
 <span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import org.apache.hadoop.fs.Path;<a name="line.39"></a>
 <span class="sourceLineNo">040</span>import org.apache.hadoop.fs.PathFilter;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.regionserver.HStoreFile;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.Logger;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.slf4j.LoggerFactory;<a name="line.51"></a>
-<span class="sourceLineNo">052</span><a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.53"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.HConstants;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.regionserver.HStoreFile;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.slf4j.Logger;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.slf4j.LoggerFactory;<a name="line.53"></a>
 <span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>/**<a name="line.55"></a>
-<span class="sourceLineNo">056</span> * Utility class to handle the removal of HFiles (or the respective {@link HStoreFile StoreFiles})<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * for a HRegion from the {@link FileSystem}. The hfiles will be archived or deleted, depending on<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * the state of the system.<a name="line.58"></a>
-<span class="sourceLineNo">059</span> */<a name="line.59"></a>
-<span class="sourceLineNo">060</span>@InterfaceAudience.Private<a name="line.60"></a>
-<span class="sourceLineNo">061</span>public class HFileArchiver {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class);<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  private static final String SEPARATOR = ".";<a name="line.63"></a>
-<span class="sourceLineNo">064</span><a name="line.64"></a>
-<span class="sourceLineNo">065</span>  /** Number of retries in case of fs operation failure */<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  private static final int DEFAULT_RETRIES_NUMBER = 3;<a name="line.66"></a>
-<span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  private static final Function&lt;File, Path&gt; FUNC_FILE_TO_PATH =<a name="line.68"></a>
-<span class="sourceLineNo">069</span>      new Function&lt;File, Path&gt;() {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>        @Override<a name="line.70"></a>
-<span class="sourceLineNo">071</span>        public Path apply(File file) {<a name="line.71"></a>
-<span class="sourceLineNo">072</span>          return file == null ? null : file.getPath();<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        }<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      };<a name="line.74"></a>
-<span class="sourceLineNo">075</span><a name="line.75"></a>
-<span class="sourceLineNo">076</span>  private static ThreadPoolExecutor archiveExecutor;<a name="line.76"></a>
+<span class="sourceLineNo">055</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>/**<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * Utility class to handle the removal of HFiles (or the respective {@link HStoreFile StoreFiles})<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * for a HRegion from the {@link FileSystem}. The hfiles will be archived or deleted, depending on<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * the state of the system.<a name="line.60"></a>
+<span class="sourceLineNo">061</span> */<a name="line.61"></a>
+<span class="sourceLineNo">062</span>@InterfaceAudience.Private<a name="line.62"></a>
+<span class="sourceLineNo">063</span>public class HFileArchiver {<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class);<a name="line.64"></a>
+<span class="sourceLineNo">065</span>  private static final String SEPARATOR = ".";<a name="line.65"></a>
+<span class="sourceLineNo">066</span><a name="line.66"></a>
+<span class="sourceLineNo">067</span>  /** Number of retries in case of fs operation failure */<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  private static final int DEFAULT_RETRIES_NUMBER = 3;<a name="line.68"></a>
+<span class="sourceLineNo">069</span><a name="line.69"></a>
+<span class="sourceLineNo">070</span>  private static final Function&lt;File, Path&gt; FUNC_FILE_TO_PATH =<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      new Function&lt;File, Path&gt;() {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>        @Override<a name="line.72"></a>
+<span class="sourceLineNo">073</span>        public Path apply(File file) {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>          return file == null ? null : file.getPath();<a name="line.74"></a>
+<span class="sourceLineNo">075</span>        }<a name="line.75"></a>
+<span class="sourceLineNo">076</span>      };<a name="line.76"></a>
 <span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  private HFileArchiver() {<a name="line.78"></a>
-<span class="sourceLineNo">079</span>    // hidden ctor since this is just a util<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span>  /**<a name="line.82"></a>
-<span class="sourceLineNo">083</span>   * @return True if the Region exits in the filesystem.<a name="line.83"></a>
-<span class="sourceLineNo">084</span>   */<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public static boolean exists(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.85"></a>
-<span class="sourceLineNo">086</span>      throws IOException {<a name="line.86"></a>
-<span class="sourceLineNo">087</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    Path regionDir = FSUtils.getRegionDirFromRootDir(rootDir, info);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    return fs.exists(regionDir);<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  }<a name="line.90"></a>
-<span class="sourceLineNo">091</span><a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /**<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * Cleans up all the files for a HRegion by archiving the HFiles to the archive directory<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   * @param conf the configuration to use<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   * @param fs the file system object<a name="line.95"></a>
-<span class="sourceLineNo">096</span>   * @param info RegionInfo for region to be deleted<a name="line.96"></a>
-<span class="sourceLineNo">097</span>   */<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static void archiveRegion(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      throws IOException {<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    archiveRegion(fs, rootDir, FSUtils.getTableDir(rootDir, info.getTable()),<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      FSUtils.getRegionDirFromRootDir(rootDir, info));<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  }<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  /**<a name="line.105"></a>
-<span class="sourceLineNo">106</span>   * Remove an entire region from the table directory via archiving the region's hfiles.<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   *          the archive path)<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * @param regionDir {@link Path} to where a region is being stored (for building the archive path)<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the region was successfully deleted. &lt;tt&gt;false&lt;/tt&gt; if the filesystem<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   *         operations could not complete.<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   * @throws IOException if the request cannot be completed<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   */<a name="line.115"></a>
-<span class="sourceLineNo">116</span>  public static boolean archiveRegion(FileSystem fs, Path rootdir, Path tableDir, Path regionDir)<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      throws IOException {<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    // otherwise, we archive the files<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    // make sure we can archive<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    if (tableDir == null || regionDir == null) {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>      LOG.error("No archive directory could be found because tabledir (" + tableDir<a name="line.121"></a>
-<span class="sourceLineNo">122</span>          + ") or regiondir (" + regionDir + "was null. Deleting files instead.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      if (regionDir != null) {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        deleteRegionWithoutArchiving(fs, regionDir);<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      }<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      // we should have archived, but failed to. Doesn't matter if we deleted<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      // the archived files correctly or not.<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      return false;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    }<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>    LOG.debug("ARCHIVING {}", regionDir);<a name="line.131"></a>
+<span class="sourceLineNo">078</span>  private static ThreadPoolExecutor archiveExecutor;<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>  private HFileArchiver() {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    // hidden ctor since this is just a util<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * @return True if the Region exits in the filesystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  public static boolean exists(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      throws IOException {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    Path regionDir = FSUtils.getRegionDirFromRootDir(rootDir, info);<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    return fs.exists(regionDir);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>  }<a name="line.92"></a>
+<span class="sourceLineNo">093</span><a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /**<a name="line.94"></a>
+<span class="sourceLineNo">095</span>   * Cleans up all the files for a HRegion by archiving the HFiles to the archive directory<a name="line.95"></a>
+<span class="sourceLineNo">096</span>   * @param conf the configuration to use<a name="line.96"></a>
+<span class="sourceLineNo">097</span>   * @param fs the file system object<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   * @param info RegionInfo for region to be deleted<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   */<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  public static void archiveRegion(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.100"></a>
+<span class="sourceLineNo">101</span>      throws IOException {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    archiveRegion(fs, rootDir, FSUtils.getTableDir(rootDir, info.getTable()),<a name="line.103"></a>
+<span class="sourceLineNo">104</span>      FSUtils.getRegionDirFromRootDir(rootDir, info));<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  /**<a name="line.107"></a>
+<span class="sourceLineNo">108</span>   * Remove an entire region from the table directory via archiving the region's hfiles.<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   *          the archive path)<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * @param regionDir {@link Path} to where a region is being stored (for building the archive path)<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the region was successfully deleted. &lt;tt&gt;false&lt;/tt&gt; if the filesystem<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   *         operations could not complete.<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   * @throws IOException if the request cannot be completed<a name="line.116"></a>
+<span class="sourceLineNo">117</span>   */<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  public static boolean archiveRegion(FileSystem fs, Path rootdir, Path tableDir, Path regionDir)<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      throws IOException {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    // otherwise, we archive the files<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    // make sure we can archive<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    if (tableDir == null || regionDir == null) {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      LOG.error("No archive directory could be found because tabledir (" + tableDir<a name="line.123"></a>
+<span class="sourceLineNo">124</span>          + ") or regiondir (" + regionDir + "was null. Deleting files instead.");<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      if (regionDir != null) {<a name="line.125"></a>
+<span class="sourceLineNo">126</span>        deleteRegionWithoutArchiving(fs, regionDir);<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      }<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      // we should have archived, but failed to. Doesn't matter if we deleted<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      // the archived files correctly or not.<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      return false;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    }<a name="line.131"></a>
 <span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>    // make sure the regiondir lives under the tabledir<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    Preconditions.checkArgument(regionDir.toString().startsWith(tableDir.toString()));<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    Path regionArchiveDir = HFileArchiveUtil.getRegionArchiveDir(rootdir,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        FSUtils.getTableName(tableDir),<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        regionDir.getName());<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    // otherwise, we attempt to archive the store files<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    // build collection of just the store directories to archive<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    Collection&lt;File&gt; toArchive = new ArrayList&lt;&gt;();<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    final PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    PathFilter nonHidden = new PathFilter() {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      @Override<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      public boolean accept(Path file) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        return dirFilter.accept(file) &amp;&amp; !file.getName().startsWith(".");<a name="line.148"></a>
-<span class="sourceLineNo">149</span>      }<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    };<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    FileStatus[] storeDirs = FSUtils.listStatus(fs, regionDir, nonHidden);<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    // if there no files, we can just delete the directory and return;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    if (storeDirs == null) {<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      LOG.debug("Directory {} empty.", regionDir);<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    }<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>    // convert the files in the region to a File<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    Stream.of(storeDirs).map(getAsFile).forEachOrdered(toArchive::add);<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    LOG.debug("Archiving " + toArchive);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, regionArchiveDir, toArchive,<a name="line.161"></a>
-<span class="sourceLineNo">162</span>        EnvironmentEdgeManager.currentTime());<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    if (!failedArchive.isEmpty()) {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      throw new FailedArchiveException(<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        "Failed to archive/delete all the files for region:" + regionDir.getName() + " into " +<a name="line.165"></a>
-<span class="sourceLineNo">166</span>          regionArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.166"></a>
-<span class="sourceLineNo">167</span>        failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    // if that was successful, then we delete the region<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Archive the specified regions in parallel.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf the configuration to use<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @param rootDir {@link Path} to the root directory where hbase files are stored (for building<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   *                            the archive path)<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   *                             path)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * @param regionDirList {@link Path} to where regions are being stored (for building the archive<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   *                                  path)<a name="line.182"></a>
-<span class="sourceLineNo">183</span>   * @throws IOException if the request cannot be completed<a name="line.183"></a>
-<span class="sourceLineNo">184</span>   */<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  public static void archiveRegions(Configuration conf, FileSystem fs, Path rootDir, Path tableDir,<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    List&lt;Path&gt; regionDirList) throws IOException {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    List&lt;Future&lt;Void&gt;&gt; futures = new ArrayList&lt;&gt;(regionDirList.size());<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    for (Path regionDir: regionDirList) {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      Future&lt;Void&gt; future = getArchiveExecutor(conf).submit(() -&gt; {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        archiveRegion(fs, rootDir, tableDir, regionDir);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        return null;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>      });<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      futures.add(future);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    }<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    try {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      for (Future&lt;Void&gt; future: futures) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        future.get();<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    } catch (InterruptedException e) {<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      throw new InterruptedIOException(e.getMessage());<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    } catch (ExecutionException e) {<a name="line.201"></a>
-<span class="sourceLineNo">202</span>      throw new IOException(e.getCause());<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    }<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  }<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  private static synchronized ThreadPoolExecutor getArchiveExecutor(final Configuration conf) {<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    if (archiveExecutor == null) {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int maxThreads = conf.getInt("hbase.hfilearchiver.thread.pool.max", 8);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      archiveExecutor = Threads.getBoundedCachedThreadPool(maxThreads, 30L, TimeUnit.SECONDS,<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        getThreadFactory());<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      // Shutdown this ThreadPool in a shutdown hook<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      Runtime.getRuntime().addShutdownHook(new Thread(() -&gt; archiveExecutor.shutdown()));<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    return archiveExecutor;<a name="line.215"></a>
-<span class="sourceLineNo">216</span>  }<a name="line.216"></a>
-<span class="sourceLineNo">217</span><a name="line.217"></a>
-<span class="sourceLineNo">218</span>  // We need this method instead of Threads.getNamedThreadFactory() to pass some tests.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  // The difference from Threads.getNamedThreadFactory() is that it doesn't fix ThreadGroup for<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  // new threads. If we use Threads.getNamedThreadFactory(), we will face ThreadGroup related<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  // issues in some tests.<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static ThreadFactory getThreadFactory() {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>    return new ThreadFactory() {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      final AtomicInteger threadNumber = new AtomicInteger(1);<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>      @Override<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      public Thread newThread(Runnable r) {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>        final String name = "HFileArchiver-" + threadNumber.getAndIncrement();<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        Thread t = new Thread(r, name);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>        t.setDaemon(true);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        return t;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      }<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    };<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  }<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>   * Remove from the specified region the store files of the specified column family,<a name="line.237"></a>
-<span class="sourceLineNo">238</span>   * either by archiving them or outright deletion<a name="line.238"></a>
-<span class="sourceLineNo">239</span>   * @param fs the filesystem where the store files live<a name="line.239"></a>
-<span class="sourceLineNo">240</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.240"></a>
-<span class="sourceLineNo">241</span>   * @param parent Parent region hosting the store files<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * @param family the family hosting the store files<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  public static void archiveFamily(FileSystem fs, Configuration conf,<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      RegionInfo parent, Path tableDir, byte[] family) throws IOException {<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    Path familyDir = new Path(tableDir, new Path(parent.getEncodedName(), Bytes.toString(family)));<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    archiveFamilyByFamilyDir(fs, conf, parent, familyDir, family);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  }<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>  /**<a name="line.252"></a>
-<span class="sourceLineNo">253</span>   * Removes from the specified region the store files of the specified column family,<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * either by archiving them or outright deletion<a name="line.254"></a>
-<span class="sourceLineNo">255</span>   * @param fs the filesystem where the store files live<a name="line.255"></a>
-<span class="sourceLineNo">256</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.256"></a>
-<span class="sourceLineNo">257</span>   * @param parent Parent region hosting the store files<a name="line.257"></a>
-<span class="sourceLineNo">258</span>   * @param familyDir {@link Path} to where the family is being stored<a name="line.258"></a>
-<span class="sourceLineNo">259</span>   * @param family the family hosting the store files<a name="line.259"></a>
-<span class="sourceLineNo">260</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.260"></a>
-<span class="sourceLineNo">261</span>   */<a name="line.261"></a>
-<span class="sourceLineNo">262</span>  public static void archiveFamilyByFamilyDir(FileSystem fs, Configuration conf,<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      RegionInfo parent, Path familyDir, byte[] family) throws IOException {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    FileStatus[] storeFiles = FSUtils.listStatus(fs, familyDir);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    if (storeFiles == null) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      LOG.debug("No files to dispose of in {}, family={}", parent.getRegionNameAsString(),<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          Bytes.toString(family));<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    Collection&lt;File&gt; toArchive = Stream.of(storeFiles).map(getAsFile).collect(Collectors.toList());<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, parent, family);<a name="line.273"></a>
-<span class="sourceLineNo">274</span><a name="line.274"></a>
-<span class="sourceLineNo">275</span>    // do the actual archive<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, storeArchiveDir, toArchive,<a name="line.276"></a>
-<span class="sourceLineNo">277</span>        EnvironmentEdgeManager.currentTime());<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    if (!failedArchive.isEmpty()){<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          + Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.281"></a>
-<span class="sourceLineNo">282</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
-<span class="sourceLineNo">285</span><a name="line.285"></a>
-<span class="sourceLineNo">286</span>  /**<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   * Remove the store files, either by archiving them or outright deletion<a name="line.287"></a>
-<span class="sourceLineNo">288</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.288"></a>
-<span class="sourceLineNo">289</span>   * @param fs the filesystem where the store files live<a name="line.289"></a>
-<span class="sourceLineNo">290</span>   * @param regionInfo {@link RegionInfo} of the region hosting the store files<a name="line.290"></a>
-<span class="sourceLineNo">291</span>   * @param family the family hosting the store files<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   * @param compactedFiles files to be disposed of. No further reading of these files should be<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   *          attempted; otherwise likely to cause an {@link IOException}<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   */<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  public static void archiveStoreFiles(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      Path tableDir, byte[] family, Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      throws IOException, FailedArchiveException {<a name="line.298"></a>
-<span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    // sometimes in testing, we don't have rss, so we need to check for that<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    if (fs == null) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      LOG.warn("Passed filesystem is null, so just deleting files without archiving for {}," +<a name="line.302"></a>
-<span class="sourceLineNo">303</span>              "family={}", Bytes.toString(regionInfo.getRegionName()), Bytes.toString(family));<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      deleteStoreFilesWithoutArchiving(compactedFiles);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      return;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>    // short circuit if we don't have any files to delete<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    if (compactedFiles.isEmpty()) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>      LOG.debug("No files to dispose of, done!");<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      return;<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span><a name="line.313"></a>
-<span class="sourceLineNo">314</span>    // build the archive path<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    if (regionInfo == null || family == null) throw new IOException(<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        "Need to have a region and a family to archive from.");<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.318"></a>
-<span class="sourceLineNo">319</span><a name="line.319"></a>
-<span class="sourceLineNo">320</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.322"></a>
-<span class="sourceLineNo">323</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    }<a name="line.324"></a>
-<span class="sourceLineNo">325</span><a name="line.325"></a>
-<span class="sourceLineNo">326</span>    // otherwise we attempt to archive the store files<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    LOG.debug("Archiving compacted files.");<a name="line.327"></a>
-<span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>    // Wrap the storefile into a File<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    StoreToFile getStorePath = new StoreToFile(fs);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    Collection&lt;File&gt; storeFiles =<a name="line.331"></a>
-<span class="sourceLineNo">332</span>      compactedFiles.stream().map(getStorePath).collect(Collectors.toList());<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>    // do the actual archive<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    List&lt;File&gt; failedArchive =<a name="line.335"></a>
-<span class="sourceLineNo">336</span>      resolveAndArchive(fs, storeArchiveDir, storeFiles, EnvironmentEdgeManager.currentTime());<a name="line.336"></a>
-<span class="sourceLineNo">337</span><a name="line.337"></a>
-<span class="sourceLineNo">338</span>    if (!failedArchive.isEmpty()){<a name="line.338"></a>
-<span class="sourceLineNo">339</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.339"></a>
-<span class="sourceLineNo">340</span>          + Bytes.toString(regionInfo.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.340"></a>
-<span class="sourceLineNo">341</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.341"></a>
-<span class="sourceLineNo">342</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    }<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  /**<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   * Archive the store file<a name="line.347"></a>
-<span class="sourceLineNo">348</span>   * @param fs the filesystem where the store files live<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   * @param regionInfo region hosting the store files<a name="line.349"></a>
-<span class="sourceLineNo">350</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   * @param family the family hosting the store files<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param storeFile file to be archived<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
-<span class="sourceLineNo">356</span>  public static void archiveStoreFile(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      Path tableDir, byte[] family, Path storeFile) throws IOException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    }<a name="line.363"></a>
+<span class="sourceLineNo">133</span>    LOG.debug("ARCHIVING {}", regionDir);<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>    // make sure the regiondir lives under the tabledir<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    Preconditions.checkArgument(regionDir.toString().startsWith(tableDir.toString()));<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    Path regionArchiveDir = HFileArchiveUtil.getRegionArchiveDir(rootdir,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        FSUtils.getTableName(tableDir),<a name="line.138"></a>
+<span class="sourceLineNo">139</span>        regionDir.getName());<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    // otherwise, we attempt to archive the store files<a name="line.142"></a>
+<span class="sourceLineNo">143</span><a name="line.143"></a>
+<span class="sourceLineNo">144</span>    // build collection of just the store directories to archive<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    Collection&lt;File&gt; toArchive = new ArrayList&lt;&gt;();<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    final PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    PathFilter nonHidden = new PathFilter() {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      @Override<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      public boolean accept(Path file) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        return dirFilter.accept(file) &amp;&amp; !file.getName().startsWith(".");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      }<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    };<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    FileStatus[] storeDirs = FSUtils.listStatus(fs, regionDir, nonHidden);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    // if there no files, we can just delete the directory and return;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    if (storeDirs == null) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      LOG.debug("Directory {} empty.", regionDir);<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    }<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>    // convert the files in the region to a File<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    Stream.of(storeDirs).map(getAsFile).forEachOrdered(toArchive::add);<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    LOG.debug("Archiving " + toArchive);<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, regionArchiveDir, toArchive,<a name="line.163"></a>
+<span class="sourceLineNo">164</span>        EnvironmentEdgeManager.currentTime());<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    if (!failedArchive.isEmpty()) {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      throw new FailedArchiveException(<a name="line.166"></a>
+<span class="sourceLineNo">167</span>        "Failed to archive/delete all the files for region:" + regionDir.getName() + " into " +<a name="line.167"></a>
+<span class="sourceLineNo">168</span>          regionArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.168"></a>
+<span class="sourceLineNo">169</span>        failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    }<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    // if that was successful, then we delete the region<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return deleteRegionWithoutArchiving(fs, regionDir);<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Archive the specified regions in parallel.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf the configuration to use<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @param rootDir {@link Path} to the root directory where hbase files are stored (for building<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   *                            the archive path)<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive<a name="line.181"></a>
+<span class="sourceLineNo">182</span>   *                             path)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   * @param regionDirList {@link Path} to where regions are being stored (for building the archive<a name="line.183"></a>
+<span class="sourceLineNo">184</span>   *                                  path)<a name="line.184"></a>
+<span class="sourceLineNo">185</span>   * @throws IOException if the request cannot be completed<a name="line.185"></a>
+<span class="sourceLineNo">186</span>   */<a name="line.186"></a>
+<span class="sourceLineNo">187</span>  public static void archiveRegions(Configuration conf, FileSystem fs, Path rootDir, Path tableDir,<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    List&lt;Path&gt; regionDirList) throws IOException {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    List&lt;Future&lt;Void&gt;&gt; futures = new ArrayList&lt;&gt;(regionDirList.size());<a name="line.189"></a>
+<span class="sourceLineNo">190</span>    for (Path regionDir: regionDirList) {<a name="line.190"></a>
+<span class="sourceLineNo">191</span>      Future&lt;Void&gt; future = getArchiveExecutor(conf).submit(() -&gt; {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>        archiveRegion(fs, rootDir, tableDir, regionDir);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>        return null;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      });<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      futures.add(future);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    try {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      for (Future&lt;Void&gt; future: futures) {<a name="line.198"></a>
+<span class="sourceLineNo">199</span>        future.get();<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      }<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    } catch (InterruptedException e) {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>      throw new InterruptedIOException(e.getMessage());<a name="line.202"></a>
+<span class="sourceLineNo">203</span>    } catch (ExecutionException e) {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      throw new IOException(e.getCause());<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    }<a name="line.205"></a>
+<span class="sourceLineNo">206</span>  }<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>  private static synchronized ThreadPoolExecutor getArchiveExecutor(final Configuration conf) {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    if (archiveExecutor == null) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      int maxThreads = conf.getInt("hbase.hfilearchiver.thread.pool.max", 8);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      archiveExecutor = Threads.getBoundedCachedThreadPool(maxThreads, 30L, TimeUnit.SECONDS,<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        getThreadFactory());<a name="line.212"></a>
+<span class="sourceLineNo">213</span><a name="line.213"></a>
+<span class="sourceLineNo">214</span>      // Shutdown this ThreadPool in a shutdown hook<a name="line.214"></a>
+<span class="sourceLineNo">215</span>      Runtime.getRuntime().addShutdownHook(new Thread(() -&gt; archiveExecutor.shutdown()));<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    }<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    return archiveExecutor;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  }<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>  // We need this method instead of Threads.getNamedThreadFactory() to pass some tests.<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  // The difference from Threads.getNamedThreadFactory() is that it doesn't fix ThreadGroup for<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  // new threads. If we use Threads.getNamedThreadFactory(), we will face ThreadGroup related<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  // issues in some tests.<a name="line.223"></a>
+<span class="sourceLineNo">224</span>  private static ThreadFactory getThreadFactory() {<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    return new ThreadFactory() {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>      final AtomicInteger threadNumber = new AtomicInteger(1);<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>      @Override<a name="line.228"></a>
+<span class="sourceLineNo">229</span>      public Thread newThread(Runnable r) {<a name="line.229"></a>
+<span class="sourceLineNo">230</span>        final String name = "HFileArchiver-" + threadNumber.getAndIncrement();<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        Thread t = new Thread(r, name);<a name="line.231"></a>
+<span class="sourceLineNo">232</span>        t.setDaemon(true);<a name="line.232"></a>
+<span class="sourceLineNo">233</span>        return t;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      }<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    };<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  }<a name="line.236"></a>
+<span class="sourceLineNo">237</span><a name="line.237"></a>
+<span class="sourceLineNo">238</span>  /**<a name="line.238"></a>
+<span class="sourceLineNo">239</span>   * Remove from the specified region the store files of the specified column family,<a name="line.239"></a>
+<span class="sourceLineNo">240</span>   * either by archiving them or outright deletion<a name="line.240"></a>
+<span class="sourceLineNo">241</span>   * @param fs the filesystem where the store files live<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   * @param parent Parent region hosting the store files<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * @param family the family hosting the store files<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.246"></a>
+<span class="sourceLineNo">247</span>   */<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  public static void archiveFamily(FileSystem fs, Configuration conf,<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      RegionInfo parent, Path tableDir, byte[] family) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    Path familyDir = new Path(tableDir, new Path(parent.getEncodedName(), Bytes.toString(family)));<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    archiveFamilyByFamilyDir(fs, conf, parent, familyDir, family);<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  }<a name="line.252"></a>
+<span class="sourceLineNo">253</span><a name="line.253"></a>
+<span class="sourceLineNo">254</span>  /**<a name="line.254"></a>
+<span class="sourceLineNo">255</span>   * Removes from the specified region the store files of the specified column family,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>   * either by archiving them or outright deletion<a name="line.256"></a>
+<span class="sourceLineNo">257</span>   * @param fs the filesystem where the store files live<a name="line.257"></a>
+<span class="sourceLineNo">258</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.258"></a>
+<span class="sourceLineNo">259</span>   * @param parent Parent region hosting the store files<a name="line.259"></a>
+<span class="sourceLineNo">260</span>   * @param familyDir {@link Path} to where the family is being stored<a name="line.260"></a>
+<span class="sourceLineNo">261</span>   * @param family the family hosting the store files<a name="line.261"></a>
+<span class="sourceLineNo">262</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   */<a name="line.263"></a>
+<span class="sourceLineNo">264</span>  public static void archiveFamilyByFamilyDir(FileSystem fs, Configuration conf,<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      RegionInfo parent, Path familyDir, byte[] family) throws IOException {<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    FileStatus[] storeFiles = FSUtils.listStatus(fs, familyDir);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    if (storeFiles == null) {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      LOG.debug("No files to dispose of in {}, family={}", parent.getRegionNameAsString(),<a name="line.268"></a>
+<span class="sourceLineNo">269</span>          Bytes.toString(family));<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      return;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>    FileStatusConverter getAsFile = new FileStatusConverter(fs);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    Collection&lt;File&gt; toArchive = Stream.of(storeFiles).map(getAsFile).collect(Collectors.toList());<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, parent, family);<a name="line.275"></a>
+<span class="sourceLineNo">276</span><a name="line.276"></a>
+<span class="sourceLineNo">277</span>    // do the actual archive<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    List&lt;File&gt; failedArchive = resolveAndArchive(fs, storeArchiveDir, toArchive,<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        EnvironmentEdgeManager.currentTime());<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    if (!failedArchive.isEmpty()){<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          + Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.282"></a>
+<span class="sourceLineNo">283</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    }<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
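// Editor's sketch (not part of HFileArchiver): a hedged example of how a caller might archive an
// entire column family with archiveFamily(...) above. The table name, family name and the use of
// FSUtils.getRootDir/getTableDir to build tableDir are illustrative assumptions, not taken from
// this class.
//
//   Configuration conf = ...;                      // cluster configuration
//   FileSystem fs = FileSystem.get(conf);          // filesystem hosting the store files
//   Path rootDir = FSUtils.getRootDir(conf);       // hbase.rootdir
//   Path tableDir = FSUtils.getTableDir(rootDir, TableName.valueOf("my_table"));
//   HFileArchiver.archiveFamily(fs, conf, regionInfo, tableDir, Bytes.toBytes("cf"));
//   // on success the family's store files now live under the archive dir; failures throw a
//   // FailedArchiveException listing the paths that could not be moved.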
+<span class="sourceLineNo">288</span>  /**<a name="line.288"></a>
+<span class="sourceLineNo">289</span>   * Removes the store files, either by archiving them or deleting them outright.<a name="line.289"></a>
+<span class="sourceLineNo">290</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.290"></a>
+<span class="sourceLineNo">291</span>   * @param fs the filesystem where the store files live<a name="line.291"></a>
+<span class="sourceLineNo">292</span>   * @param regionInfo {@link RegionInfo} of the region hosting the store files<a name="line.292"></a>
+<span class="sourceLineNo">293</span>   * @param family the family hosting the store files<a name="line.293"></a>
+<span class="sourceLineNo">294</span>   * @param compactedFiles files to be disposed of. No further reading of these files should be<a name="line.294"></a>
+<span class="sourceLineNo">295</span>   *          attempted; otherwise it is likely to cause an {@link IOException}<a name="line.295"></a>
+<span class="sourceLineNo">296</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.296"></a>
+<span class="sourceLineNo">297</span>   */<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  public static void archiveStoreFiles(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      Path tableDir, byte[] family, Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      throws IOException {<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    archive(fs, regionInfo, family, compactedFiles, storeArchiveDir);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  }<a name="line.303"></a>
+<span class="sourceLineNo">304</span><a name="line.304"></a>
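// Editor's sketch (not part of HFileArchiver): a hedged example of calling archiveStoreFiles(...)
// after a compaction. Note that the tableDir parameter (the table's directory under the HBase
// root, used to build the archive path) is part of the signature even though it is not listed in
// the javadoc above. The variable names are assumptions for illustration only.
//
//   Collection<HStoreFile> compactedAway = ...;    // files replaced by the compaction output
//   HFileArchiver.archiveStoreFiles(conf, fs, regionInfo, tableDir, Bytes.toBytes("cf"),
//       compactedAway);
//   // the compacted-away files are moved under the store's archive directory rather than deleted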
+<span class="sourceLineNo">305</span>  /**<a name="line.305"></a>
+<span class="sourceLineNo">306</span>   * Archives recovered edits using the existing logic for archiving store files. This is currently<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * only relevant when &lt;b&gt;hbase.region.archive.recovered.edits&lt;/b&gt; is true, as recovered edits<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   * shouldn't be kept after replay. In theory, we could use the very same method available for<a name="line.308"></a>
+<span class="sourceLineNo">309</span>   * archiving store files, but supporting WAL dir and store files on different FileSystems added<a name="line.309"></a>
+<span class="sourceLineNo">310</span>   * the need for extra validation of the passed FileSystem instance and of the path where the<a name="line.310"></a>
+<span class="sourceLineNo">311</span>   * archived edits should be placed.<a name="line.311"></a>
+<span class="sourceLineNo">312</span>   * @param conf {@link Configuration} to determine the archive directory.<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * @param fs the filesystem used for storing WAL files.<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * @param regionInfo {@link RegionInfo} a pseudo region representation for the archiving logic.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * @param family a pseudo family representation for the archiving logic.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   * @param replayedEdits the recovered edits to be archived.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   * @throws IOException if files can't be archived due to some internal error.<a name="line.317"></a>
+<span class="sourceLineNo">318</span>   */<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  public static void archiveRecoveredEdits(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    byte[] family, Collection&lt;HStoreFile&gt; replayedEdits)<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    throws IOException {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    String workingDir = conf.get(CommonFSUtils.HBASE_WAL_DIR, conf.get(HConstants.HBASE_DIR));<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    // extra sanity checks for the right FS<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    Path path = new Path(workingDir);<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    if(path.isAbsoluteAndSchemeAuthorityNull()){<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      // no scheme specified on the wal dir value, so it's on the same FS as the StoreFiles<a name="line.326"></a>
+<span class="sourceLineNo">327</span>      path = new Path(conf.get(HConstants.HBASE_DIR));<a name="line.327"></a>
+<span class="sourceLineNo">328</span>    }<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    if(path.toUri().getScheme()!=null &amp;&amp; !path.toUri().getScheme().equals(fs.getScheme())){<a name="line.329"></a>
+<span class="sourceLineNo">330</span>      throw new IOException("Wrong file system! Should be " + path.toUri().getScheme() +<a name="line.330"></a>
+<span class="sourceLineNo">331</span>        ", but got " +  fs.getScheme());<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    }<a name="line.332"></a>
+<span class="sourceLineNo">333</span>    path = HFileArchiveUtil.getStoreArchivePathForRootDir(path, regionInfo, family);<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    archive(fs, regionInfo, family, replayedEdits, path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>  }<a name="line.335"></a>
+<span class="sourceLineNo">336</span><a name="line.336"></a>
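// Editor's note (not part of HFileArchiver): a hedged illustration of the filesystem check above.
// When hbase.wal.dir has no scheme, the WAL dir is assumed to live on the same filesystem as
// hbase.rootdir; when it does carry a scheme, the passed FileSystem must match it or an
// IOException is thrown. For example (the paths below are assumptions, not from this class):
//
//   new Path("/hbase-wal").isAbsoluteAndSchemeAuthorityNull()    -> true  (fall back to hbase.rootdir)
//   new Path("hdfs://ns1/hbase-wal").toUri().getScheme()         -> "hdfs"
//   // so a FileSystem whose getScheme() returns "s3a" would be rejected for that WAL dir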
+<span class="sourceLineNo">337</span>  private static void archive(FileSystem fs, RegionInfo regionInfo, byte[] family,<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    Collection&lt;HStoreFile&gt; compactedFiles, Path storeArchiveDir) throws IOException {<a name="line.338"></a>
+<span class="sourceLineNo">339</span>    // sometimes in testing, we don't have rss, so we need to check for that<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    if (fs == null) {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      LOG.warn("Passed filesystem is null, so just deleting files without archiving for {}," +<a name="line.341"></a>
+<span class="sourceLineNo">342</span>              "family={}", Bytes.toString(regionInfo.getRegionName()), Bytes.toString(family));<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      deleteStoreFilesWithoutArchiving(compactedFiles);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>      return;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
+<span class="sourceLineNo">346</span><a name="line.346"></a>
+<span class="sourceLineNo">347</span>    // short circuit if we don't have any files to delete<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    if (compactedFiles.isEmpty()) {<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      LOG.debug("No files to dispose of, done!");<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      return;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>    }<a name="line.351"></a>
+<span class="sourceLineNo">352</span><a name="line.352"></a>
+<span class="sourceLineNo">353</span>    // build the archive path<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    if (regionInfo == null || family == null) throw new IOException(<a name="line.354"></a>
+<span class="sourceLineNo">355</span>        "Need to have a region and a family to archive from.");<a name="line.355"></a>
+<span class="sourceLineNo">356</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.358"></a>
+<span class="sourceLineNo">359</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>    // otherwise we attempt to archive the store files<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    LOG.debug("Archiving compacted files.");<a name="line.363"></a>
 <span class="sourceLineNo">364</span><a name="line.364"></a>
-<span class="sourceLineNo">365</span>    // do the actual archive<a name="line.365"></a>
-<span class="sourceLineNo">366</span>    long start = EnvironmentEdgeManager.currentTime();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    File file = new FileablePath(fs, storeFile);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    if (!resolveAndArchiveFile(storeArchiveDir, file, Long.toString(start))) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      throw new IOException("Failed to archive/delete the file for region:"<a name="line.369"></a>
-<span class="sourceLineNo">370</span>          + regionInfo.getRegionNameAsString() + ", family:" + Bytes.toString(family)<a name="line.370"></a>
-<span class="sourceLineNo">371</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.");<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    }<a name="line.372"></a>
-<span class="sourceLineNo">373</span>  }<a name="line.373"></a>
-<span class="sourceLineNo">374</span><a name="line.374"></a>
-<span class="sourceLineNo">375</span>  /**<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   * Resolve any conflict with an existing archive file via timestamp-append<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * renaming of the existing file and then archive the passed in files.<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * @param fs {@link FileSystem} on which to archive the files<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   * @param baseArchiveDir base archive directory to store the files. If any of<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   *          the files to archive are directories, will append the name of the<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   *          directory to the base archive directory name, creating a parallel<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   *          structure.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * @param toArchive files/directories that need to be archvied<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param start time the archiving started - used for resolving archive<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   *          conflicts.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * @return the list of failed to archive files.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @throws IOException if an unexpected file operation exception occurred<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  private static List&lt;File&gt; resolveAndArchive(FileSystem fs, Path baseArchiveDir,<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      Collection&lt;File&gt; toArchive, long start) throws IOException {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    // short circuit if no files to move<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    if (toArchive.isEmpty()) {<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      return Collections.emptyList();<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
-<span class="sourceLineNo">395</span><a name="line.395"></a>
-<span class="sourceLineNo">396</span>    LOG.trace("Moving files to the archive directory {}", baseArchiveDir);<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>    // make sure the archive directory exists<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    if (!fs.exists(baseArchiveDir)) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>      if (!fs.mkdirs(baseArchiveDir)) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>        throw new IOException("Failed to create the archive directory:" + baseArchiveDir<a name="line.401"></a>
-<span class="sourceLineNo">402</span>            + ", quitting archive attempt.");<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      }<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      LOG.trace("Created archive directory {}", baseArchiveDir);<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    }<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    List&lt;File&gt; failures = new ArrayList&lt;&gt;();<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    String startTime = Long.toString(start);<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    for (File file : toArchive) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      // if its a file archive it<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      try {<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        LOG.trace("Archiving {}", file);<a name="line.412"></a>
-<span class="sourceLineNo">413</span>        if (file.isFile()) {<a name="line.413"></a>
-<span class="sourceLineNo">414</span>          // attempt to archive the file<a name="line.414"></a>
-<span class="sourceLineNo">415</span>          if (!resolveAndArchiveFile(baseArchiveDir, file, startTime)) {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            LOG.warn("Couldn't archive " + file + " into backup directory: " + baseArchiveDir);<a name="line.416"></a>
-<span class="sourceLineNo">417</span>            failures.add(file);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>          }<a name="line.418"></a>
-<span class="sourceLineNo">419</span>        } else {<a name="line.419"></a>
-<span class="sourceLineNo">420</span>          // otherwise its a directory and we need to archive all files<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          LOG.trace("{} is a directory, archiving children files", file);<a name="line.421"></a>
-<span class="sourceLineNo">422</span>          // so we add the directory name to the one base archive<a name="line.422"></a>
-<span class="sourceLineNo">423</span>          Path parentArchiveDir = new Path(baseArchiveDir, file.getName());<a name="line.423"></a>
-<span class="sourceLineNo">424</span>          // and then get all the files from that directory and attempt to<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          // archive those too<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          Collection&lt;File&gt; children = file.getChildren();<a name="line.426"></a>
-<span class="sourceLineNo">427</span>          failures.addAll(resolveAndArchive(fs, parentArchiveDir, children, start));<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        }<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      } catch (IOException e) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>        LOG.warn("Failed to archive {}", file, e);<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        failures.add(file);<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      }<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    return failures;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  }<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>  /**<a name="line.437"></a>
-<span class="sourceLineNo">438</span>   * Attempt to archive the passed in file to the archive directory.<a name="line.438"></a>
-<span class="sourceLineNo">439</span>   * &lt;p&gt;<a name="line.439"></a>
-<span class="sourceLineNo">440</span>   * If the same file already exists in the archive, it is moved to a timestamped directory under<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   * the archive directory and the new file is put in its place.<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * @param archiveDir {@link Path} to the directory that stores the archives of the hfiles<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   * @param currentFile {@link Path} to the original HFile that will be archived<a name="line.443"></a>
-<span class="sourceLineNo">444</span>   * @param archiveStartTime time the archiving started, to resolve naming conflicts<a name="line.444"></a>
-<span class="sourceLineNo">445</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the file is successfully archived. &lt;tt&gt;false&lt;/tt&gt; if there was a<a name="line.445"></a>
-<span class="sourceLineNo">446</span>   *         problem, but the operation still completed.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>   * @throws IOException on failure to complete {@link FileSystem} operations.<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   */<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  private static boolean resolveAndArchiveFile(Path archiveDir, File currentFile,<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      String archiveStartTime) throws IOException {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>    // build path as it should be in the archive<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    String filename = currentFile.getName();<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    Path archiveFile = new Path(archiveDir, filename);<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    FileSystem fs = currentFile.getFileSystem();<a name="line.454"></a>
-<span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>    // if the file already exists in the archive, move that one to a timestamped backup. This is a<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    // really, really unlikely situtation, where we get the same name for the existing file, but<a name="line.457"></a>
-<span class="sourceLineNo">458</span>    // is included just for that 1 in trillion chance.<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    if (fs.exists(archiveFile)) {<a name="line.459"></a>
-<span class="sourceLineNo">460</span>      LOG.debug("{} already exists in archive, moving to timestamped backup and " +<a name="line.460"></a>
-<span class="sourceLineNo">461</span>          "overwriting current.", archiveFile);<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>      // move the archive file to the stamped backup<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      Path backedupArchiveFile = new Path(archiveDir, filename + SEPARATOR + archiveStartTime);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      if (!fs.rename(archiveFile, backedupArchiveFile)) {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        LOG.error("Could not rename archive file to backup: " + backedupArchiveFile<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            + ", deleting existing file in favor of newer.");<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        // try to delete the exisiting file, if we can't rename it<a name="line.468"></a>
-<span class="sourceLineNo">469</span>        if (!fs.delete(archiveFile, false)) {<a name="line.469"></a>
-<span class="sourceLineNo">470</span>          throw new IOException("Couldn't delete existing archive file (" + archiveFile<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + ") or rename it to the backup file (" + backedupArchiveFile<a name="line.471"></a>
-<span class="sourceLineNo">472</span>              + ") to make room for similarly named file.");<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>      }<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      LOG.debug("Backed up archive file from " + archiveFile);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    }<a name="line.476"></a>
-<span class="sourceLineNo">477</span><a name="line.477"></a>
-<span class="sourceLineNo">478</span>    LOG.trace("No existing file in archive for {}, free to archive original file.", archiveFile);<a name="line.478"></a>
-<span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>    // at this point, we should have a free spot for the archive file<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    boolean success = false;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    for (int i = 0; !success &amp;&amp; i &lt; DEFAULT_RETRIES_NUMBER; ++i) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      if (i &gt; 0) {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>        // Ensure that the archive directory exists.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        // The previous "move to archive" operation has failed probably because<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        // the cleaner has removed our archive directory (HBASE-7643).<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        // (we're in a retry loop, so don't worry too much about the exception)<a name="line.487"></a>
-<span class="sourceLineNo">488</span>        try {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>          if (!fs.exists(archiveDir)) {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            if (fs.mkdirs(archiveDir)) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>              LOG.debug("Created archive directory {}", archiveDir);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>            }<a name="line.492"></a>
-<span class="sourceLineNo">493</span>          }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        } catch (IOException e) {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          LOG.warn("Failed to create directory {}", archiveDir, e);<a name="line.495"></a>
-<span class="sourceLineNo">496</span>        }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      }<a name="line.497"></a>
+<span class="sourceLineNo">365</span>    // Wrap the store files into Files<a name="line.365"></a>
+<span class="sourceLineNo">366</span>    StoreToFile getStorePath = new StoreToFile(fs);<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    Collection&lt;File&gt; storeFiles =<a name="line.367"></a>
+<span class="sourceLineNo">368</span>      compactedFiles.stream().map(getStorePath).collect(Collectors.toList());<a name="line.368"></a>
+<span class="sourceLineNo">369</span><a name="line.369"></a>
+<span class="sourceLineNo">370</span>    // do the actual archive<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    List&lt;File&gt; failedArchive =<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      resolveAndArchive(fs, storeArchiveDir, storeFiles, EnvironmentEdgeManager.currentTime());<a name="line.372"></a>
+<span class="sourceLineNo">373</span><a name="line.373"></a>
+<span class="sourceLineNo">374</span>    if (!failedArchive.isEmpty()){<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      throw new FailedArchiveException("Failed to archive/delete all the files for region:"<a name="line.375"></a>
+<span class="sourceLineNo">376</span>          + Bytes.toString(regionInfo.getRegionName()) + ", family:" + Bytes.toString(family)<a name="line.376"></a>
+<span class="sourceLineNo">377</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.",<a name="line.377"></a>
+<span class="sourceLineNo">378</span>          failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   * Archive the store file<a name="line.383"></a>
+<span class="sourceLineNo">384</span>   * @param fs the filesystem where the store files live<a name="line.384"></a>
+<span class="sourceLineNo">385</span>   * @param regionInfo region hosting the store files<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @param conf {@link Configuration} to examine to determine the archive directory<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.387"></a>
+<span class="sourceLineNo">388</span>   * @param family the family hosting the store files<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * @param storeFile file to be archived<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @throws IOException if the files could not be correctly disposed.<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   */<a name="line.391"></a>
+<span class="sourceLineNo">392</span>  public static void archiveStoreFile(Configuration conf, FileSystem fs, RegionInfo regionInfo,<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      Path tableDir, byte[] family, Path storeFile) throws IOException {<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    Path storeArchiveDir = HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, tableDir, family);<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    // make sure we don't archive if we can't and that the archive dir exists<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    if (!fs.mkdirs(storeArchiveDir)) {<a name="line.396"></a>
+<span class="sourceLineNo">397</span>      throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:"<a name="line.397"></a>
+<span class="sourceLineNo">398</span>          + Bytes.toString(family) + ", deleting compacted files instead.");<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    }<a name="line.399"></a>
+<span class="sourceLineNo">400</span><a name="line.400"></a>
+<span class="sourceLineNo">401</span>    // do the actual archive<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    long start = EnvironmentEdgeManager.currentTime();<a name="line.402"></a>
+<span class="sourceLineNo">403</span>    File file = new FileablePath(fs, storeFile);<a name="line.403"></a>
+<span class="sourceLineNo">404</span>    if (!resolveAndArchiveFile(storeArchiveDir, file, Long.toString(start))) {<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      throw new IOException("Failed to archive/delete the file for region:"<a name="line.405"></a>
+<span class="sourceLineNo">406</span>          + regionInfo.getRegionNameAsString() + ", family:" + Bytes.toString(family)<a name="line.406"></a>
+<span class="sourceLineNo">407</span>          + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.");<a name="line.407"></a>
+<span class="sourceLineNo">408</span>    }<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  }<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
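// Editor's sketch (not part of HFileArchiver): a hedged single-file counterpart to the example
// after archiveStoreFiles above; the hfile name below is an assumption for illustration only.
//
//   Path hfile = new Path(familyDir, "0123456789abcdef0123456789abcdef");
//   HFileArchiver.archiveStoreFile(conf, fs, regionInfo, tableDir, Bytes.toBytes("cf"), hfile);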
+<span class="sourceLineNo">411</span>  /**<a name="line.411"></a>
+<span class="sourceLineNo">412</span>   * Resolve any conflict with an existing archive file via timestamp-append<a name="line.412"></a>
+<span class="sourceLineNo">413</span>   * renaming of the existing file and then archive the passed in files.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @param fs {@link FileSystem} on which to archive the files<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * @param baseArchiveDir base archive directory to store the files. If any of<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   *          the files to archive are directories, will append the name of the<a name="line.416"></a>
+<span class="sourceLineNo">417</span>   *          directory to the base archive directory name, creating a parallel<a name="line.417"></a>
+<span class="sourceLineNo">418</span>   *          structure.<a name="line.418"></a>
+<span class="sourceLineNo">419</span>   * @param toArchive files/directories that need to be archived<a name="line.419"></a>
+<span class="sourceLineNo">420</span>   * @param start time the archiving started - used for resolving archive<a name="line.420"></a>
+<span class="sourceLineNo">421</span>   *          conflicts.<a name="line.421"></a>
+<span class="sourceLineNo">422</span>   * @return the list of files that failed to archive.<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * @throws IOException if an unexpected file operation exception occurred<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   */<a name="line.424"></a>
+<span class="sourceLineNo">425</span>  private static List&lt;File&gt; resolveAndArchive(FileSystem fs, Path baseArchiveDir,<a name="line.425"></a>
+<span class="sourceLineNo">426</span>      Collection&lt;File&gt; toArchive, long start) throws IOException {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    // short circuit if no files to move<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    if (toArchive.isEmpty()) {<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      return Collections.emptyList();<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    }<a name="line.430"></a>
+<span class="sourceLineNo">431</span><a name="line.431"></a>
+<span class="sourceLineNo">432</span>    LOG.trace("Moving files to the archive directory {}", baseArchiveDir);<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    // make sure the archive directory exists<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    if (!fs.exists(baseArchiveDir)) {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      if (!fs.mkdirs(baseArchiveDir)) {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        throw new IOException("Failed to create the archive directory:" + baseArchiveDir<a name="line.437"></a>
+<span class="sourceLineNo">438</span>            + ", quitting archive attempt.");<a name="line.438"></a>
+<span class="sourceLineNo">439</span>      }<a name="line.439"></a>
+<span class="sourceLineNo">440</span>      LOG.trace("Created archive directory {}", baseArchiveDir);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>    List&lt;File&gt; failures = new ArrayList&lt;&gt;();<a name="line.443"></a>
+<span class="sourceLineNo">444</span>    String startTime = Long.toString(start);<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    for (File file : toArchive) {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      // if it's a file, archive it<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      try {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>        LOG.trace("Archiving {}", file);<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        if (file.isFile()) {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>          // attempt to archive the file<a name="line.450"></a>
+<span class="sourceLineNo">451</span>          if (!resolveAndArchiveFile(baseArchiveDir, file, startTime)) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>            LOG.warn("Couldn't archive " + file + " into backup directory: " + baseArchiveDir);<a name="line.452"></a>
+<span class="sourceLineNo">453</span>            failures.add(file);<a name="line.453"></a>
+<span class="sourceLineNo">454</span>          }<a name="line.454"></a>
+<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
+<span class="sourceLineNo">456</span>          // otherwise it's a directory and we need to archive all of its files<a name="line.456"></a>
+<span class="sourceLineNo">457</span>          LOG.trace("{} is a directory, archiving children files", file);<a name="line.457"></a>
+<span class="sourceLineNo">458</span>          // so we add the directory name to the base archive directory<a name="line.458"></a>
+<span class="sourceLineNo">459</span>          Path parentArchiveDir = new Path(baseArchiveDir, file.getName());<a name="line.459"></a>
+<span class="sourceLineNo">460</span>          // and then get all the files from that directory and attempt to<a name="line.460"></a>
+<span class="sourceLineNo">461</span>          // archive those too<a name="line.461"></a>
+<span class="sourceLineNo">462</span>          Collection&lt;File&gt; children = file.getChildren();<a name="line.462"></a>
+<span class="sourceLineNo">463</span>          failures.addAll(resolveAndArchive(fs, parentArchiveDir, children, start));<a name="line.463"></a>
+<span class="sourceLineNo">464</span>        }<a name="line.464"></a>
+<span class="sourceLineNo">465</span>      } catch (IOException e) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>        LOG.warn("Failed to archive {}", file, e);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        failures.add(file);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>      }<a name="line.468"></a>
+<span class="sourceLineNo">469</span>    }<a name="line.469"></a>
+<span class="sourceLineNo">470</span>    return failures;<a name="line.470"></a>
+<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
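// Editor's note (not part of HFileArchiver): a hedged illustration of the "parallel structure"
// described in the javadoc above. Archiving a directory recurses into it and mirrors its name
// under the base archive dir; the paths below are assumptions for illustration.
//
//   toArchive = { f1, dir1/{ f2, f3 } }  with baseArchiveDir = .../archive/.../cf
//   results in:
//     .../archive/.../cf/f1
//     .../archive/.../cf/dir1/f2
//     .../archive/.../cf/dir1/f3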
+<span class="sourceLineNo">473</span>  /**<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   * Attempt to archive the passed in file to the archive directory.<a name="line.474"></a>
+<span class="sourceLineNo">475</span>   * &lt;p&gt;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>   * If the same file already exists in the archive, it is moved to a timestamped directory under<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   * the archive directory and the new file is put in its place.<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * @param archiveDir {@link Path} to the directory that stores the archives of the hfiles<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * @param currentFile {@link Path} to the original HFile that will be archived<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   * @param archiveStartTime time the archiving started, to resolve naming conflicts<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the file is successfully archived. &lt;tt&gt;false&lt;/tt&gt; if there was a<a name="line.481"></a>
+<span class="sourceLineNo">482</span>   *         problem, but the operation still completed.<a name="line.482"></a>
+<span class="sourceLineNo">483</span>   * @throws IOException on failure to complete {@link FileSystem} operations.<a name="line.483"></a>
+<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
+<span class="sourceLineNo">485</span>  private static boolean resolveAndArchiveFile(Path archiveDir, File currentFile,<a name="line.485"></a>
+<span class="sourceLineNo">486</span>      String archiveStartTime) throws IOException {<a name="line.486"></a>
+<span class="sourceLineNo">487</span>    // build path as it should be in the archive<a name="line.487"></a>
+<span class="sourceLineNo">488</span>    String filename = currentFile.getName();<a name="line.488"></a>
+<span class="sourceLineNo">489</span>    Path archiveFile = new Path(archiveDir, filename);<a name="line.489"></a>
+<span class="sourceLineNo">490</span>    FileSystem fs = currentFile.getFileSystem();<a name="line.490"></a>
+<span class="sourceLineNo">491</span><a name="line.491"></a>
+<span class="sourceLineNo">492</span>    // if the file already exists in the archive, move that one to a timestamped backup. This is a<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    // really, really unlikely situation, where we get the same name for the existing file, but it<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    // is included just for that one-in-a-trillion chance.<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    if (fs.exists(archiveFile)) {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>      LOG.debug("{} already exists in archive, moving to timestamped backup and " +<a name="line.496"></a>
+<span class="sourceLineNo">497</span>          "overwriting current.", archiveFile);<a name="line.497"></a>
 <span class="sourceLineNo">498</span><a name="line.498"></a>
-<span class="sourceLineNo">499</span>      try {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        success = currentFile.moveAndClose(archiveFile);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      } catch (FileNotFoundException fnfe) {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        LOG.warn("Failed to archive " + currentFile +<a name="line.502"></a>
-<span class="sourceLineNo">503</span>            " because it does not exist! Skipping and continuing on.", fnfe);<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        success = true;<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      } catch (IOException e) {<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        LOG.warn("Failed to archive " + currentFile + " on try #" + i, e);<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        success = false;<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      }<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    if (!success) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      LOG.error("Failed to archive " + currentFile);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return false;<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
+<span class="sourceLineNo">499</span>      // move the archive file to the stamped backup<a name="line.499"></a>
+<span class="sourceLineNo">500</span>      Path backedupArchiveFile = new Path(archiveDir, filename + SEPARATOR + archiveStartTime);<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      if (!fs.rename(archiveFile, backedupArchiveFile)) {<a name="line.501"></a>
+<span class="sourceLineNo">502</span>        LOG.error("Could not rename archive file to backup: " + backedupArchiveFile<a name="line.502"></a>
+<span class="sourceLineNo">503</span>            + ", deleting existing file in favor of newer.");<a name="line.503"></a>
+<span class="sourceLineNo">504</span>        // try to delete the existing file, if we can't rename it<a name="line.504"></a>
+<span class="sourceLineNo">505</span>        if (!fs.delete(archiveFile, false)) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>          throw new IOException("Couldn't delete existing archive file (" + archiveFile<a name="line.506"></a>
+<span class="sourceLineNo">507</span>              + ") or rename it to the backup file (" + backedupArchiveFile<a name="line.507"></a>
+<span class="sourceLineNo">508</span>              + ") to make room for similarly named file.");<a name="line.508"></a>
+<span class="sourceLineNo">509</span>        }<a name="line.509"></a>
+<span class="sourceLineNo">510</span>      }<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      LOG.debug("Backed up archive file from " + archiveFile);<a name="line.511"></a>
+<span class="sourceLineNo">512</span>    }<a name="line.512"></a>
+<span class="sourceLineNo">513</span><a name="line.513"></a>
+<span class="sourceLineNo">514</span>    LOG.trace("No existing file in archive for {}, free to archive original file.", archiveFile);<a name="line.514"></a>
 <span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    LOG.debug("Archived from {} to {}", currentFile, archiveFile);<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    return true;<a name="line.517"></a>
-<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
-<span class="sourceLineNo">519</span><a name="line.519"></a>
-<span class="sourceLineNo">520</span>  /**<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * Without regard for backup, delete a region. Should be used with caution.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   * @param regionDir {@link Path} to the region to be deleted.<a name="line.522"></a>
-<span class="sourceLineNo">523</span>   * @param fs FileSystem from which to delete the region<a name="line.523"></a>
-<span class="sourceLineNo">524</span>   * @return &lt;tt&gt;true&lt;/tt&gt; on successful deletion, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * @throws IOException on filesystem operation failure<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   */<a name="line.526"></a>
-<span class="sourceLineNo">527</span>  private static boolean deleteRegionWithoutArchiving(FileSystem fs, Path regionDir)<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      throws IOException {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    if (fs.delete(regionDir, true)) {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      LOG.debug("Deleted {}", regionDir);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      return true;<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    }<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    LOG.debug("Failed to delete directory {}", regionDir);<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    return false;<a name="line.534"></a>
-<span class="sourceLineNo">535</span>  }<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>  /**<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * Just do a simple delete of the given store files<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * &lt;p&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * A best effort is made to delete each of the files, rather than bailing on the first failure.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   * &lt;p&gt;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>   * @param compactedFiles store files to delete from the file system.<a name="line.542"></a>
-<span class="sourceLineNo">543</span>   * @throws IOException if a file cannot be deleted. All files will be attempted to deleted before<a name="line.543"></a>
-<span class="sourceLineNo">544</span>   *           throwing the exception, rather than failing at the first file.<a name="line.544"></a>
-<span class="sourceLineNo">545</span>   */<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  private static void deleteStoreFilesWithoutArchiving(Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      throws IOException {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    LOG.debug("Deleting files without archiving.");<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    List&lt;IOException&gt; errors = new ArrayList&lt;&gt;(0);<a name="line.549"></a>
-<span class="sourceLineNo">550</span>    for (HStoreFile hsf : compactedFiles) {<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      try {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>        hsf.deleteStoreFile();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      } catch (IOException e) {<a name="line.553"></a>
-<span class="sourceLineNo">554</span>        LOG.error("Failed to delete {}", hsf.getPath());<a name="line.554"></a>
-<span class="sourceLineNo">555</span>        errors.add(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    }<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    if (errors.size() &gt; 0) {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      throw MultipleIOException.createIOException(errors);<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>  }<a name="line.561"></a>
-<span class="sourceLineNo">562</span><a name="line.562"></a>
-<span class="sourceLineNo">563</span>  /**<a name="line.563"></a>
-<span class="sourceLineNo">564</span>   * Adapt a type to match the {@link File} interface, which is used internally for handling<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * archival/removal of files<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param &lt;T&gt; type to adapt to the {@link File} interface<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  private static abstract class FileConverter&lt;T&gt; implements Function&lt;T, File&gt; {<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    protected final FileSystem fs;<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    public FileConverter(FileSystem fs) {<a name="line.571"></a>
-<span class="sourceLineNo">572</span>      this.fs = fs;<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    }<a name="line.573"></a>
-<span class="sourceLineNo">574</span>  }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>  /**<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   * Convert a FileStatus to something we can manage in the archiving<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   */<a name="line.578"></a>
-<span class="sourceLineNo">579</span>  private static class FileStatusConverter extends FileConverter&lt;FileStatus&gt; {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    public FileStatusConverter(FileSystem fs) {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      super(fs);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>    }<a name="line.582"></a>
-<span class="sourceLineNo">583</span><a name="line.583"></a>
-<span class="sourceLineNo">584</span>    @Override<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    public File apply(FileStatus input) {<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      return new FileablePath(fs, input.getPath());<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    }<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  }<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>  /**<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * Convert the {@link HStoreFile} into something we can manage in the archive<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   * methods<a name="line.592"></a>
-<span class="sourceLineNo">593</span>   */<a name="line.593"></a>
-<span class="sourceLineNo">594</span>  private static class StoreToFile extends FileConverter&lt;HStoreFile&gt; {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    public StoreToFile(FileSystem fs) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      super(fs);<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    }<a name="line.597"></a>
+<span class="sourceLineNo">516</span>    // at this point, we should have a free spot for the archive file<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    boolean success = false;<a name="line.517"></a>
+<span class="sourceLineNo">518</span>    for (int i = 0; !success &amp;&amp; i &lt; DEFAULT_RETRIES_NUMBER; ++i) {<a name="line.518"></a>
+<span class="sourceLineNo">519</span>      if (i &gt; 0) {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>        // Ensure that the archive directory exists.<a name="line.520"></a>
+<span class="sourceLineNo">521</span>        // The previous "move to archive" operation has failed probably because<a name="line.521"></a>
+<span class="sourceLineNo">522</span>        // the cleaner has removed our archive directory (HBASE-7643).<a name="line.522"></a>
+<span class="sourceLineNo">523</span>        // (we're in a retry loop, so don't worry too much about the exception)<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          if (!fs.exists(archiveDir)) {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>            if (fs.mkdirs(archiveDir)) {<a name="line.526"></a>
+<span class="sourceLineNo">527</span>              LOG.debug("Created archive directory {}", archiveDir);<a name="line.527"></a>
+<span class="sourceLineNo">528</span>            }<a name="line.528"></a>
+<span class="sourceLineNo">529</span>          }<a name="line.529"></a>
+<span class="sourceLineNo">530</span>        } catch (IOException e) {<a name="line.530"></a>
+<span class="sourceLineNo">531</span>          LOG.warn("Failed to create directory {}", archiveDir, e);<a name="line.531"></a>
+<span class="sourceLineNo">532</span>        }<a name="line.532"></a>
+<span class="sourceLineNo">533</span>      }<a name="line.533"></a>
+<span class="sourceLineNo">534</span><a name="line.534"></a>
+<span class="sourceLineNo">535</span>      try {<a name="line.535"></a>
+<span class="sourceLineNo">536</span>        success = currentFile.moveAndClose(archiveFile);<a name="line.536"></a>
+<span class="sourceLineNo">537</span>      } catch (FileNotFoundException fnfe) {<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        LOG.warn("Failed to archive " + currentFile +<a name="line.538"></a>
+<span class="sourceLineNo">539</span>            " because it does not exist! Skipping and continuing on.", fnfe);<a name="line.539"></a>
+<span class="sourceLineNo">540</span>        success = true;<a name="line.540"></a>
+<span class="sourceLineNo">541</span>      } catch (IOException e) {<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        LOG.warn("Failed to archive " + currentFile + " on try #" + i, e);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>        success = false;<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      }<a name="line.544"></a>
+<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
+<span class="sourceLineNo">546</span><a name="line.546"></a>
+<span class="sourceLineNo">547</span>    if (!success) {<a name="line.547"></a>
+<span class="sourceLineNo">548</span>      LOG.error("Failed to archive " + currentFile);<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      return false;<a name="line.549"></a>
+<span class="sourceLineNo">550</span>    }<a name="line.550"></a>
+<span class="sourceLineNo">551</span><a name="line.551"></a>
+<span class="sourceLineNo">552</span>    LOG.debug("Archived from {} to {}", currentFile, archiveFile);<a name="line.552"></a>
+<span class="sourceLineNo">553</span>    return true;<a name="line.553"></a>
+<span class="sourceLineNo">554</span>  }<a name="line.554"></a>
+<span class="sourceLineNo">555</span><a name="line.555"></a>
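// Editor's note (not part of HFileArchiver): a hedged illustration of the conflict handling in
// resolveAndArchiveFile(...) above. If an archive file of the same name already exists, it is
// first renamed by appending the archive start time (using the class SEPARATOR constant, assumed
// here to render as "&lt;name&gt;.&lt;startTime&gt;") before the new file is moved into place:
//
//   archive/.../cf/abcd1234   (existing)  ->  archive/.../cf/abcd1234.1574606708000
//   region/.../cf/abcd1234    (current)   ->  archive/.../cf/abcd1234
//
// The move itself is retried up to DEFAULT_RETRIES_NUMBER times, re-creating the archive
// directory if the cleaner chore removed it between attempts (HBASE-7643).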
+<span class="sourceLineNo">556</span>  /**<a name="line.556"></a>
+<span class="sourceLineNo">557</span>   * Without regard for backup, delete a region. Should be used with caution.<a name="line.557"></a>
+<span class="sourceLineNo">558</span>   * @param regionDir {@link Path} to the region to be deleted.<a name="line.558"></a>
+<span class="sourceLineNo">559</span>   * @param fs FileSystem from which to delete the region<a name="line.559"></a>
+<span class="sourceLineNo">560</span>   * @return &lt;tt&gt;true&lt;/tt&gt; on successful deletion, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.560"></a>
+<span class="sourceLineNo">561</span>   * @throws IOException on filesystem operation failure<a name="line.561"></a>
+<span class="sourceLineNo">562</span>   */<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  private static boolean deleteRegionWithoutArchiving(FileSystem fs, Path regionDir)<a name="line.563"></a>
+<span class="sourceLineNo">564</span>      throws IOException {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>    if (fs.delete(regionDir, true)) {<a name="line.565"></a>
+<span class="sourceLineNo">566</span>      LOG.debug("Deleted {}", regionDir);<a name="line.566"></a>
+<span class="sourceLineNo">567</span>      return true;<a name="line.567"></a>
+<span class="sourceLineNo">568</span>    }<a name="line.568"></a>
+<span class="sourceLineNo">569</span>    LOG.debug("Failed to delete directory {}", regionDir);<a name="line.569"></a>
+<span class="sourceLineNo">570</span>    return false;<a name="line.570"></a>
+<span class="sourceLineNo">571</span>  }<a name="line.571"></a>
+<span class="sourceLineNo">572</span><a name="line.572"></a>
+<span class="sourceLineNo">573</span>  /**<a name="line.573"></a>
+<span class="sourceLineNo">574</span>   * Just do a simple delete of the given store files<a name="line.574"></a>
+<span class="sourceLineNo">575</span>   * &lt;p&gt;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>   * A best effort is made to delete each of the files, rather than bailing on the first failure.<a name="line.576"></a>
+<span class="sourceLineNo">577</span>   * &lt;p&gt;<a name="line.577"></a>
+<span class="sourceLineNo">578</span>   * @param compactedFiles store files to delete from the file system.<a name="line.578"></a>
+<span class="sourceLineNo">579</span>   * @throws IOException if a file cannot be deleted. All files will be attempted to deleted before<a name="line.579"></a>
+<span class="sourceLineNo">580</span>   *           throwing the exception, rather than failing at the first file.<a name="line.580"></a>
+<span class="sourceLineNo">581</span>   */<a name="line.581"></a>
+<span class="sourceLineNo">582</span>  private static void deleteStoreFilesWithoutArchiving(Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.582"></a>
+<span class="sourceLineNo">583</span>      throws IOException {<a name="line.583"></a>
+<span class="sourceLineNo">584</span>    LOG.debug("Deleting files without archiving.");<a name="line.584"></a>
+<span class="sourceLineNo">585</span>    List&lt;IOException&gt; errors = new ArrayList&lt;&gt;(0);<a name="line.585"></a>
+<span class="sourceLineNo">586</span>    for (HStoreFile hsf : compactedFiles) {<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
+<span class="sourceLineNo">588</span>        hsf.deleteStoreFile();<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      } catch (IOException e) {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>        LOG.error("Failed to delete {}", hsf.getPath());<a name="line.590"></a>
+<span class="sourceLineNo">591</span>        errors.add(e);<a name="line.591"></a>
+<span class="sourceLineNo">592</span>      }<a name="line.592"></a>
+<span class="sourceLineNo">593</span>    }<a name="line.593"></a>
+<span class="sourceLineNo">594</span>    if (errors.size() &gt; 0) {<a name="line.594"></a>
+<span class="sourceLineNo">595</span>      throw MultipleIOException.createIOException(errors);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>    }<a name="line.596"></a>
+<span class="sourceLineNo">597</span>  }<a name="line.597"></a>
 <span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>    @Override<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    public File apply(HStoreFile input) {<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      return new FileableStoreFile(fs, input);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    }<a name="line.602"></a>
-<span class="sourceLineNo">603</span>  }<a name="line.603"></a>
-<span class="sourceLineNo">604</span><a name="line.604"></a>
-<span class="sourceLineNo">605</span>  /**<a name="line.605"></a>
-<span class="sourceLineNo">606</span>   * Wrapper to handle file operations uniformly<a name="line.606"></a>
-<span class="sourceLineNo">607</span>   */<a name="line.607"></a>
-<span class="sourceLineNo">608</span>  private static abstract class File {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    protected final FileSystem fs;<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>    public File(FileSystem fs) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      this.fs = fs;<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>    /**<a name="line.615"></a>
-<span class="sourceLineNo">616</span>     * Delete the file<a name="line.616"></a>
-<span class="sourceLineNo">617</span>     * @throws IOException on failure<a name="line.617"></a>
-<span class="sourceLineNo">618</span>     */<a name="line.618"></a>
-<span class="sourceLineNo">619</span>    abstract void delete() throws IOException;<a name="line.619"></a>
-<span class="sourceLineNo">620</span><a name="line.620"></a>
-<span class="sourceLineNo">621</span>    /**<a name="line.621"></a>
-<span class="sourceLineNo">622</span>     * Check to see if this is a file or a directory<a name="line.622"></a>
-<span class="sourceLineNo">623</span>     * @return &lt;tt&gt;true&lt;/tt&gt; if it is a file, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.623"></a>
-<span class="sourceLineNo">624</span>     * @throws IOException on {@link FileSystem} connection error<a name="line.624"></a>
-<span class="sourceLineNo">625</span>     */<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    abstract boolean isFile() throws IOException;<a name="line.626"></a>
-<span class="sourceLineNo">627</span><a name="line.627"></a>
-<span class="sourceLineNo">628</span>    /**<a name="line.628"></a>
-<span class="sourceLineNo">629</span>     * @return if this is a directory, returns all the children in the<a name="line.629"></a>
-<span class="sourceLineNo">630</span>     *         directory, otherwise returns an empty list<a name="line.630"></a>
-<span class="sourceLineNo">631</span>     * @throws IOException<a name="line.631"></a>
-<span class="sourceLineNo">632</span>     */<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    abstract Collection&lt;File&gt; getChildren() throws IOException;<a name="line.633"></a>
+<span class="sourceLineNo">599</span>  /**<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * Adapt a type to match the {@link File} interface, which is used internally for handling<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   * archival/removal of files<a name="line.601"></a>
+<span class="sourceLineNo">602</span>   * @param &lt;T&gt; type to adapt to the {@link File} interface<a name="line.602"></a>
+<span class="sourceLineNo">603</span>   */<a name="line.603"></a>
+<span class="sourceLineNo">604</span>  private static abstract class FileConverter&lt;T&gt; implements Function&lt;T, File&gt; {<a name="line.604"></a>
+<span class="sourceLineNo">605</span>    protected final FileSystem fs;<a name="line.605"></a>
+<span class="sourceLineNo">606</span><a name="line.606"></a>
+<span class="sourceLineNo">607</span>    public FileConverter(FileSystem fs) {<a name="line.607"></a>
+<span class="sourceLineNo">608</span>      this.fs = fs;<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
+<span class="sourceLineNo">610</span>  }<a name="line.610"></a>
+<span class="sourceLineNo">611</span><a name="line.611"></a>
+<span class="sourceLineNo">612</span>  /**<a name="line.612"></a>
+<span class="sourceLineNo">613</span>   * Convert a FileStatus to something we can manage in the archiving<a name="line.613"></a>
+<span class="sourceLineNo">614</span>   */<a name="line.614"></a>
+<span class="sourceLineNo">615</span>  private static class FileStatusConverter extends FileConverter&lt;FileStatus&gt; {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>    public FileStatusConverter(FileSystem fs) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      super(fs);<a name="line.617"></a>
+<span class="sourceLineNo">618</span>    }<a name="line.618"></a>
+<span class="sourceLineNo">619</span><a name="line.619"></a>
+<span class="sourceLineNo">620</span>    @Override<a name="line.620"></a>
+<span class="sourceLineNo">621</span>    public File apply(FileStatus input) {<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      return new FileablePath(fs, input.getPath());<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
+<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>  /**<a name="line.626"></a>
+<span class="sourceLineNo">627</span>   * Convert the {@link HStoreFile} into something we can manage in the archive<a name="line.627"></a>
+<span class="sourceLineNo">628</span>   * methods<a name="line.628"></a>
+<span class="sourceLineNo">629</span>   */<a name="line.629"></a>
+<span class="sourceLineNo">630</span>  private static class StoreToFile extends FileConverter&lt;HStoreFile&gt; {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>    public StoreToFile(FileSystem fs) {<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      super(fs);<a name="line.632"></a>
+<span class="sourceLineNo">633</span>    }<a name="line.633"></a>
 <span class="sourceLineNo">634</span><a name="line.634"></a>
-<span class="sourceLineNo">635</span>    /**<a name="line.635"></a>
-<span class="sourceLineNo">636</span>     * close any outside readers of the file<a name="line.636"></a>
-<span class="sourceLineNo">637</span>     * @throws IOException<a name="line.637"></a>
-<span class="sourceLineNo">638</span>     */<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    abstract void close() throws IOException;<a name="line.639"></a>
+<span class="sourceLineNo">635</span>    @Override<a name="line.635"></a>
+<span class="sourceLineNo">636</span>    public File apply(HStoreFile input) {<a name="line.636"></a>
+<span class="sourceLineNo">637</span>      return new FileableStoreFile(fs, input);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    }<a name="line.638"></a>
+<span class="sourceLineNo">639</span>  }<a name="line.639"></a>
 <span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    /**<a name="line.641"></a>
-<span class="sourceLineNo">642</span>     * @return the name of the file (not the full fs path, just the individual<a name="line.642"></a>
-<span class="sourceLineNo">643</span>     *         file name)<a name="line.643"></a>
-<span class="sourceLineNo">644</span>     */<a name="line.644"></a>
-<span class="sourceLineNo">645</span>    abstract String getName();<a name="line.645"></a>
+<span class="sourceLineNo">641</span>  /**<a name="line.641"></a>
+<span class="sourceLineNo">642</span>   * Wrapper to handle file operations uniformly<a name="line.642"></a>
+<span class="sourceLineNo">643</span>   */<a name="line.643"></a>
+<span class="sourceLineNo">644</span>  private static abstract class File {<a name="line.644"></a>
+<span class="sourceLineNo">645</span>    protected final FileSystem fs;<a name="line.645"></a>
 <span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>    /**<a name="line.647"></a>
-<span class="sourceLineNo">648</span>     * @return the path to this file<a name="line.648"></a>
-<span class="sourceLineNo">649</span>     */<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    abstract Path getPath();<a name="line.650"></a>
-<span class="sourceLineNo">651</span><a name="line.651"></a>
-<span class="sourceLineNo">652</span>    /**<a name="line.652"></a>
-<span class="sourceLineNo">653</span>     * Move the file to the given destination<a name="line.653"></a>
-<span class="sourceLineNo">654</span>     * @param dest<a name="line.654"></a>
-<span class="sourceLineNo">655</span>     * @return &lt;tt&gt;true&lt;/tt&gt; on success<a name="line.655"></a>
-<span class="sourceLineNo">656</span>     * @throws IOException<a name="line.656"></a>
-<span class="sourceLineNo">657</span>     */<a name="line.657"></a>
-<span class="sourceLineNo">658</span>    public boolean moveAndClose(Path dest) throws IOException {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      this.close();<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      Path p = this.getPath();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>      return FSUtils.renameAndSetModifyTime(fs, p, dest);<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    }<a name="line.662"></a>
+<span class="sourceLineNo">647</span>    public File(FileSystem fs) {<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      this.fs = fs;<a name="line.648"></a>
+<span class="sourceLineNo">649</span>    }<a name="line.649"></a>
+<span class="sourceLineNo">650</span><a name="line.650"></a>
+<span class="sourceLineNo">651</span>    /**<a name="line.651"></a>
+<span class="sourceLineNo">652</span>     * Delete the file<a name="line.652"></a>
+<span class="sourceLineNo">653</span>     * @throws IOException on failure<a name="line.653"></a>
+<span class="sourceLineNo">654</span>     */<a name="line.654"></a>
+<span class="sourceLineNo">655</span>    abstract void delete() throws IOException;<a name="line.655"></a>
+<span class="sourceLineNo">656</span><a name="line.656"></a>
+<span class="sourceLineNo">657</span>    /**<a name="line.657"></a>
+<span class="sourceLineNo">658</span>     * Check to see if this is a file or a directory<a name="line.658"></a>
+<span class="sourceLineNo">659</span>     * @return &lt;tt&gt;true&lt;/tt&gt; if it is a file, &lt;tt&gt;false&lt;/tt&gt; otherwise<a name="line.659"></a>
+<span class="sourceLineNo">660</span>     * @throws IOException on {@link FileSystem} connection error<a name="line.660"></a>
+<span class="sourceLineNo">661</span>     */<a name="line.661"></a>
+<span class="sourceLineNo">662</span>    abstract boolean isFile() throws IOException;<a name="line.662"></a>
 <span class="sourceLineNo">663</span><a name="line.663"></a>
 <span class="sourceLineNo">664</span>    /**<a name="line.664"></a>
-<span class="sourceLineNo">665</span>     * @return the {@link FileSystem} on which this file resides<a name="line.665"></a>
-<span class="sourceLineNo">666</span>     */<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public FileSystem getFileSystem() {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      return this.fs;<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    }<a name="line.669"></a>
+<span class="sourceLineNo">665</span>     * @return if this is a directory, returns all the children in the<a name="line.665"></a>
+<span class="sourceLineNo">666</span>     *         directory, otherwise returns an empty list<a name="line.666"></a>
+<span class="sourceLineNo">667</span>     * @throws IOException<a name="line.667"></a>
+<span class="sourceLineNo">668</span>     */<a name="line.668"></a>
+<span class="sourceLineNo">669</span>    abstract Collection&lt;File&gt; getChildren() throws IOException;<a name="line.669"></a>
 <span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    @Override<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    public String toString() {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      return this.getClass().getSimpleName() + ", " + getPath().toString();<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    }<a name="line.674"></a>
-<span class="sourceLineNo">675</span>  }<a name="line.675"></a>
+<span class="sourceLineNo">671</span>    /**<a name="line.671"></a>
+<span class="sourceLineNo">672</span>     * close any outside readers of the file<a name="line.672"></a>
+<span class="sourceLineNo">673</span>     * @throws IOException<a name="line.673"></a>
+<span class="sourceLineNo">674</span>     */<a name="line.674"></a>
+<span class="sourceLineNo">675</span>    abstract void close() throws IOException;<a name="line.675"></a>
 <span class="sourceLineNo">676</span><a name="line.676"></a>
-<span class="sourceLineNo">677</span>  /**<a name="line.677"></a>
-<span class="sourceLineNo">678</span>   * A {@link File} that wraps a simple {@link Path} on a {@link FileSystem}.<a name="line.678"></a>
-<span class="sourceLineNo">679</span>   */<a name="line.679"></a>
-<span class="sourceLineNo">680</span>  private static class FileablePath extends File {<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    private final Path file;<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    private final FileStatusConverter getAsFile;<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>    public FileablePath(FileSystem fs, Path file) {<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      super(fs);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>      this.file = file;<a name="line.686"></a>
-<span class="sourceLineNo">687</span>      this.getAsFile = new FileStatusConverter(fs);<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    }<a name="line.688"></a>
-<span class="sourceLineNo">689</span><a name="line.689"></a>
-<span class="sourceLineNo">690</span>    @Override<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    public void delete() throws IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      if (!fs.delete(file, true)) throw new IOException("Failed to delete:" + this.file);<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    }<a name="line.693"></a>
-<span class="sourceLineNo">694</span><a name="line.694"></a>
-<span class="sourceLineNo">695</span>    @Override<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    public String getName() {<a name="line.696"></a>
-<span class="sourceLineNo">697</span>      return file.getName();<a name="line.697"></a>
+<span class="sourceLineNo">677</span>    /**<a name="line.677"></a>
+<span class="sourceLineNo">678</span>     * @return the name of the file (not the full fs path, just the individual<a name="line.678"></a>
+<span class="sourceLineNo">679</span>     *         file name)<a name="line.679"></a>
+<span class="sourceLineNo">680</span>     */<a name="line.680"></a>
+<span class="sourceLineNo">681</span>    abstract String getName();<a name="line.681"></a>
+<span class="sourceLineNo">682</span><a name="line.682"></a>
+<span class="sourceLineNo">683</span>    /**<a name="line.683"></a>
+<span class="sourceLineNo">684</span>     * @return the path to this file<a name="line.684"></a>
+<span class="sourceLineNo">685</span>     */<a name="line.685"></a>
+<span class="sourceLineNo">686</span>    abstract Path getPath();<a name="line.686"></a>
+<span class="sourceLineNo">687</span><a name="line.687"></a>
+<span class="sourceLineNo">688</span>    /**<a name="line.688"></a>
+<span class="sourceLineNo">689</span>     * Move the file to the given destination<a name="line.689"></a>
+<span class="sourceLineNo">690</span>     * @param dest<a name="line.690"></a>
+<span class="sourceLineNo">691</span>     * @return &lt;tt&gt;true&lt;/tt&gt; on success<a name="line.691"></a>
+<span class="sourceLineNo">692</span>     * @throws IOException<a name="line.692"></a>
+<span class="sourceLineNo">693</span>     */<a name="line.693"></a>
+<span class="sourceLineNo">694</span>    public boolean moveAndClose(Path dest) throws IOException {<a name="line.694"></a>
+<span class="sourceLineNo">695</span>      this.close();<a name="line.695"></a>
+<span class="sourceLineNo">696</span>      Path p = this.getPath();<a name="line.696"></a>
+<span class="sourceLineNo">697</span>      return FSUtils.renameAndSetModifyTime(fs, p, dest);<a name="line.697"></a>
 <span class="sourceLineNo">698</span>    }<a name="line.698"></a>
 <span class="sourceLineNo">699</span><a name="line.699"></a>
-<span class="sourceLineNo">700</span>    @Override<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>      if (fs.isFile(file)) {<a name="line.702"></a>
-<span class="sourceLineNo">703</span>        return Collections.emptyList();<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      return Stream.of(fs.listStatus(file)).map(getAsFile).collect(Collectors.toList());<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    }<a name="line.706"></a>
-<span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>    @Override<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    public boolean isFile() throws IOException {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>      return fs.isFile(file);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    }<a name="line.711"></a>
+<span class="sourceLineNo">700</span>    /**<a name="line.700"></a>
+<span class="sourceLineNo">701</span>     * @return the {@link FileSystem} on which this file resides<a name="line.701"></a>
+<span class="sourceLineNo">702</span>     */<a name="line.702"></a>
+<span class="sourceLineNo">703</span>    public FileSystem getFileSystem() {<a name="line.703"></a>
+<span class="sourceLineNo">704</span>      return this.fs;<a name="line.704"></a>
+<span class="sourceLineNo">705</span>    }<a name="line.705"></a>
+<span class="sourceLineNo">706</span><a name="line.706"></a>
+<span class="sourceLineNo">707</span>    @Override<a name="line.707"></a>
+<span class="sourceLineNo">708</span>    public String toString() {<a name="line.708"></a>
+<span class="sourceLineNo">709</span>      return this.getClass().getSimpleName() + ", " + getPath().toString();<a name="line.709"></a>
+<span class="sourceLineNo">710</span>    }<a name="line.710"></a>
+<span class="sourceLineNo">711</span>  }<a name="line.711"></a>
 <span class="sourceLineNo">712</span><a name="line.712"></a>
-<span class="sourceLineNo">713</span>    @Override<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    public void close() throws IOException {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>      // NOOP - files are implicitly closed on removal<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    }<a name="line.716"></a>
-<span class="sourceLineNo">717</span><a name="line.717"></a>
-<span class="sourceLineNo">718</span>    @Override<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    Path getPath() {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return file;<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
-<span class="sourceLineNo">723</span><a name="line.723"></a>
-<span class="sourceLineNo">724</span>  /**<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * {@link File} adapter for a {@link HStoreFile} living on a {@link FileSystem}<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * .<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   */<a name="line.727"></a>
-<span class="sourceLineNo">728</span>  private static class FileableStoreFile extends File {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    HStoreFile file;<a name="line.729"></a>
+<span class="sourceLineNo">713</span>  /**<a name="line.713"></a>
+<span class="sourceLineNo">714</span>   * A {@link File} that wraps a simple {@link Path} on a {@link FileSystem}.<a name="line.714"></a>
+<span class="sourceLineNo">715</span>   */<a name="line.715"></a>
+<span class="sourceLineNo">716</span>  private static class FileablePath extends File {<a name="line.716"></a>
+<span class="sourceLineNo">717</span>    private final Path file;<a name="line.717"></a>
+<span class="sourceLineNo">718</span>    private final FileStatusConverter getAsFile;<a name="line.718"></a>
+<span class="sourceLineNo">719</span><a name="line.719"></a>
+<span class="sourceLineNo">720</span>    public FileablePath(FileSystem fs, Path file) {<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      super(fs);<a name="line.721"></a>
+<span class="sourceLineNo">722</span>      this.file = file;<a name="line.722"></a>
+<span class="sourceLineNo">723</span>      this.getAsFile = new FileStatusConverter(fs);<a name="line.723"></a>
+<span class="sourceLineNo">724</span>    }<a name="line.724"></a>
+<span class="sourceLineNo">725</span><a name="line.725"></a>
+<span class="sourceLineNo">726</span>    @Override<a name="line.726"></a>
+<span class="sourceLineNo">727</span>    public void delete() throws IOException {<a name="line.727"></a>
+<span class="sourceLineNo">728</span>      if (!fs.delete(file, true)) throw new IOException("Failed to delete:" + this.file);<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
 <span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    public FileableStoreFile(FileSystem fs, HStoreFile store) {<a name="line.731"></a>
-<span class="sourceLineNo">732</span>      super(fs);<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      this.file = store;<a name="line.733"></a>
+<span class="sourceLineNo">731</span>    @Override<a name="line.731"></a>
+<span class="sourceLineNo">732</span>    public String getName() {<a name="line.732"></a>
+<span class="sourceLineNo">733</span>      return file.getName();<a name="line.733"></a>
 <span class="sourceLineNo">734</span>    }<a name="line.734"></a>
 <span class="sourceLineNo">735</span><a name="line.735"></a>
 <span class="sourceLineNo">736</span>    @Override<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    public void delete() throws IOException {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      file.deleteStoreFile();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>    }<a name="line.739"></a>
-<span class="sourceLineNo">740</span><a name="line.740"></a>
-<span class="sourceLineNo">741</span>    @Override<a name="line.741"></a>
-<span class="sourceLineNo">742</span>    public String getName() {<a name="line.742"></a>
-<span class="sourceLineNo">743</span>      return file.getPath().getName();<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>    @Override<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    public boolean isFile() {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      return true;<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    }<a name="line.749"></a>
-<span class="sourceLineNo">750</span><a name="line.750"></a>
-<span class="sourceLineNo">751</span>    @Override<a name="line.751"></a>
-<span class="sourceLineNo">752</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      // storefiles don't have children<a name="line.753"></a>
-<span class="sourceLineNo">754</span>      return Collections.emptyList();<a name="line.754"></a>
-<span class="sourceLineNo">755</span>    }<a name="line.755"></a>
-<span class="sourceLineNo">756</span><a name="line.756"></a>
-<span class="sourceLineNo">757</span>    @Override<a name="line.757"></a>
-<span class="sourceLineNo">758</span>    public void close() throws IOException {<a name="line.758"></a>
-<span class="sourceLineNo">759</span>      file.closeStoreFile(true);<a name="line.759"></a>
-<span class="sourceLineNo">760</span>    }<a name="line.760"></a>
-<span class="sourceLineNo">761</span><a name="line.761"></a>
-<span class="sourceLineNo">762</span>    @Override<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    Path getPath() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      return file.getPath();<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    }<a name="line.765"></a>
-<span class="sourceLineNo">766</span>  }<a name="line.766"></a>
-<span class="sourceLineNo">767</span>}<a name="line.767"></a>
+<span class="sourceLineNo">737</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.737"></a>
+<span class="sourceLineNo">738</span>      if (fs.isFile(file)) {<a name="line.738"></a>
+<span class="sourceLineNo">739</span>        return Collections.emptyList();<a name="line.739"></a>
+<span class="sourceLineNo">740</span>      }<a name="line.740"></a>
+<span class="sourceLineNo">741</span>      return Stream.of(fs.listStatus(file)).map(getAsFile).collect(Collectors.toList());<a name="line.741"></a>
+<span class="sourceLineNo">742</span>    }<a name="line.742"></a>
+<span class="sourceLineNo">743</span><a name="line.743"></a>
+<span class="sourceLineNo">744</span>    @Override<a name="line.744"></a>
+<span class="sourceLineNo">745</span>    public boolean isFile() throws IOException {<a name="line.745"></a>
+<span class="sourceLineNo">746</span>      return fs.isFile(file);<a name="line.746"></a>
+<span class="sourceLineNo">747</span>    }<a name="line.747"></a>
+<span class="sourceLineNo">748</span><a name="line.748"></a>
+<span class="sourceLineNo">749</span>    @Override<a name="line.749"></a>
+<span class="sourceLineNo">750</span>    public void close() throws IOException {<a name="line.750"></a>
+<span class="sourceLineNo">751</span>      // NOOP - files are implicitly closed on removal<a name="line.751"></a>
+<span class="sourceLineNo">752</span>    }<a name="line.752"></a>
+<span class="sourceLineNo">753</span><a name="line.753"></a>
+<span class="sourceLineNo">754</span>    @Override<a name="line.754"></a>
+<span class="sourceLineNo">755</span>    Path getPath() {<a name="line.755"></a>
+<span class="sourceLineNo">756</span>      return file;<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    }<a name="line.757"></a>
+<span class="sourceLineNo">758</span>  }<a name="line.758"></a>
+<span class="sourceLineNo">759</span><a name="line.759"></a>
+<span class="sourceLineNo">760</span>  /**<a name="line.760"></a>
+<span class="sourceLineNo">761</span>   * {@link File} adapter for a {@link HStoreFile} living on a {@link FileSystem}<a name="line.761"></a>
+<span class="sourceLineNo">762</span>   * .<a name="line.762"></a>
+<span class="sourceLineNo">763</span>   */<a name="line.763"></a>
+<span class="sourceLineNo">764</span>  private static class FileableStoreFile extends File {<a name="line.764"></a>
+<span class="sourceLineNo">765</span>    HStoreFile file;<a name="line.765"></a>
+<span class="sourceLineNo">766</span><a name="line.766"></a>
+<span class="sourceLineNo">767</span>    public FileableStoreFile(FileSystem fs, HStoreFile store) {<a name="line.767"></a>
+<span class="sourceLineNo">768</span>      super(fs);<a name="line.768"></a>
+<span class="sourceLineNo">769</span>      this.file = store;<a name="line.769"></a>
+<span class="sourceLineNo">770</span>    }<a name="line.770"></a>
+<span class="sourceLineNo">771</span><a name="line.771"></a>
+<span class="sourceLineNo">772</span>    @Override<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    public void delete() throws IOException {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>      file.deleteStoreFile();<a name="line.774"></a>
+<span class="sourceLineNo">775</span>    }<a name="line.775"></a>
+<span class="sourceLineNo">776</span><a name="line.776"></a>
+<span class="sourceLineNo">777</span>    @Override<a name="line.777"></a>
+<span class="sourceLineNo">778</span>    public String getName() {<a name="line.778"></a>
+<span class="sourceLineNo">779</span>      return file.getPath().getName();<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    }<a name="line.780"></a>
+<span class="sourceLineNo">781</span><a name="line.781"></a>
+<span class="sourceLineNo">782</span>    @Override<a name="line.782"></a>
+<span class="sourceLineNo">783</span>    public boolean isFile() {<a name="line.783"></a>
+<span class="sourceLineNo">784</span>      return true;<a name="line.784"></a>
+<span class="sourceLineNo">785</span>    }<a name="line.785"></a>
+<span class="sourceLineNo">786</span><a name="line.786"></a>
+<span class="sourceLineNo">787</span>    @Override<a name="line.787"></a>
+<span class="sourceLineNo">788</span>    public Collection&lt;File&gt; getChildren() throws IOException {<a name="line.788"></a>
+<span class="sourceLineNo">789</span>      // storefiles don't have children<a name="line.789"></a>
+<span class="sourceLineNo">790</span>      return Collections.emptyList();<a name="line.790"></a>
+<span class="sourceLineNo">791</span>    }<a name="line.791"></a>
+<span class="sourceLineNo">792</span><a name="line.792"></a>
+<span class="sourceLineNo">793</span>    @Override<a name="line.793"></a>
+<span class="sourceLineNo">794</span>    public void close() throws IOException {<a name="line.794"></a>
+<span class="sourceLineNo">795</span>      file.closeStoreFile(true);<a name="line.795"></a>
+<span class="sourceLineNo">796</span>    }<a name="line.796"></a>
+<span class="sourceLineNo">797</span><a name="line.797"></a>
+<span class="sourceLineNo">798</span>    @Override<a name="line.798"></a>
+<span class="sourceLineNo">799</span>    Path getPath() {<a name="line.799"></a>
+<span class="sourceLineNo">800</span>      return file.getPath();<a name="line.800"></a>
+<span class="sourceLineNo">801</span>    }<a name="line.801"></a>
+<span class="sourceLineNo">802</span>  }<a name="line.802"></a>
+<span class="sourceLineNo">803</span>}<a name="line.803"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html
index 0343488..a9dcefd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/HFileArchiver.FileablePath.html
@@ -46,733 +46,769 @@
 <span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import org.apache.hadoop.fs.Path;<a name="line.39"></a>
 <span class="sourceLineNo">040</span>import org.apache.hadoop.fs.PathFilter;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.regionserver.HStoreFile;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.Logger;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.slf4j.LoggerFactory;<a name="line.51"></a>
-<span class="sourceLineNo">052</span><a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.53"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.HConstants;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.regionserver.HStoreFile;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.slf4j.Logger;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.slf4j.LoggerFactory;<a name="line.53"></a>
 <span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>/**<a name="line.55"></a>
-<span class="sourceLineNo">056</span> * Utility class to handle the removal of HFiles (or the respective {@link HStoreFile StoreFiles})<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * for a HRegion from the {@link FileSystem}. The hfiles will be archived or deleted, depending on<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * the state of the system.<a name="line.58"></a>
-<span class="sourceLineNo">059</span> */<a name="line.59"></a>
-<span class="sourceLineNo">060</span>@InterfaceAudience.Private<a name="line.60"></a>
-<span class="sourceLineNo">061</span>public class HFileArchiver {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class);<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  private static final String SEPARATOR = ".";<a name="line.63"></a>
-<span class="sourceLineNo">064</span><a name="line.64"></a>
-<span class="sourceLineNo">065</span>  /** Number of retries in case of fs operation failure */<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  private static final int DEFAULT_RETRIES_NUMBER = 3;<a name="line.66"></a>
-<span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  private static final Function&lt;File, Path&gt; FUNC_FILE_TO_PATH =<a name="line.68"></a>
-<span class="sourceLineNo">069</span>      new Function&lt;File, Path&gt;() {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>        @Override<a name="line.70"></a>
-<span class="sourceLineNo">071</span>        public Path apply(File file) {<a name="line.71"></a>
-<span class="sourceLineNo">072</span>          return file == null ? null : file.getPath();<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        }<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      };<a name="line.74"></a>
-<span class="sourceLineNo">075</span><a name="line.75"></a>
-<span class="sourceLineNo">076</span>  private static ThreadPoolExecutor archiveExecutor;<a name="line.76"></a>
+<span class="sourceLineNo">055</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>/**<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * Utility class to handle the removal of HFiles (or the respective {@link HStoreFile StoreFiles})<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * for a HRegion from the {@link FileSystem}. The hfiles will be archived or deleted, depending on<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * the state of the system.<a name="line.60"></a>
+<span class="sourceLineNo">061</span> */<a name="line.61"></a>
+<span class="sourceLineNo">062</span>@InterfaceAudience.Private<a name="line.62"></a>
+<span class="sourceLineNo">063</span>public class HFileArchiver {<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class);<a name="line.64"></a>
+<span class="sourceLineNo">065</span>  private static final String SEPARATOR = ".";<a name="line.65"></a>
+<span class="sourceLineNo">066</span><a name="line.66"></a>
+<span class="sourceLineNo">067</span>  /** Number of retries in case of fs operation failure */<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  private static final int DEFAULT_RETRIES_NUMBER = 3;<a name="line.68"></a>
+<span class="sourceLineNo">069</span><a name="line.69"></a>
+<span class="sourceLineNo">070</span>  private static final Function&lt;File, Path&gt; FUNC_FILE_TO_PATH =<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      new Function&lt;File, Path&gt;() {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>        @Override<a name="line.72"></a>
+<span class="sourceLineNo">073</span>        public Path apply(File file) {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>          return file == null ? null : file.getPath();<a name="line.74"></a>
+<span class="sourceLineNo">075</span>        }<a name="line.75"></a>
+<span class="sourceLineNo">076</span>      };<a name="line.76"></a>
 <span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  private HFileArchiver() {<a name="line.78"></a>
-<span class="sourceLineNo">079</span>    // hidden ctor since this is just a util<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span>  /**<a name="line.82"></a>
-<span class="sourceLineNo">083</span>   * @return True if the Region exits in the filesystem.<a name="line.83"></a>
-<span class="sourceLineNo">084</span>   */<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public static boolean exists(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.85"></a>
-<span class="sourceLineNo">086</span>      throws IOException {<a name="line.86"></a>
-<span class="sourceLineNo">087</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    Path regionDir = FSUtils.getRegionDirFromRootDir(rootDir, info);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    return fs.exists(regionDir);<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  }<a name="line.90"></a>
-<span class="sourceLineNo">091</span><a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /**<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * Cleans up all the files for a HRegion by archiving the HFiles to the archive directory<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   * @param conf the configuration to use<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   * @param fs the file system object<a name="line.95"></a>
-<span class="sourceLineNo">096</span>   * @param info RegionInfo for region to be deleted<a name="line.96"></a>
-<span class="sourceLineNo">097</span>   */<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static void archiveRegion(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      throws IOException {<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    archiveRegion(fs, rootDir, FSUtils.getTableDir(rootDir, info.getTable()),<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      FSUtils.getRegionDirFromRootDir(rootDir, info));<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  }<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  /**<a name="line.105"></a>
-<span class="sourceLineNo">106</span>   * Remove an entire region from the table directory via archiving the region's hfiles.<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   *          the archive path)<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * @param regionDir {@link Path} to where a region is being stored (for building the archive path)<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the region was successfully deleted. &lt;tt&gt;false&lt;/tt&gt; if the filesystem<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   *         operations could not complete.<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   * @throws IOException if the request cannot be completed<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   */<a name="line.115"></a>
-<span class="sourceLineNo">116</span>  public static boolean archiveRegion(FileSystem fs, Path rootdir, Path tableDir, Path regionDir)<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      throws IOException {<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    // otherwise, we archive the files<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    // make sure we can archive<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    if (tableDir == null || regionDir == null) {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>      LOG.error("No archive directory could be found because tabledir (" + tableDir<a name="line.121"></a>
-<span class="sourceLineNo">122</span>          + ") or regiondir (" + regionDir + "was null. Deleting files instead.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      if (regionDir != null) {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        deleteRegionWithoutArchiving(fs, regionDir);<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      }<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      // we should have archived, but failed to. Doesn't matter if we deleted<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      // the archived files correctly or not.<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      return false;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    }<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>    LOG.debug("ARCHIVING {}", regionDir);<a name="line.131"></a>
+<span class="sourceLineNo">078</span>  private static ThreadPoolExecutor archiveExecutor;<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>  private HFileArchiver() {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    // hidden ctor since this is just a util<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * @return True if the Region exits in the filesystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  public static boolean exists(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      throws IOException {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    Path regionDir = FSUtils.getRegionDirFromRootDir(rootDir, info);<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    return fs.exists(regionDir);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>  }<a name="line.92"></a>
+<span class="sourceLineNo">093</span><a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /**<a name="line.94"></a>
+<span class="sourceLineNo">095</span>   * Cleans up all the files for a HRegion by archiving the HFiles to the archive directory<a name="line.95"></a>
+<span class="sourceLineNo">096</span>   * @param conf the configuration to use<a name="line.96"></a>
+<span class="sourceLineNo">097</span>   * @param fs the file system object<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   * @param info RegionInfo for region to be deleted<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   */<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  public static void archiveRegion(Configuration conf, FileSystem fs, RegionInfo info)<a name="line.100"></a>
+<span class="sourceLineNo">101</span>      throws IOException {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    Path rootDir = FSUtils.getRootDir(conf);<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    archiveRegion(fs, rootDir, FSUtils.getTableDir(rootDir, info.getTable()),<a name="line.103"></a>
+<span class="sourceLineNo">104</span>      FSUtils.getRegionDirFromRootDir(rootDir, info));<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  /**<a name="line.107"></a>
+<span class="sourceLineNo">108</span>   * Remove an entire region from the table directory via archiving the region's hfiles.<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * @param fs {@link FileSystem} from which to remove the region<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   *          the archive path)<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * @param tableDir {@link Path} to where the table is being stored (for building the archive path)<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * @param regionDir {@link Path} to where a region is being stored (for building the archive path)<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * @return &lt;tt&gt;true&lt;/tt&gt; if the region was successfully deleted. &lt;tt&gt;false&lt;/tt&gt; if the filesystem<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   *         operations could not complete.<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   * @throws IOException if the request cannot be completed<a name="line.116"></a>
+<span class="sourceLineNo">117</span>   */<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  public static boolean archiveRegion(FileSystem fs, Path rootdir, Path tableDir, Path regionDir)<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      throws IOException {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    // otherwise, we archive the files<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    // make sure we can archive<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    if (tableDir == null || regionDir == null) {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      LOG.error("No archive directory could be found because tabledir (" + tableDir<a name="line.123"></a>
+<span class="sourceLineNo">124</span>          + ") or regiondir (" + regionDir + "was null. Deleting files instead.");<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      if (regionDir != null) {<a name="line.125"></a>
+<span class="sourceLineNo">126</span>        deleteRegionWithoutArchiving(fs, regionDir);<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      }<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      // we should have archived, but failed to. Doesn't matter if we deleted<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      // the archived files correctly or not.<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      return false;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    }<a name="line.131"></a>
 <span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>    // make sure the regiondir lives under the tabledir<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    Preconditions.checkArgument(regionDir.toString().startsWith(tableDir.toString()));<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    Path regionArchiveDir = HFileArchiveUtil.getRegionArchiveDir(rootdir,<a name="line.135"></a>
... 136590 lines suppressed ...